Dataset schema:

| column | type | lengths / classes |
|---|---|---|
| commit | stringlengths | 40–40 |
| old_file | stringlengths | 4–118 |
| new_file | stringlengths | 4–118 |
| old_contents | stringlengths | 0–2.94k |
| new_contents | stringlengths | 1–4.43k |
| subject | stringlengths | 15–444 |
| message | stringlengths | 16–3.45k |
| lang | stringclasses | 1 value |
| license | stringclasses | 13 values |
| repos | stringlengths | 5–43.2k |
| prompt | stringlengths | 17–4.58k |
| response | stringlengths | 1–4.43k |
| prompt_tagged | stringlengths | 58–4.62k |
| response_tagged | stringlengths | 1–4.43k |
| text | stringlengths | 132–7.29k |
| text_tagged | stringlengths | 173–7.33k |
bb25e2d989f208b803830349748900f687473b12
|
scripts/lie_docking.py
|
import sys

from autobahn.twisted.wamp import ApplicationRunner
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.internet import reactor

from lie_system import LieApplicationSession


class LIEWorkflow(LieApplicationSession):

    _isauthenticated = False

    @inlineCallbacks
    def authenticate(self, username, password):
        authentication = yield self.call(u'liestudio.user.login', username, password)
        if authentication:
            self._isauthenticated = True
        self.logging.info('Authentication of user: {0}, {1}'.format(username, self._isauthenticated))
        returnValue(self._isauthenticated)

    @inlineCallbacks
    def onJoin(self, details):
        self.logging.info("Simulating a LIE workflow")
        print(self.config)

        # Try to login
        # print(yield self.authenticate('lieadmin','liepw@#'))
        # reactor.stop()
        # return
        #
        # if not isauthenticated:
        #     raise('Unable to authenticate')

        # Get a number of ligand structures
        lig_cids = ['cid001', 'cid002', 'cid003', 'cid004', 'cid005']
        self.logging.info('Retrieve structures by cid for {0} compounds'.format(len(lig_cids)))
        protein = yield self.call(u'liestudio.structures.get_structure', 'protein')
        ligands = [self.call(u'liestudio.structures.get_structure', cid) for cid in lig_cids]

        # Dock structures
        self.logging.info('Dock {0} structures'.format(len(ligands)))
        docked = []
        for structure in ligands:
            b = yield structure
            docked.append(self.call(u'liestudio.docking.run', protein['result'], b['result']))

        # Simulating a MD run
        self.logging.info('Running MD for {0} structures'.format(len(docked)))
        md = []
        for result in docked:
            k = yield result
            md.append(self.call(u'liestudio.md.run', k['result']))

        for n in md:
            f = yield n
            print(f)

        self.logging.info('Finished workflow')
        reactor.stop()


if __name__ == '__main__':
    runner = ApplicationRunner(
        u"ws://localhost:8080/ws",
        u"liestudio",
    )
    runner.run(LIEWorkflow)
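The loops above resolve one deferred per pass. A minimal sketch of collecting a batch of parallel WAMP calls in a single step with Twisted's gatherResults; this is an alternative pattern, not part of the commit, and `session` is a stand-in for the running LieApplicationSession:

```python
from twisted.internet.defer import inlineCallbacks, returnValue, gatherResults

@inlineCallbacks
def fetch_structures(session, cids):
    # Fire every RPC first, then wait for all the results together.
    calls = [session.call(u'liestudio.structures.get_structure', cid) for cid in cids]
    results = yield gatherResults(calls)
    returnValue(results)
```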
|
Add scripts directory with lie_workflow script
|
Python
|
apache-2.0
|
MD-Studio/MDStudio
|
171c46d0698a53d38f332aa681c2e5ea3a09fd00
|
db/migrations/backfill_github_repo_id.py
|
"""Backfills tables to populate GitHub's repo ID."""
import rethinkdb as r
import db.plugins
import db.util
r_conn = db.util.r_conn
def backfill_github_plugin_repos():
"""Adds the 'repo_id' field from the repo_data field if available."""
r.table('plugin_github_repos').update({
'repo_id': r.row['repo_data']['id'].default('').coerce_to('string')
}).run(r_conn())
def backfill_plugins():
"""Backfill rows of the plugin table with github_repo_id.
Populated from the corresponding rows of the plugin_github_repos table,
joining on the key (github_owner, github_repo_name).
"""
repos = r.table('plugin_github_repos').pluck(
'repo_id', 'owner', 'repo_name').run(r_conn())
for i, repo in enumerate(repos):
if repo['repo_id'] == '':
continue
query = r.table('plugins').get_all([repo['owner'], repo['repo_name']],
index='github_owner_repo')
plugin = db.util.get_first(query)
if not plugin:
continue
plugin['github_repo_id'] = repo['repo_id']
db.plugins.insert(plugin)
print '%s\tBackfilled %s' % (i, plugin['slug'])
if __name__ == '__main__':
backfill_github_plugin_repos()
backfill_plugins()
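The join in backfill_plugins assumes a compound secondary index named github_owner_repo on the plugins table. A sketch of how such an index could be created (hypothetical one-off setup; `conn` stands in for an open connection):

```python
import rethinkdb as r

# Compound index over (github_owner, github_repo_name), matching the
# get_all(..., index='github_owner_repo') lookup in the script.
r.table('plugins').index_create(
    'github_owner_repo',
    [r.row['github_owner'], r.row['github_repo_name']]).run(conn)
r.table('plugins').index_wait('github_owner_repo').run(conn)
```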
|
Add migration script to add the 'github_repo_id' field
|
Python
|
mit
|
jonafato/vim-awesome,shaialon/vim-awesome,divad12/vim-awesome,vim-awesome/vim-awesome,starcraftman/vim-awesome
|
5a48dc3c389ad42b761b40d6e3ba838a8c3adad0
|
opal/migrations/0029_auto_20170707_1337.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('opal', '0028_auto_20170210_1146'),
    ]

    operations = [
        migrations.AlterField(
            model_name='macro',
            name='expanded',
            field=models.TextField(help_text=b'This is the text that it will expand to.'),
        ),
    ]
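For context, the AlterField above corresponds to a Macro model along these lines (a sketch inferred from the migration itself; the real model lives in opal/models.py):

```python
from django.db import models

class Macro(models.Model):
    # The migration only changes this field's help_text.
    expanded = models.TextField(help_text='This is the text that it will expand to.')
```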
|
Fix typo in Macro help text
|
Python
|
agpl-3.0
|
khchine5/opal
|
1773e9084964b3c44e0ab46ea64da0ac3d1ba671
|
final/problem6.py
|
# Problem 6-1
# 10.0 points possible (graded)

class Person(object):
    def __init__(self, name):
        self.name = name

    def say(self, stuff):
        return self.name + ' says: ' + stuff

    def __str__(self):
        return self.name


class Lecturer(Person):
    def lecture(self, stuff):
        return 'I believe that ' + Person.say(self, stuff)


class Professor(Lecturer):
    def say(self, stuff):
        return self.name + ' says: ' + self.lecture(stuff)


class ArrogantProfessor(Professor):
    def lecture(self, stuff):
        return 'It is obvious that ' + Person.say(self, stuff)

    def say(self, stuff):
        return self.name + ' says: ' + self.lecture(stuff)


e = Person('eric')
le = Lecturer('eric')
pe = Professor('eric')
ae = ArrogantProfessor('eric')

e.say('the sky is blue')
le.say('the sky is blue')
le.lecture('the sky is blue')
pe.say('the sky is blue')
pe.lecture('the sky is blue')

print(ae.say('the sky is blue'))
print(ae.lecture('the sky is blue'))
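Tracing the calls through the method resolution order, the two print statements at the end should emit:

```python
# print(ae.say('the sky is blue'))
#   -> eric says: It is obvious that eric says: the sky is blue
# print(ae.lecture('the sky is blue'))
#   -> It is obvious that eric says: the sky is blue
```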
|
Implement lecture & say methods in the ArrogantProfessor class using inheritance
|
Python
|
mit
|
Kunal57/MIT_6.00.1x
|
c297f803a30989d3f3593d98b5af861a839d4e0a
|
fuse_module/registry-fs.py
|
#!/usr/bin/env python
from __future__ import with_statement

import logging
import os
import sys
import stat

import yaml
from fuse import FUSE, FuseOSError, Operations, LoggingMixIn

from docker_registry.drivers.elliptics import Storage

logging.basicConfig()
log = logging.getLogger("")
log.setLevel(logging.DEBUG)

DIRECTORY_CONTENT = "DIRECTORY"
MAGIC_NUMBER = len(DIRECTORY_CONTENT)


class RegistryFS(LoggingMixIn, Operations):
    def __init__(self, config_path):
        with open(config_path, 'r') as f:
            cfg = yaml.load(f)
        try:
            self.storage = Storage(config=cfg)
        except Exception as err:
            # propagate storage setup failures to the FUSE layer
            raise FuseOSError(err)
        self.storage.put_content("z/y/x", "HAHA")

    def transform_path(self, path):
        # strip a starting slash
        # and convert unicode to a normal string
        return str(path.lstrip("/"))

    def readdir(self, path, fh):
        if path == "/":
            path = ""
        path = self.transform_path(path)

        def apply(item):
            return item.partition(path)[2].lstrip("/")
        return (apply(i) for i in self.storage.list_directory(path) if i)

    def getattr(self, path, fh=None):
        if path == "/":
            r = os.lstat(path)
            return dict((key, getattr(r, key))
                        for key in dir(r) if key.startswith("st_"))
        path = self.transform_path(path)
        ares = self.storage._session.lookup(path)
        # ugly hack
        for i in ares.get():
            res = {'st_atime': i.timestamp.tsec,
                   'st_ctime': i.timestamp.tsec,
                   'st_mode': 0777,  # ugly hack
                   'st_mtime': i.timestamp.tsec,
                   'st_nlink': 1,
                   'st_size': i.size}
            if res['st_size'] == MAGIC_NUMBER and\
                    self.storage.get_content(path) == DIRECTORY_CONTENT:
                res['st_mode'] |= stat.S_IFDIR
            else:
                res['st_mode'] |= stat.S_IFREG
            return res

    def read(self, path, length, offset, fh):
        path = self.transform_path(path)
        return self.storage.get_content(path)


def main(mountpoint, config_path):
    FUSE(RegistryFS(config_path),
         mountpoint, foreground=True)


if __name__ == '__main__':
    main(sys.argv[2], sys.argv[1])
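A short sketch of the directory convention the getattr hack depends on: a key is treated as a directory when its stored content is exactly the DIRECTORY marker. Here `storage` stands in for an initialized Storage instance and the path is a placeholder:

```python
# Mark a prefix as a directory in the backing store.
storage.put_content("images/layer1", "DIRECTORY")

# getattr() later recognizes it by size (MAGIC_NUMBER) plus a content check.
is_directory = (storage.get_content("images/layer1") == "DIRECTORY")
```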
|
Introduce fuse module to mount Registry Elliptics-based FS
|
Python
|
apache-2.0
|
noxiouz/docker-registry-driver-elliptics
|
57fc7ec4630174315b0a453073009783757ba3f6
|
tests/test_themes.py
|
import os
import textwrap
import shutil

import pytest


@pytest.fixture(scope='function')
def theme_project_tmpdir(tmpdir):
    # Copy themes-project to a temp dir, and copy demo-project content to it
    themes_dir = os.path.join(os.path.dirname(__file__), 'themes-project')
    content_dir = os.path.join(os.path.dirname(__file__), 'demo-project', 'content')
    temp_dir = tmpdir.mkdir("temp").join('themes-project')

    shutil.copytree(themes_dir, str(temp_dir))
    shutil.copytree(content_dir, str(temp_dir.join('content')))

    return temp_dir


@pytest.fixture(scope='function')
def theme_project(theme_project_tmpdir):
    from lektor.project import Project

    # Create the .lektorproject file
    lektorfile_text = textwrap.dedent(u"""
        [project]
        name = Themes Project
    """)
    theme_project_tmpdir.join("themes.lektorproject").write_text(lektorfile_text,
                                                                 "utf8",
                                                                 ensure=True)

    return Project.from_path(str(theme_project_tmpdir))


@pytest.fixture(scope='function')
def theme_env(theme_project):
    from lektor.environment import Environment
    return Environment(theme_project)


@pytest.fixture(scope='function')
def theme_pad(theme_env):
    from lektor.db import Database
    return Database(theme_env).new_pad()


@pytest.fixture(scope='function')
def theme_builder(theme_pad, tmpdir):
    from lektor.builder import Builder
    return Builder(theme_pad, str(tmpdir.mkdir("output")))


def test_themes(theme_builder):
    assert theme_builder
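Further tests can reuse any link in the fixture chain; a hypothetical example in the same pattern, assuming the Project object exposes the name set in the .lektorproject file:

```python
def test_theme_project_name(theme_project):
    # The name comes from the .lektorproject file written by the fixture.
    assert theme_project.name == 'Themes Project'
```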
|
Add basic test and fixtures for themes.
|
Python
|
bsd-3-clause
|
lektor/lektor
|
6aea126e743b8a2b6808f4d9cc329b4955e796ae
|
tests/test_config.py
|
import os
import unittest

from config import get_config


class ConfigTest(unittest.TestCase):
    def setUp(self):
        os.environ['DESTALINATOR_STRING_VARIABLE'] = 'test'
        os.environ['DESTALINATOR_LIST_VARIABLE'] = 'test,'

    def test_environment_variable_configs(self):
        self.assertEqual(get_config().string_variable, 'test')
        self.assertEqual(get_config().list_variable, ['test'])
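The second fixture line hints at the parsing convention under test: a trailing comma marks a list-valued setting. A sketch of that rule as the test implies it (an assumption about config.py, not taken from it):

```python
raw = 'test,'
# Split on commas and drop empties, so 'test,' becomes ['test'].
value = [part for part in raw.split(',') if part]
assert value == ['test']
```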
|
Add config environment variables test
|
Python
|
apache-2.0
|
royrapoport/destalinator,randsleadershipslack/destalinator,TheConnMan/destalinator
|
05ebb3eb9e28e738145cd81fc256c7b83ba32e7b
|
scripts/make_patch_fit_queue.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
Make a CANFAR queue for patch fitting.

>>> from canque import Submission
>>> sub = Submission(user_name, script_path)
>>> sub.add_job('my args', "job.log")
>>> sub.write("jobs.sub")

2015-06-10 - Created by Jonathan Sick
"""

import argparse
import json

from canque import Submission


def main():
    args = parse_args()

    with open(args.json_patch_path, 'r') as f:
        patch_json = json.load(f)

    sub = Submission('jonathansick', 'androcmd_scripts/patch_fit.sh')
    job_num = 0
    for brick in args.bricks:
        nums = patch_numbers_for_brick(brick, patch_json)
        while len(nums) > 0:
            create_job(job_num, sub, brick, nums, args.n, args.vodir)
            job_num += 1
    sub.write(args.queue_file)


def parse_args():
    parser = argparse.ArgumentParser(
        description='e.g.:\n\nmake_patch_fit_queue.py brick_23_queue.sub '
                    '--bricks 23 '
                    '--json ~/Downloads/patches.json --n 30')
    parser.add_argument('queue_file',
                        help='Output path of queue submission file')
    parser.add_argument('--bricks', type=int,
                        nargs='*',
                        help='Brick number(s)')
    parser.add_argument('--json', dest='json_patch_path',
                        help='Path to patch JSON file')
    parser.add_argument('--vodir',
                        help='VOSpace directory to save results in',
                        default='phat/patches')
    parser.add_argument('--n', type=int,
                        help='Max number of jobs per brick')
    return parser.parse_args()


def patch_numbers_for_brick(brick, patch_json):
    nums = []
    for patch in patch_json:
        if patch['brick'] == brick:
            nums.append(int(patch['patch'].split('_')[-1]))
    return nums


def create_job(job_num, sub, brick, patch_numbers, max_n, vos_dir):
    ns = []
    for i in xrange(max_n):
        if len(patch_numbers) > 0:
            ns.append(str(patch_numbers.pop(0)))
        else:
            break
    job_arg = '{brick:d} {nums} {vos}'.format(
        brick=brick,
        nums=','.join(ns),
        vos=vos_dir)
    sub.add_job(job_arg, "patches_{brick:d}_{job_num:d}.log".format(
        job_num=job_num, brick=brick))


if __name__ == '__main__':
    main()
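The shape of the patch JSON can be read off patch_numbers_for_brick: a list of objects with an integer brick and a patch id whose last underscore-separated token is numeric. A hypothetical two-entry file (field values are illustrative):

```python
import json

patches = [
    {"brick": 23, "patch": "patch_23_007"},  # parsed patch number: 7
    {"brick": 23, "patch": "patch_23_008"},  # parsed patch number: 8
]
print(json.dumps(patches, indent=2))
```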
|
Create script to make PHAT patch fitting queue
|
Python
|
mit
|
jonathansick/androcmd
|
22e80f1fccb120f90774195d1592750f6d39a8a2
|
testbot.py
|
from bot import Tofbot


class TestTofbot(Tofbot):
    def __init__(self, nick, name, chan, origin):
        chans = [chan]
        Tofbot.__init__(self, nick, name, chans, debug=False)
        self.chan = chan
        self.origin = origin

    def msg(self, chan, msg):
        print(" -> %s" % msg)

    def send(self, msg):
        print("<- %s" % msg)
        self.dispatch(self.origin, [msg, 'PRIVMSG', self.chan])
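A hypothetical use of the mock, assuming bot.Tofbot is importable and that origin is an IRC-style source string (both assumptions, not shown in the commit):

```python
bot = TestTofbot('tof', 'Tofbot', '#testchan', 'tester!user@host')
bot.send('!version')  # prints '<- !version', then dispatches it as a PRIVMSG on #testchan
```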
|
Add mock object for easier testing
|
Python
|
bsd-2-clause
|
p0nce/tofbot,chmduquesne/tofbot,martinkirch/tofbot,soulaklabs/tofbot,tofbot/tofbot
|
47956133433425c3e0c1f33ff6596c9ead3f6558
|
dosagelib/plugins/projectfuture.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2019-2020 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function

from .common import _ParserScraper


class ProjectFuture(_ParserScraper):
    imageSearch = '//td[@class="tamid"]/img'
    prevSearch = '//a[./img[@alt="Previous"]]'

    def __init__(self, name, comic, first, last=None):
        if name == 'ProjectFuture':
            super(ProjectFuture, self).__init__(name)
        else:
            super(ProjectFuture, self).__init__('ProjectFuture/' + name)

        self.url = 'http://www.projectfuturecomic.com/' + comic + '.php'
        self.stripUrl = self.url + '?strip=%s'
        self.firstStripUrl = self.stripUrl % first
        if last:
            self.url = self.stripUrl
            self.endOfLife = True

    @classmethod
    def getmodules(cls):
        return (
            cls('AWalkInTheWoods', 'simeon', '1', last='12'),
            cls('BenjaminBuranAndTheArkOfUr', 'ben', '00', last='23'),
            cls('BookOfTenets', 'tenets', '01', last='45'),
            cls('CriticalMass', 'criticalmass', 'cover', last='26'),
            cls('DarkLordRising', 'darklord', '01-00', last='10-10'),
            cls('FishingTrip', 'fishing', '01-00'),
            cls('HeadsYouLose', 'heads', '00-01', last='07-12'),
            cls('NiallsStory', 'niall', '00'),
            cls('ProjectFuture', 'strip', '0'),
            cls('RedValentine', 'redvalentine', '1', last='6'),
            cls('ShortStories', 'shorts', '01-00'),
            cls('StrangeBedfellows', 'bedfellows', '1', last='6'),
            cls('TheAxemanCometh', 'axeman', '01-01', last='02-18'),
            cls('ToCatchADemon', 'daxxon', '01-00', last='03-14'),
            cls('TheDarkAngel', 'darkangel', 'cover', last='54'),
            cls('TheEpsilonProject', 'epsilon', '00-01'),
            cls('TheHarvest', 'harvest', '01-00'),
            cls('TheSierraChronicles', 'sierra', '0', last='29'),
            cls('TheTuppenyMan', 'tuppenny', '00', last='16'),
            cls('TurningANewPage', 'azrael', '1', last='54'),
        )
|
Add site engine for ProjectFuture
|
Add site engine for ProjectFuture
|
Python
|
mit
|
webcomics/dosage,webcomics/dosage,peterjanes/dosage,peterjanes/dosage
|
Add site engine for ProjectFuture
|
# -*- coding: utf-8 -*-
# Copyright (C) 2019-2020 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function
from .common import _ParserScraper
class ProjectFuture(_ParserScraper):
imageSearch = '//td[@class="tamid"]/img'
prevSearch = '//a[./img[@alt="Previous"]]'
def __init__(self, name, comic, first, last=None):
if name == 'ProjectFuture':
super(ProjectFuture, self).__init__(name)
else:
super(ProjectFuture, self).__init__('ProjectFuture/' + name)
self.url = 'http://www.projectfuturecomic.com/' + comic + '.php'
self.stripUrl = self.url + '?strip=%s'
self.firstStripUrl = self.stripUrl % first
if last:
self.url = self.stripUrl
self.endOfLife = True
@classmethod
def getmodules(cls):
return (
cls('AWalkInTheWoods', 'simeon', '1', last='12'),
cls('BenjaminBuranAndTheArkOfUr', 'ben', '00', last='23'),
cls('BookOfTenets', 'tenets', '01', last='45'),
cls('CriticalMass', 'criticalmass', 'cover', last='26'),
cls('DarkLordRising', 'darklord', '01-00', last='10-10'),
cls('FishingTrip', 'fishing', '01-00'),
cls('HeadsYouLose', 'heads', '00-01', last='07-12'),
cls('NiallsStory', 'niall', '00'),
cls('ProjectFuture', 'strip', '0'),
cls('RedValentine', 'redvalentine', '1', last='6'),
cls('ShortStories', 'shorts', '01-00'),
cls('StrangeBedfellows', 'bedfellows', '1', last='6'),
cls('TheAxemanCometh', 'axeman', '01-01', last='02-18'),
cls('ToCatchADemon', 'daxxon', '01-00', last='03-14'),
cls('TheDarkAngel', 'darkangel', 'cover', last='54'),
cls('TheEpsilonProject', 'epsilon', '00-01'),
cls('TheHarvest', 'harvest', '01-00'),
cls('TheSierraChronicles', 'sierra', '0', last='29'),
cls('TheTuppenyMan', 'tuppenny', '00', last='16'),
cls('TurningANewPage', 'azrael', '1', last='54'),
)
|
<commit_before><commit_msg>Add site engine for ProjectFuture<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (C) 2019-2020 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function
from .common import _ParserScraper
class ProjectFuture(_ParserScraper):
imageSearch = '//td[@class="tamid"]/img'
prevSearch = '//a[./img[@alt="Previous"]]'
def __init__(self, name, comic, first, last=None):
if name == 'ProjectFuture':
super(ProjectFuture, self).__init__(name)
else:
super(ProjectFuture, self).__init__('ProjectFuture/' + name)
self.url = 'http://www.projectfuturecomic.com/' + comic + '.php'
self.stripUrl = self.url + '?strip=%s'
self.firstStripUrl = self.stripUrl % first
if last:
self.url = self.stripUrl
self.endOfLife = True
@classmethod
def getmodules(cls):
return (
cls('AWalkInTheWoods', 'simeon', '1', last='12'),
cls('BenjaminBuranAndTheArkOfUr', 'ben', '00', last='23'),
cls('BookOfTenets', 'tenets', '01', last='45'),
cls('CriticalMass', 'criticalmass', 'cover', last='26'),
cls('DarkLordRising', 'darklord', '01-00', last='10-10'),
cls('FishingTrip', 'fishing', '01-00'),
cls('HeadsYouLose', 'heads', '00-01', last='07-12'),
cls('NiallsStory', 'niall', '00'),
cls('ProjectFuture', 'strip', '0'),
cls('RedValentine', 'redvalentine', '1', last='6'),
cls('ShortStories', 'shorts', '01-00'),
cls('StrangeBedfellows', 'bedfellows', '1', last='6'),
cls('TheAxemanCometh', 'axeman', '01-01', last='02-18'),
cls('ToCatchADemon', 'daxxon', '01-00', last='03-14'),
cls('TheDarkAngel', 'darkangel', 'cover', last='54'),
cls('TheEpsilonProject', 'epsilon', '00-01'),
cls('TheHarvest', 'harvest', '01-00'),
cls('TheSierraChronicles', 'sierra', '0', last='29'),
cls('TheTuppenyMan', 'tuppenny', '00', last='16'),
cls('TurningANewPage', 'azrael', '1', last='54'),
)
|
Add site engine for ProjectFuture# -*- coding: utf-8 -*-
# Copyright (C) 2019-2020 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function
from .common import _ParserScraper
class ProjectFuture(_ParserScraper):
imageSearch = '//td[@class="tamid"]/img'
prevSearch = '//a[./img[@alt="Previous"]]'
def __init__(self, name, comic, first, last=None):
if name == 'ProjectFuture':
super(ProjectFuture, self).__init__(name)
else:
super(ProjectFuture, self).__init__('ProjectFuture/' + name)
self.url = 'http://www.projectfuturecomic.com/' + comic + '.php'
self.stripUrl = self.url + '?strip=%s'
self.firstStripUrl = self.stripUrl % first
if last:
self.url = self.stripUrl
self.endOfLife = True
@classmethod
def getmodules(cls):
return (
cls('AWalkInTheWoods', 'simeon', '1', last='12'),
cls('BenjaminBuranAndTheArkOfUr', 'ben', '00', last='23'),
cls('BookOfTenets', 'tenets', '01', last='45'),
cls('CriticalMass', 'criticalmass', 'cover', last='26'),
cls('DarkLordRising', 'darklord', '01-00', last='10-10'),
cls('FishingTrip', 'fishing', '01-00'),
cls('HeadsYouLose', 'heads', '00-01', last='07-12'),
cls('NiallsStory', 'niall', '00'),
cls('ProjectFuture', 'strip', '0'),
cls('RedValentine', 'redvalentine', '1', last='6'),
cls('ShortStories', 'shorts', '01-00'),
cls('StrangeBedfellows', 'bedfellows', '1', last='6'),
cls('TheAxemanCometh', 'axeman', '01-01', last='02-18'),
cls('ToCatchADemon', 'daxxon', '01-00', last='03-14'),
cls('TheDarkAngel', 'darkangel', 'cover', last='54'),
cls('TheEpsilonProject', 'epsilon', '00-01'),
cls('TheHarvest', 'harvest', '01-00'),
cls('TheSierraChronicles', 'sierra', '0', last='29'),
cls('TheTuppenyMan', 'tuppenny', '00', last='16'),
cls('TurningANewPage', 'azrael', '1', last='54'),
)
|
<commit_before><commit_msg>Add site engine for ProjectFuture<commit_after># -*- coding: utf-8 -*-
# Copyright (C) 2019-2020 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function
from .common import _ParserScraper
class ProjectFuture(_ParserScraper):
imageSearch = '//td[@class="tamid"]/img'
prevSearch = '//a[./img[@alt="Previous"]]'
def __init__(self, name, comic, first, last=None):
if name == 'ProjectFuture':
super(ProjectFuture, self).__init__(name)
else:
super(ProjectFuture, self).__init__('ProjectFuture/' + name)
self.url = 'http://www.projectfuturecomic.com/' + comic + '.php'
self.stripUrl = self.url + '?strip=%s'
self.firstStripUrl = self.stripUrl % first
if last:
self.url = self.stripUrl
self.endOfLife = True
@classmethod
def getmodules(cls):
return (
cls('AWalkInTheWoods', 'simeon', '1', last='12'),
cls('BenjaminBuranAndTheArkOfUr', 'ben', '00', last='23'),
cls('BookOfTenets', 'tenets', '01', last='45'),
cls('CriticalMass', 'criticalmass', 'cover', last='26'),
cls('DarkLordRising', 'darklord', '01-00', last='10-10'),
cls('FishingTrip', 'fishing', '01-00'),
cls('HeadsYouLose', 'heads', '00-01', last='07-12'),
cls('NiallsStory', 'niall', '00'),
cls('ProjectFuture', 'strip', '0'),
cls('RedValentine', 'redvalentine', '1', last='6'),
cls('ShortStories', 'shorts', '01-00'),
cls('StrangeBedfellows', 'bedfellows', '1', last='6'),
cls('TheAxemanCometh', 'axeman', '01-01', last='02-18'),
cls('ToCatchADemon', 'daxxon', '01-00', last='03-14'),
cls('TheDarkAngel', 'darkangel', 'cover', last='54'),
cls('TheEpsilonProject', 'epsilon', '00-01'),
cls('TheHarvest', 'harvest', '01-00'),
cls('TheSierraChronicles', 'sierra', '0', last='29'),
cls('TheTuppenyMan', 'tuppenny', '00', last='16'),
cls('TurningANewPage', 'azrael', '1', last='54'),
)
|
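Worth spelling out: getmodules() fans the one class out into a scraper per story arc, and passing last= both pins self.url to the final strip and marks the arc endOfLife. An illustrative enumeration (not part of dosage itself; the name attribute is assumed to be set by the _ParserScraper base class, while firstStripUrl comes from the __init__ shown above):

for scraper in ProjectFuture.getmodules():
    print(scraper.name, scraper.firstStripUrl)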
|
46733c983a90d6dbfd32aed8f41ff4ed377b86d2
|
src/windowsAnkiRestart.py
|
src/windowsAnkiRestart.py
|
import os, time, sys
from shutil import copy
watched_file = sys.argv[1]
addon_path = sys.argv[2]
os.startfile(r'C:\Program Files (x86)\Anki\anki.exe')
new_t = old_t = 0
while 1:
old_t = new_t
new_t = os.stat(watched_file)[8]
if old_t != new_t:
copy(watched_file, addon_path)
os.system("TASKKILL /F /IM anki.exe")
os.startfile(r'C:\Program Files (x86)\Anki\anki.exe')
time.sleep(1);
|
Add script to automate plugin testing on Windows
|
Add script to automate plugin testing on Windows
* Opens Anki for you
* Polls every second for a change to the watched file (uses time last modified)
* Restarts Anki automatically
|
Python
|
apache-2.0
|
CSE-437/AnkiPlugin,CSE-437/AnkiPlugin
|
Add script to automate plugin testing on Windows
* Opens Anki for you
* Polls every second for a change to the watched file (uses time last modified)
* Restarts Anki automatically
|
import os, time, sys
from shutil import copy
watched_file = sys.argv[1]
addon_path = sys.argv[2]
os.startfile(r'C:\Program Files (x86)\Anki\anki.exe')
new_t = old_t = 0
while 1:
old_t = new_t
new_t = os.stat(watched_file)[8]
if old_t != new_t:
copy(watched_file, addon_path)
os.system("TASKKILL /F /IM anki.exe")
os.startfile(r'C:\Program Files (x86)\Anki\anki.exe')
time.sleep(1);
|
<commit_before><commit_msg>Add script to automate plugin testing on Windows
* Opens Anki for you
* Polls every second for a change to the watched file (uses time last modified)
* Restarts Anki automatically<commit_after>
|
import os, time, sys
from shutil import copy
watched_file = sys.argv[1]
addon_path = sys.argv[2]
os.startfile(r'C:\Program Files (x86)\Anki\anki.exe')
new_t = old_t = 0
while 1:
old_t = new_t
new_t = os.stat(watched_file)[8]
if old_t != new_t:
copy(watched_file, addon_path)
os.system("TASKKILL /F /IM anki.exe")
os.startfile(r'C:\Program Files (x86)\Anki\anki.exe')
time.sleep(1);
|
Add script to automate plugin testing on Windows
* Opens Anki for you
* Polls every second for a change to the watched file (uses time last modified)
* Restarts Anki automaticallyimport os, time, sys
from shutil import copy
watched_file = sys.argv[1]
addon_path = sys.argv[2]
os.startfile(r'C:\Program Files (x86)\Anki\anki.exe')
new_t = old_t = 0
while 1:
old_t = new_t
new_t = os.stat(watched_file)[8]
if old_t != new_t:
copy(watched_file, addon_path)
os.system("TASKKILL /F /IM anki.exe")
os.startfile(r'C:\Program Files (x86)\Anki\anki.exe')
time.sleep(1);
|
<commit_before><commit_msg>Add script to automate plugin testing on Windows
* Opens Anki for you
* Polls every second for a change to the watched file (uses time last modified)
* Restarts Anki automatically<commit_after>import os, time, sys
from shutil import copy
watched_file = sys.argv[1]
addon_path = sys.argv[2]
os.startfile(r'C:\Program Files (x86)\Anki\anki.exe')
new_t = old_t = 0
while 1:
old_t = new_t
new_t = os.stat(watched_file)[8]
if old_t != new_t:
copy(watched_file, addon_path)
os.system("TASKKILL /F /IM anki.exe")
os.startfile(r'C:\Program Files (x86)\Anki\anki.exe')
time.sleep(1);
|
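The bare index in os.stat(watched_file)[8] is st_mtime, and because old_t starts at 0 the very first comparison fires, restarting Anki once at startup. A sketch of the same loop with both points made explicit (watched_file and addon_path as parsed above; behaviour otherwise unchanged):

import os
import time
from shutil import copy

ANKI_EXE = r'C:\Program Files (x86)\Anki\anki.exe'  # same path as above
old_t = os.stat(watched_file).st_mtime  # named field instead of index 8
while True:
    new_t = os.stat(watched_file).st_mtime
    if new_t != old_t:
        old_t = new_t
        copy(watched_file, addon_path)
        os.system("TASKKILL /F /IM anki.exe")
        os.startfile(ANKI_EXE)
    time.sleep(1)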
|
e57c94dc1692447581e25f1385514785c4d03018
|
axe_selenium_python/tests/test_axe.py
|
axe_selenium_python/tests/test_axe.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import pytest
@pytest.mark.nondestructive
def test_execute(axe):
"""Run axe against base_url and verify JSON output."""
axe.inject()
data = axe.execute()
assert data is not None, data
@pytest.mark.nondestructive
def test_report(axe):
"""Test that report exists."""
axe.inject()
results = axe.execute()
violations = results["violations"]
report = axe.report(violations)
assert report is not None, report
@pytest.mark.nondestructive
def test_write_results(base_url, axe):
"""Assert that write results method creates a non-empty file."""
axe.inject()
data = axe.execute()
filename = 'results.json'
axe.write_results(filename, data)
# check that file exists and is not empty
assert path.exists(filename), 'Output file not found.'
assert path.getsize(filename) > 0, 'File contains no data.'
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import pytest
@pytest.mark.nondestructive
def test_execute(axe):
"""Run axe against base_url and verify JSON output."""
axe.inject()
data = axe.execute()
assert data is not None, data
@pytest.mark.nondestructive
def test_report(axe):
"""Test that report exists."""
axe.inject()
results = axe.execute()
violations = results["violations"]
report = axe.report(violations)
assert report is not None, report
@pytest.mark.nondestructive
def test_write_results(base_url, axe):
"""Assert that write results method creates a non-empty file."""
axe.inject()
data = axe.execute()
filename = 'results.json'
axe.write_results(filename, data)
# check that file exists and is not empty
assert path.exists(filename), 'Output file not found.'
assert path.getsize(filename) > 0, 'File contains no data.'
|
Add blank line to fix flake8-isort
|
Add blank line to fix flake8-isort
|
Python
|
mpl-2.0
|
kimberlythegeek/axe-selenium-python,kimberlythegeek/axe-selenium-python
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import pytest
@pytest.mark.nondestructive
def test_execute(axe):
"""Run axe against base_url and verify JSON output."""
axe.inject()
data = axe.execute()
assert data is not None, data
@pytest.mark.nondestructive
def test_report(axe):
"""Test that report exists."""
axe.inject()
results = axe.execute()
violations = results["violations"]
report = axe.report(violations)
assert report is not None, report
@pytest.mark.nondestructive
def test_write_results(base_url, axe):
"""Assert that write results method creates a non-empty file."""
axe.inject()
data = axe.execute()
filename = 'results.json'
axe.write_results(filename, data)
# check that file exists and is not empty
assert path.exists(filename), 'Output file not found.'
assert path.getsize(filename) > 0, 'File contains no data.'
Add blank line to fix flake8-isort
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import pytest
@pytest.mark.nondestructive
def test_execute(axe):
"""Run axe against base_url and verify JSON output."""
axe.inject()
data = axe.execute()
assert data is not None, data
@pytest.mark.nondestructive
def test_report(axe):
"""Test that report exists."""
axe.inject()
results = axe.execute()
violations = results["violations"]
report = axe.report(violations)
assert report is not None, report
@pytest.mark.nondestructive
def test_write_results(base_url, axe):
"""Assert that write results method creates a non-empty file."""
axe.inject()
data = axe.execute()
filename = 'results.json'
axe.write_results(filename, data)
# check that file exists and is not empty
assert path.exists(filename), 'Output file not found.'
assert path.getsize(filename) > 0, 'File contains no data.'
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import pytest
@pytest.mark.nondestructive
def test_execute(axe):
"""Run axe against base_url and verify JSON output."""
axe.inject()
data = axe.execute()
assert data is not None, data
@pytest.mark.nondestructive
def test_report(axe):
"""Test that report exists."""
axe.inject()
results = axe.execute()
violations = results["violations"]
report = axe.report(violations)
assert report is not None, report
@pytest.mark.nondestructive
def test_write_results(base_url, axe):
"""Assert that write results method creates a non-empty file."""
axe.inject()
data = axe.execute()
filename = 'results.json'
axe.write_results(filename, data)
# check that file exists and is not empty
assert path.exists(filename), 'Output file not found.'
assert path.getsize(filename) > 0, 'File contains no data.'
<commit_msg>Add blank line to fix flake8-isort<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import pytest
@pytest.mark.nondestructive
def test_execute(axe):
"""Run axe against base_url and verify JSON output."""
axe.inject()
data = axe.execute()
assert data is not None, data
@pytest.mark.nondestructive
def test_report(axe):
"""Test that report exists."""
axe.inject()
results = axe.execute()
violations = results["violations"]
report = axe.report(violations)
assert report is not None, report
@pytest.mark.nondestructive
def test_write_results(base_url, axe):
"""Assert that write results method creates a non-empty file."""
axe.inject()
data = axe.execute()
filename = 'results.json'
axe.write_results(filename, data)
# check that file exists and is not empty
assert path.exists(filename), 'Output file not found.'
assert path.getsize(filename) > 0, 'File contains no data.'
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import pytest
@pytest.mark.nondestructive
def test_execute(axe):
"""Run axe against base_url and verify JSON output."""
axe.inject()
data = axe.execute()
assert data is not None, data
@pytest.mark.nondestructive
def test_report(axe):
"""Test that report exists."""
axe.inject()
results = axe.execute()
violations = results["violations"]
report = axe.report(violations)
assert report is not None, report
@pytest.mark.nondestructive
def test_write_results(base_url, axe):
"""Assert that write results method creates a non-empty file."""
axe.inject()
data = axe.execute()
filename = 'results.json'
axe.write_results(filename, data)
# check that file exists and is not empty
assert path.exists(filename), 'Output file not found.'
assert path.getsize(filename) > 0, 'File contains no data.'
Add blank line to fix flake8-isort# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import pytest
@pytest.mark.nondestructive
def test_execute(axe):
"""Run axe against base_url and verify JSON output."""
axe.inject()
data = axe.execute()
assert data is not None, data
@pytest.mark.nondestructive
def test_report(axe):
"""Test that report exists."""
axe.inject()
results = axe.execute()
violations = results["violations"]
report = axe.report(violations)
assert report is not None, report
@pytest.mark.nondestructive
def test_write_results(base_url, axe):
"""Assert that write results method creates a non-empty file."""
axe.inject()
data = axe.execute()
filename = 'results.json'
axe.write_results(filename, data)
# check that file exists and is not empty
assert path.exists(filename), 'Output file not found.'
assert path.getsize(filename) > 0, 'File contains no data.'
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import pytest
@pytest.mark.nondestructive
def test_execute(axe):
"""Run axe against base_url and verify JSON output."""
axe.inject()
data = axe.execute()
assert data is not None, data
@pytest.mark.nondestructive
def test_report(axe):
"""Test that report exists."""
axe.inject()
results = axe.execute()
violations = results["violations"]
report = axe.report(violations)
assert report is not None, report
@pytest.mark.nondestructive
def test_write_results(base_url, axe):
"""Assert that write results method creates a non-empty file."""
axe.inject()
data = axe.execute()
filename = 'results.json'
axe.write_results(filename, data)
# check that file exists and is not empty
assert path.exists(filename), 'Output file not found.'
assert path.getsize(filename) > 0, 'File contains no data.'
<commit_msg>Add blank line to fix flake8-isort<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import pytest
@pytest.mark.nondestructive
def test_execute(axe):
"""Run axe against base_url and verify JSON output."""
axe.inject()
data = axe.execute()
assert data is not None, data
@pytest.mark.nondestructive
def test_report(axe):
"""Test that report exists."""
axe.inject()
results = axe.execute()
violations = results["violations"]
report = axe.report(violations)
assert report is not None, report
@pytest.mark.nondestructive
def test_write_results(base_url, axe):
"""Assert that write results method creates a non-empty file."""
axe.inject()
data = axe.execute()
filename = 'results.json'
axe.write_results(filename, data)
# check that file exists and is not empty
assert path.exists(filename), 'Output file not found.'
assert path.getsize(filename) > 0, 'File contains no data.'
|
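These tests lean on axe and base_url fixtures defined elsewhere; a hypothetical conftest.py sketch of the expected shape (Axe(driver) is assumed from axe-selenium-python's public API, and the selenium/base_url fixtures from the pytest-selenium plugin stack):

import pytest
from axe_selenium_python import Axe

@pytest.fixture
def axe(selenium, base_url):
    # Load the page under test, then hand back a ready-to-inject Axe wrapper.
    selenium.get(base_url)
    return Axe(selenium)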
bd3c47780ac0c1629e8657bb03188bf6af27ebb1
|
scripts/robot_control/actuator_feedback.py
|
scripts/robot_control/actuator_feedback.py
|
"""Helps eye-ball rpc-based motor control"""
import time
from ev3control.rpc import Robot
# for convenience set the time to one second
duration = 1000
speed = 50
with Robot(None) as robot:
for _ in range(1):
t0 = time.time()
print('starting position ', robot.left_track.position)
print(time.time() - t0)
robot.move_straight(speed, duration)
robot.wait_until_not_moving()
t0 = time.time()
print("ending position ", robot.left_track.position)
print(time.time() - t0)
|
Add small script to test motor feedback
|
Add small script to test motor feedback
|
Python
|
mit
|
TheCamusean/DLRCev3,TheCamusean/DLRCev3
|
Add small script to test motor feedback
|
"""Helps eye-ball rpc-based motor control"""
import time
from ev3control.rpc import Robot
# for convenience set the time to one second
duration = 1000
speed = 50
with Robot(None) as robot:
for _ in range(1):
t0 = time.time()
print('starting position ', robot.left_track.position)
print(time.time() - t0)
robot.move_straight(speed, duration)
robot.wait_until_not_moving()
t0 = time.time()
print("ending position ", robot.left_track.position)
print(time.time() - t0)
|
<commit_before><commit_msg>Add small script to test motor feedback<commit_after>
|
"""Helps eye-ball rpc-based motor control"""
import time
from ev3control.rpc import Robot
# for convenience set the time to one second
duration = 1000
speed = 50
with Robot(None) as robot:
for _ in range(1):
t0 = time.time()
print('starting position ', robot.left_track.position)
print(time.time() - t0)
robot.move_straight(speed, duration)
robot.wait_until_not_moving()
t0 = time.time()
print("ending position ", robot.left_track.position)
print(time.time() - t0)
|
Add small script to test motor feedback"""Helps eye-ball rpc-based motor control"""
import time
from ev3control.rpc import Robot
# for convenience set the time to one second
duration = 1000
speed = 50
with Robot(None) as robot:
for _ in range(1):
t0 = time.time()
print('starting position ', robot.left_track.position)
print(time.time() - t0)
robot.move_straight(speed, duration)
robot.wait_until_not_moving()
t0 = time.time()
print("ending position ", robot.left_track.position)
print(time.time() - t0)
|
<commit_before><commit_msg>Add small script to test motor feedback<commit_after>"""Helps eye-ball rpc-based motor control"""
import time
from ev3control.rpc import Robot
# for convenience set the time to one second
duration = 1000
speed = 50
with Robot(None) as robot:
for _ in range(1):
t0 = time.time()
print('starting position ', robot.left_track.position)
print(time.time() - t0)
robot.move_straight(speed, duration)
robot.wait_until_not_moving()
t0 = time.time()
print("ending position ", robot.left_track.position)
print(time.time() - t0)
|
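A small extension that turns the eye-balling into a number, using only calls already exercised above (run inside the same with Robot(None) as robot: block; the delta is in raw encoder counts):

# Sketch: report how far the left track actually turned.
start = robot.left_track.position
robot.move_straight(speed, duration)
robot.wait_until_not_moving()
print("position delta:", robot.left_track.position - start)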
|
720a011b59ea1fbe38e2ae940133cb4485d12d8d
|
api/migrations/0009_merge_20161031_1057.py
|
api/migrations/0009_merge_20161031_1057.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-31 12:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0007_media_tags'),
('api', '0008_media_description'),
]
operations = [
]
|
Build Fix - Merged db migrations
|
Build Fix - Merged db migrations
|
Python
|
bsd-3-clause
|
falcaopetri/enqueuer-api
|
Build Fix - Merged db migrations
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-31 12:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0007_media_tags'),
('api', '0008_media_description'),
]
operations = [
]
|
<commit_before><commit_msg>Build Fix - Merged db migrations<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-31 12:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0007_media_tags'),
('api', '0008_media_description'),
]
operations = [
]
|
Build Fix - Merged db migrations# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-31 12:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0007_media_tags'),
('api', '0008_media_description'),
]
operations = [
]
|
<commit_before><commit_msg>Build Fix - Merged db migrations<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-31 12:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0007_media_tags'),
('api', '0008_media_description'),
]
operations = [
]
|
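Merge migrations like this one are generated rather than hand-written; Django emits the empty operations list itself:

# Equivalent generator command, run from the project root:
#   python manage.py makemigrations --merge
# It detects the diverged 0007/0008 leaf migrations and writes this stub.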
|
c63a5e46e6e3a5e0106c20a3217b53e17147aaf8
|
test/test_core_dataset.py
|
test/test_core_dataset.py
|
import os
import shutil
import tempfile
import unittest
from lobster.cmssw import dataset
from lobster import fs, se
class TestDataset(unittest.TestCase):
@classmethod
def setUpClass(cls):
path = os.path.expandvars(
os.environ.get('LOBSTER_STORAGE', '/hadoop/store/user/') +
os.environ.get('LOBSTER_USER', os.environ['USER']) + '/')
if not os.path.exists(path):
os.makedirs(path)
cls.workdir = tempfile.mkdtemp(prefix=path)
os.chmod(cls.workdir, 0777)
os.makedirs(os.path.join(cls.workdir, 'eggs'))
for i in range(10):
with open(os.path.join(cls.workdir, 'eggs', str(i) + '.txt'), 'w') as f:
f.write('stir-fry')
os.makedirs(os.path.join(cls.workdir, 'ham'))
for i in range(5):
with open(os.path.join(cls.workdir, 'ham', str(i) + '.txt'), 'w') as f:
f.write('bacon')
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.workdir)
def runTest(self):
s = se.StorageConfiguration({'input': ['file://' + self.workdir]})
s.activate()
with fs.default():
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': 'eggs/'})
assert len(info.files) == 10
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': ['eggs/', 'ham/']})
assert len(info.files) == 15
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': 'eggs/1.txt'})
assert len(info.files) == 1
|
Add test for file-based datasets.
|
Add test for file-based datasets.
|
Python
|
mit
|
matz-e/lobster,matz-e/lobster,matz-e/lobster
|
Add test for file-based datasets.
|
import os
import shutil
import tempfile
import unittest
from lobster.cmssw import dataset
from lobster import fs, se
class TestDataset(unittest.TestCase):
@classmethod
def setUpClass(cls):
path = os.path.expandvars(
os.environ.get('LOBSTER_STORAGE', '/hadoop/store/user/') +
os.environ.get('LOBSTER_USER', os.environ['USER']) + '/')
if not os.path.exists(path):
os.makedirs(path)
cls.workdir = tempfile.mkdtemp(prefix=path)
os.chmod(cls.workdir, 0777)
os.makedirs(os.path.join(cls.workdir, 'eggs'))
for i in range(10):
with open(os.path.join(cls.workdir, 'eggs', str(i) + '.txt'), 'w') as f:
f.write('stir-fry')
os.makedirs(os.path.join(cls.workdir, 'ham'))
for i in range(5):
with open(os.path.join(cls.workdir, 'ham', str(i) + '.txt'), 'w') as f:
f.write('bacon')
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.workdir)
def runTest(self):
s = se.StorageConfiguration({'input': ['file://' + self.workdir]})
s.activate()
with fs.default():
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': 'eggs/'})
assert len(info.files) == 10
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': ['eggs/', 'ham/']})
assert len(info.files) == 15
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': 'eggs/1.txt'})
assert len(info.files) == 1
|
<commit_before><commit_msg>Add test for file-based datasets.<commit_after>
|
import os
import shutil
import tempfile
import unittest
from lobster.cmssw import dataset
from lobster import fs, se
class TestDataset(unittest.TestCase):
@classmethod
def setUpClass(cls):
path = os.path.expandvars(
os.environ.get('LOBSTER_STORAGE', '/hadoop/store/user/') +
os.environ.get('LOBSTER_USER', os.environ['USER']) + '/')
if not os.path.exists(path):
os.makedirs(path)
cls.workdir = tempfile.mkdtemp(prefix=path)
os.chmod(cls.workdir, 0777)
os.makedirs(os.path.join(cls.workdir, 'eggs'))
for i in range(10):
with open(os.path.join(cls.workdir, 'eggs', str(i) + '.txt'), 'w') as f:
f.write('stir-fry')
os.makedirs(os.path.join(cls.workdir, 'ham'))
for i in range(5):
with open(os.path.join(cls.workdir, 'ham', str(i) + '.txt'), 'w') as f:
f.write('bacon')
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.workdir)
def runTest(self):
s = se.StorageConfiguration({'input': ['file://' + self.workdir]})
s.activate()
with fs.default():
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': 'eggs/'})
assert len(info.files) == 10
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': ['eggs/', 'ham/']})
assert len(info.files) == 15
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': 'eggs/1.txt'})
assert len(info.files) == 1
|
Add test for file-based datasets.import os
import shutil
import tempfile
import unittest
from lobster.cmssw import dataset
from lobster import fs, se
class TestDataset(unittest.TestCase):
@classmethod
def setUpClass(cls):
path = os.path.expandvars(
os.environ.get('LOBSTER_STORAGE', '/hadoop/store/user/') +
os.environ.get('LOBSTER_USER', os.environ['USER']) + '/')
if not os.path.exists(path):
os.makedirs(path)
cls.workdir = tempfile.mkdtemp(prefix=path)
os.chmod(cls.workdir, 0777)
os.makedirs(os.path.join(cls.workdir, 'eggs'))
for i in range(10):
with open(os.path.join(cls.workdir, 'eggs', str(i) + '.txt'), 'w') as f:
f.write('stir-fry')
os.makedirs(os.path.join(cls.workdir, 'ham'))
for i in range(5):
with open(os.path.join(cls.workdir, 'ham', str(i) + '.txt'), 'w') as f:
f.write('bacon')
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.workdir)
def runTest(self):
s = se.StorageConfiguration({'input': ['file://' + self.workdir]})
s.activate()
with fs.default():
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': 'eggs/'})
assert len(info.files) == 10
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': ['eggs/', 'ham/']})
assert len(info.files) == 15
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': 'eggs/1.txt'})
assert len(info.files) == 1
|
<commit_before><commit_msg>Add test for file-based datasets.<commit_after>import os
import shutil
import tempfile
import unittest
from lobster.cmssw import dataset
from lobster import fs, se
class TestDataset(unittest.TestCase):
@classmethod
def setUpClass(cls):
path = os.path.expandvars(
os.environ.get('LOBSTER_STORAGE', '/hadoop/store/user/') +
os.environ.get('LOBSTER_USER', os.environ['USER']) + '/')
if not os.path.exists(path):
os.makedirs(path)
cls.workdir = tempfile.mkdtemp(prefix=path)
os.chmod(cls.workdir, 0777)
os.makedirs(os.path.join(cls.workdir, 'eggs'))
for i in range(10):
with open(os.path.join(cls.workdir, 'eggs', str(i) + '.txt'), 'w') as f:
f.write('stir-fry')
os.makedirs(os.path.join(cls.workdir, 'ham'))
for i in range(5):
with open(os.path.join(cls.workdir, 'ham', str(i) + '.txt'), 'w') as f:
f.write('bacon')
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.workdir)
def runTest(self):
s = se.StorageConfiguration({'input': ['file://' + self.workdir]})
s.activate()
with fs.default():
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': 'eggs/'})
assert len(info.files) == 10
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': ['eggs/', 'ham/']})
assert len(info.files) == 15
info = dataset.MetaInterface().get_info({'label': 'ham', 'files': 'eggs/1.txt'})
assert len(info.files) == 1
|
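One portability note on the setup above: 0777 is Python 2 octal syntax and a SyntaxError on Python 3, where the same call would read:

os.chmod(cls.workdir, 0o777)  # Python 3 spelling of the octal literal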
|
49d6984f2c57cf2169304de8fd3956e2f21aba19
|
tests/test_comments.py
|
tests/test_comments.py
|
import collections
import logging
import pytest
from mappyfile.pprint import PrettyPrinter
def test_comment():
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "Test comment"}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """LAYER
NAME 'Test' # Test comment
END"""
assert(s == exp)
def test_double_comment():
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "Name comment", "type": "Type comment"}
d["type"] = "polygon"
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """LAYER
NAME 'Test' # Name comment
TYPE POLYGON # Type comment
END"""
assert(s == exp)
def test_header_comment():
"""
__type__ is used as the key for any object-level comments
"""
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"__type__": "Layer comment"}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """# Layer comment
LAYER
NAME 'Test'
END"""
assert(s == exp)
def test_header_list_comments():
"""
__type__ is used as the key for any object-level comments
"""
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"__type__": ["Layer comment 1", "Layer comment 2"]}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """# Layer comment 1
# Layer comment 2
LAYER
NAME 'Test'
END"""
assert(s == exp)
def run_tests():
pytest.main(["tests/test_comments.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('mappyfile').setLevel(logging.INFO)
run_tests()
print("Done!")
|
Add a test suite for comments in the dict structure
|
Add a test suite for comments in the dict structure
|
Python
|
mit
|
geographika/mappyfile,geographika/mappyfile
|
Add a test suite for comments in the dict structure
|
import collections
import logging
import pytest
from mappyfile.pprint import PrettyPrinter
def test_comment():
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "Test comment"}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """LAYER
NAME 'Test' # Test comment
END"""
assert(s == exp)
def test_double_comment():
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "Name comment", "type": "Type comment"}
d["type"] = "polygon"
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """LAYER
NAME 'Test' # Name comment
TYPE POLYGON # Type comment
END"""
assert(s == exp)
def test_header_comment():
"""
__type__ is used as the key for any object-level comments
"""
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"__type__": "Layer comment"}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """# Layer comment
LAYER
NAME 'Test'
END"""
assert(s == exp)
def test_header_list_comments():
"""
__type__ is used as the key for any object-level comments
"""
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"__type__": ["Layer comment 1", "Layer comment 2"]}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """# Layer comment 1
# Layer comment 2
LAYER
NAME 'Test'
END"""
assert(s == exp)
def run_tests():
pytest.main(["tests/test_comments.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('mappyfile').setLevel(logging.INFO)
run_tests()
print("Done!")
|
<commit_before><commit_msg>Add a test suite for comments in the dict structure<commit_after>
|
import collections
import logging
import pytest
from mappyfile.pprint import PrettyPrinter
def test_comment():
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "Test comment"}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """LAYER
NAME 'Test' # Test comment
END"""
assert(s == exp)
def test_double_comment():
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "Name comment", "type": "Type comment"}
d["type"] = "polygon"
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """LAYER
NAME 'Test' # Name comment
TYPE POLYGON # Type comment
END"""
assert(s == exp)
def test_header_comment():
"""
__type__ is used as the key for any object-level comments
"""
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"__type__": "Layer comment"}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """# Layer comment
LAYER
NAME 'Test'
END"""
assert(s == exp)
def test_header_list_comments():
"""
__type__ is used as the key for any object-level comments
"""
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"__type__": ["Layer comment 1", "Layer comment 2"]}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """# Layer comment 1
# Layer comment 2
LAYER
NAME 'Test'
END"""
assert(s == exp)
def run_tests():
pytest.main(["tests/test_comments.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('mappyfile').setLevel(logging.INFO)
run_tests()
print("Done!")
|
Add a test suite for comments in the dict structureimport collections
import logging
import pytest
from mappyfile.pprint import PrettyPrinter
def test_comment():
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "Test comment"}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """LAYER
NAME 'Test' # Test comment
END"""
assert(s == exp)
def test_double_comment():
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "Name comment", "type": "Type comment"}
d["type"] = "polygon"
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """LAYER
NAME 'Test' # Name comment
TYPE POLYGON # Type comment
END"""
assert(s == exp)
def test_header_comment():
"""
__type__ is used as the key for any object-level comments
"""
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"__type__": "Layer comment"}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """# Layer comment
LAYER
NAME 'Test'
END"""
assert(s == exp)
def test_header_list_comments():
"""
__type__ is used as the key for any object-level comments
"""
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"__type__": ["Layer comment 1", "Layer comment 2"]}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """# Layer comment 1
# Layer comment 2
LAYER
NAME 'Test'
END"""
assert(s == exp)
def run_tests():
pytest.main(["tests/test_comments.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('mappyfile').setLevel(logging.INFO)
run_tests()
print("Done!")
|
<commit_before><commit_msg>Add a test suite for comments in the dict structure<commit_after>import collections
import logging
import pytest
from mappyfile.pprint import PrettyPrinter
def test_comment():
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "Test comment"}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """LAYER
NAME 'Test' # Test comment
END"""
assert(s == exp)
def test_double_comment():
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "Name comment", "type": "Type comment"}
d["type"] = "polygon"
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """LAYER
NAME 'Test' # Name comment
TYPE POLYGON # Type comment
END"""
assert(s == exp)
def test_header_comment():
"""
__type__ is used as the key for any object-level comments
"""
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"__type__": "Layer comment"}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """# Layer comment
LAYER
NAME 'Test'
END"""
assert(s == exp)
def test_header_list_comments():
"""
__type__ is used as the key for any object-level comments
"""
d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"__type__": ["Layer comment 1", "Layer comment 2"]}
pp = PrettyPrinter(indent=0, quote="'", newlinechar="\n")
s = pp.pprint(d)
exp = """# Layer comment 1
# Layer comment 2
LAYER
NAME 'Test'
END"""
assert(s == exp)
def run_tests():
pytest.main(["tests/test_comments.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('mappyfile').setLevel(logging.INFO)
run_tests()
print("Done!")
|
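The convention the suite pins down: comments live in a parallel __comments__ dict keyed by the field they annotate, with __type__ doubling as the key for object-level header comments. A minimal standalone round trip built from the same pieces the tests use:

import collections
from mappyfile.pprint import PrettyPrinter

d = collections.OrderedDict()
d["name"] = "Test"
d["__type__"] = "layer"
d["__comments__"] = {"name": "inline comment", "__type__": "header comment"}
print(PrettyPrinter(indent=0, quote="'", newlinechar="\n").pprint(d))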
|
d2b4965f43d5fd853584b0abee666573a184c260
|
tilepack/tile_cover.py
|
tilepack/tile_cover.py
|
from tilepack.builder import cover_bbox
import argparse
import os
import requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument('min_zoom',
type=int,
help='The minimum zoom level to include')
parser.add_argument('max_zoom',
type=int,
help='The maximum zoom level to include')
parser.add_argument('--cities_url',
default="https://raw.githubusercontent.com/mapzen/metroextractor-cities/master/cities.geojson",
help='A GeoJSON URL with features to cover with tiles')
parser.add_argument('--output_prefix',
default="output",
help='The path prefix to output coverage data to')
args = parser.parse_args()
cities_resp = requests.get(args.cities_url)
cities_resp.raise_for_status()
cities_data = cities_resp.json()
features = cities_data['features']
for feature in features:
min_lon, min_lat, max_lon, max_lat = feature['bbox']
feature['properties']['area'] = (max_lon - min_lon) * (max_lat - min_lat)
biggest_features = sorted(features, key=lambda f: f['properties']['area'], reverse=True)[:200]
for feature in biggest_features:
name = feature['properties']['name']
min_lon, min_lat, max_lon, max_lat = feature['bbox']
count = 0
with open(os.path.join(args.output_prefix, '{}.csv'.format(name)), 'w') as f:
for zoom in range(args.min_zoom, args.max_zoom + 1):
for x, y, z in cover_bbox(min_lon, min_lat, max_lon, max_lat, zoom=zoom):
f.write('{}/{}/{}\n'.format(z, x, y))
count += 1
print("Wrote out {} tiles to {}".format(count, f.name))
if __name__ == '__main__':
main()
|
Add a script to compute tile coverage for city bounding boxes
|
Add a script to compute tile coverage for city bounding boxes
|
Python
|
mit
|
tilezen/tilepacks
|
Add a script to compute tile coverage for city bounding boxes
|
from tilepack.builder import cover_bbox
import argparse
import os
import requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument('min_zoom',
type=int,
help='The minimum zoom level to include')
parser.add_argument('max_zoom',
type=int,
help='The maximum zoom level to include')
parser.add_argument('--cities_url',
default="https://raw.githubusercontent.com/mapzen/metroextractor-cities/master/cities.geojson",
help='A GeoJSON URL with features to cover with tiles')
parser.add_argument('--output_prefix',
default="output",
help='The path prefix to output coverage data to')
args = parser.parse_args()
cities_resp = requests.get(args.cities_url)
cities_resp.raise_for_status()
cities_data = cities_resp.json()
features = cities_data['features']
for feature in features:
min_lon, min_lat, max_lon, max_lat = feature['bbox']
feature['properties']['area'] = (max_lon - min_lon) * (max_lat - min_lat)
biggest_features = sorted(features, key=lambda f: f['properties']['area'], reverse=True)[:200]
for feature in biggest_features:
name = feature['properties']['name']
min_lon, min_lat, max_lon, max_lat = feature['bbox']
count = 0
with open(os.path.join(args.output_prefix, '{}.csv'.format(name)), 'w') as f:
for zoom in range(args.min_zoom, args.max_zoom + 1):
for x, y, z in cover_bbox(min_lon, min_lat, max_lon, max_lat, zoom=zoom):
f.write('{}/{}/{}\n'.format(z, x, y))
count += 1
print("Wrote out {} tiles to {}".format(count, f.name))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to compute tile coverage for city bounding boxes<commit_after>
|
from tilepack.builder import cover_bbox
import argparse
import os
import requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument('min_zoom',
type=int,
help='The minimum zoom level to include')
parser.add_argument('max_zoom',
type=int,
help='The maximum zoom level to include')
parser.add_argument('--cities_url',
default="https://raw.githubusercontent.com/mapzen/metroextractor-cities/master/cities.geojson",
help='A GeoJSON URL with features to cover with tiles')
parser.add_argument('--output_prefix',
default="output",
help='The path prefix to output coverage data to')
args = parser.parse_args()
cities_resp = requests.get(args.cities_url)
cities_resp.raise_for_status()
cities_data = cities_resp.json()
features = cities_data['features']
for feature in features:
min_lon, min_lat, max_lon, max_lat = feature['bbox']
feature['properties']['area'] = (max_lon - min_lon) * (max_lat - min_lat)
biggest_features = sorted(features, key=lambda f: f['properties']['area'], reverse=True)[:200]
for feature in biggest_features:
name = feature['properties']['name']
min_lon, min_lat, max_lon, max_lat = feature['bbox']
count = 0
with open(os.path.join(args.output_prefix, '{}.csv'.format(name)), 'w') as f:
for zoom in range(args.min_zoom, args.max_zoom + 1):
for x, y, z in cover_bbox(min_lon, min_lat, max_lon, max_lat, zoom=zoom):
f.write('{}/{}/{}\n'.format(z, x, y))
count += 1
print("Wrote out {} tiles to {}".format(count, f.name))
if __name__ == '__main__':
main()
|
Add a script to compute tile coverage for city bounding boxesfrom tilepack.builder import cover_bbox
import argparse
import os
import requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument('min_zoom',
type=int,
help='The minimum zoom level to include')
parser.add_argument('max_zoom',
type=int,
help='The maximum zoom level to include')
parser.add_argument('--cities_url',
default="https://raw.githubusercontent.com/mapzen/metroextractor-cities/master/cities.geojson",
help='A GeoJSON URL with features to cover with tiles')
parser.add_argument('--output_prefix',
default="output",
help='The path prefix to output coverage data to')
args = parser.parse_args()
cities_resp = requests.get(args.cities_url)
cities_resp.raise_for_status()
cities_data = cities_resp.json()
features = cities_data['features']
for feature in features:
min_lon, min_lat, max_lon, max_lat = feature['bbox']
feature['properties']['area'] = (max_lon - min_lon) * (max_lat - min_lat)
biggest_features = sorted(features, key=lambda f: f['properties']['area'], reverse=True)[:200]
for feature in biggest_features:
name = feature['properties']['name']
min_lon, min_lat, max_lon, max_lat = feature['bbox']
count = 0
with open(os.path.join(args.output_prefix, '{}.csv'.format(name)), 'w') as f:
for zoom in range(args.min_zoom, args.max_zoom + 1):
for x, y, z in cover_bbox(min_lon, min_lat, max_lon, max_lat, zoom=zoom):
f.write('{}/{}/{}\n'.format(z, x, y))
count += 1
print("Wrote out {} tiles to {}".format(count, f.name))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to compute tile coverage for city bounding boxes<commit_after>from tilepack.builder import cover_bbox
import argparse
import os
import requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument('min_zoom',
type=int,
help='The minimum zoom level to include')
parser.add_argument('max_zoom',
type=int,
help='The maximum zoom level to include')
parser.add_argument('--cities_url',
default="https://raw.githubusercontent.com/mapzen/metroextractor-cities/master/cities.geojson",
help='A GeoJSON URL with features to cover with tiles')
parser.add_argument('--output_prefix',
default="output",
help='The path prefix to output coverage data to')
args = parser.parse_args()
cities_resp = requests.get(args.cities_url)
cities_resp.raise_for_status()
cities_data = cities_resp.json()
features = cities_data['features']
for feature in features:
min_lon, min_lat, max_lon, max_lat = feature['bbox']
feature['properties']['area'] = (max_lon - min_lon) * (max_lat - min_lat)
biggest_features = sorted(features, key=lambda f: f['properties']['area'], reverse=True)[:200]
for feature in biggest_features:
name = feature['properties']['name']
min_lon, min_lat, max_lon, max_lat = feature['bbox']
count = 0
with open(os.path.join(args.output_prefix, '{}.csv'.format(name)), 'w') as f:
for zoom in range(args.min_zoom, args.max_zoom + 1):
for x, y, z in cover_bbox(min_lon, min_lat, max_lon, max_lat, zoom=zoom):
f.write('{}/{}/{}\n'.format(z, x, y))
count += 1
print("Wrote out {} tiles to {}".format(count, f.name))
if __name__ == '__main__':
main()
|
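An illustrative invocation (zoom bounds are arbitrary; the module path assumes the file's location inside the tilepack package, and the output directory must already exist):

#   python -m tilepack.tile_cover 0 10 --output_prefix coverage
# writes one <city>.csv per covered feature, each line a z/x/y tile address.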
|
641d4a0fc8c226b7a1e8e62c8c8e9fd3892a9ed7
|
tests/method_call_test.py
|
tests/method_call_test.py
|
import unittest
from pykka import Actor
class MethodCallTest(unittest.TestCase):
def setUp(self):
class ActorWithMethods(Actor):
foo = 'bar'
def functional_hello(self, s):
return 'Hello, %s!' % s
def set_foo(self, s):
self.foo = s
self.actor = ActorWithMethods().start()
def tearDown(self):
self.actor.stop()
def test_functional_method_call_returns_correct_value(self):
self.assertEqual('Hello, world!',
self.actor.functional_hello('world').get())
self.assertEqual('Hello, moon!',
self.actor.functional_hello('moon').get())
def test_side_effect_of_method_is_observable(self):
self.assertEqual('bar', self.actor.foo.get())
self.actor.set_foo('baz')
self.assertEqual('baz', self.actor.foo.get())
|
Test method calls, with return value or side effect
|
Test method calls, with return value or side effect
|
Python
|
apache-2.0
|
tamland/pykka,jodal/pykka,tempbottle/pykka
|
Test method calls, with return value or side effect
|
import unittest
from pykka import Actor
class MethodCallTest(unittest.TestCase):
def setUp(self):
class ActorWithMethods(Actor):
foo = 'bar'
def functional_hello(self, s):
return 'Hello, %s!' % s
def set_foo(self, s):
self.foo = s
self.actor = ActorWithMethods().start()
def tearDown(self):
self.actor.stop()
def test_functional_method_call_returns_correct_value(self):
self.assertEqual('Hello, world!',
self.actor.functional_hello('world').get())
self.assertEqual('Hello, moon!',
self.actor.functional_hello('moon').get())
def test_side_effect_of_method_is_observable(self):
self.assertEqual('bar', self.actor.foo.get())
self.actor.set_foo('baz')
self.assertEqual('baz', self.actor.foo.get())
|
<commit_before><commit_msg>Test method calls, with return value or side effect<commit_after>
|
import unittest
from pykka import Actor
class MethodCallTest(unittest.TestCase):
def setUp(self):
class ActorWithMethods(Actor):
foo = 'bar'
def functional_hello(self, s):
return 'Hello, %s!' % s
def set_foo(self, s):
self.foo = s
self.actor = ActorWithMethods().start()
def tearDown(self):
self.actor.stop()
def test_functional_method_call_returns_correct_value(self):
self.assertEqual('Hello, world!',
self.actor.functional_hello('world').get())
self.assertEqual('Hello, moon!',
self.actor.functional_hello('moon').get())
def test_side_effect_of_method_is_observable(self):
self.assertEqual('bar', self.actor.foo.get())
self.actor.set_foo('baz')
self.assertEqual('baz', self.actor.foo.get())
|
Test method calls, with return value or side effectimport unittest
from pykka import Actor
class MethodCallTest(unittest.TestCase):
def setUp(self):
class ActorWithMethods(Actor):
foo = 'bar'
def functional_hello(self, s):
return 'Hello, %s!' % s
def set_foo(self, s):
self.foo = s
self.actor = ActorWithMethods().start()
def tearDown(self):
self.actor.stop()
def test_functional_method_call_returns_correct_value(self):
self.assertEqual('Hello, world!',
self.actor.functional_hello('world').get())
self.assertEqual('Hello, moon!',
self.actor.functional_hello('moon').get())
def test_side_effect_of_method_is_observable(self):
self.assertEqual('bar', self.actor.foo.get())
self.actor.set_foo('baz')
self.assertEqual('baz', self.actor.foo.get())
|
<commit_before><commit_msg>Test method calls, with return value or side effect<commit_after>import unittest
from pykka import Actor
class MethodCallTest(unittest.TestCase):
def setUp(self):
class ActorWithMethods(Actor):
foo = 'bar'
def functional_hello(self, s):
return 'Hello, %s!' % s
def set_foo(self, s):
self.foo = s
self.actor = ActorWithMethods().start()
def tearDown(self):
self.actor.stop()
def test_functional_method_call_returns_correct_value(self):
self.assertEqual('Hello, world!',
self.actor.functional_hello('world').get())
self.assertEqual('Hello, moon!',
self.actor.functional_hello('moon').get())
def test_side_effect_of_method_is_observable(self):
self.assertEqual('bar', self.actor.foo.get())
self.actor.set_foo('baz')
self.assertEqual('baz', self.actor.foo.get())
|
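The behaviour under test is pykka's transparent-proxy style API of that era: attribute reads and method calls on a started actor return futures, and .get() blocks for the value. A condensed sketch reusing the test's inner class:

actor = ActorWithMethods().start()        # as in setUp()
future = actor.functional_hello('world')  # returns a future immediately
print(future.get())                       # blocks until 'Hello, world!'
actor.stop()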
|
d487881f0a4483ddc489a47843e8fa5afb0b97bc
|
test/test_command.py
|
test/test_command.py
|
import io
import subprocess
import unittest
import yaml
class Command(unittest.TestCase):
stdin = u''
expected = {}
def runTest(self):
stdout = io.StringIO
prc = subprocess.Popen(
['bin/yamlicious'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
env={
'PYTHONPATH': '.'
}
)
prc.stdin.write(self.stdin)
prc.stdin.close()
self.assertEquals(self.expected, yaml.load(prc.stdout.read()))
class CommandMerge(Command):
stdin = u"""\
_merge:
- stuff:
- is awesome
- stuff:
- is cool
"""
expected = {
'stuff': [
'is awesome',
'is cool',
]
}
|
Add smoke test of the yamlicious command.
|
Add smoke test of the yamlicious command.
|
Python
|
bsd-2-clause
|
derrley/yamlicious,derrley/yamlicious
|
Add smoke test of the yamlicious command.
|
import io
import subprocess
import unittest
import yaml
class Command(unittest.TestCase):
stdin = u''
expected = {}
def runTest(self):
stdout = io.StringIO
prc = subprocess.Popen(
['bin/yamlicious'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
env={
'PYTHONPATH': '.'
}
)
prc.stdin.write(self.stdin)
prc.stdin.close()
self.assertEquals(self.expected, yaml.load(prc.stdout.read()))
class CommandMerge(Command):
stdin = u"""\
_merge:
- stuff:
- is awesome
- stuff:
- is cool
"""
expected = {
'stuff': [
'is awesome',
'is cool',
]
}
|
<commit_before><commit_msg>Add smoke test of the yamlicious command.<commit_after>
|
import io
import subprocess
import unittest
import yaml
class Command(unittest.TestCase):
stdin = u''
expected = {}
def runTest(self):
stdout = io.StringIO
prc = subprocess.Popen(
['bin/yamlicious'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
env={
'PYTHONPATH': '.'
}
)
prc.stdin.write(self.stdin)
prc.stdin.close()
self.assertEquals(self.expected, yaml.load(prc.stdout.read()))
class CommandMerge(Command):
stdin = u"""\
_merge:
- stuff:
- is awesome
- stuff:
- is cool
"""
expected = {
'stuff': [
'is awesome',
'is cool',
]
}
|
Add smoke test of the yamlicious command.import io
import subprocess
import unittest
import yaml
class Command(unittest.TestCase):
stdin = u''
expected = {}
def runTest(self):
stdout = io.StringIO
prc = subprocess.Popen(
['bin/yamlicious'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
env={
'PYTHONPATH': '.'
}
)
prc.stdin.write(self.stdin)
prc.stdin.close()
self.assertEquals(self.expected, yaml.load(prc.stdout.read()))
class CommandMerge(Command):
stdin = u"""\
_merge:
- stuff:
- is awesome
- stuff:
- is cool
"""
expected = {
'stuff': [
'is awesome',
'is cool',
]
}
|
<commit_before><commit_msg>Add smoke test of the yamlicious command.<commit_after>import io
import subprocess
import unittest
import yaml
class Command(unittest.TestCase):
stdin = u''
expected = {}
def runTest(self):
stdout = io.StringIO
prc = subprocess.Popen(
['bin/yamlicious'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
env={
'PYTHONPATH': '.'
}
)
prc.stdin.write(self.stdin)
prc.stdin.close()
self.assertEquals(self.expected, yaml.load(prc.stdout.read()))
class CommandMerge(Command):
stdin = u"""\
_merge:
- stuff:
- is awesome
- stuff:
- is cool
"""
expected = {
'stuff': [
'is awesome',
'is cool',
]
}
|
|
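The smoke test drives the executable through subprocess.Popen with manually managed pipes. The same round trip can be written with subprocess.run, which performs the write/close/read sequence in one call; a sketch, where bin/yamlicious and the _merge document come from the test above and everything else is standard library (Python 3.7+ for text= and capture_output=):

import subprocess
import yaml

doc = """\
_merge:
  - stuff:
      - is awesome
  - stuff:
      - is cool
"""

# text=True keeps stdin/stdout as str rather than bytes.
result = subprocess.run(
    ['bin/yamlicious'],
    input=doc,
    capture_output=True,
    text=True,
    env={'PYTHONPATH': '.'},
)
print(yaml.safe_load(result.stdout))  # {'stuff': ['is awesome', 'is cool']}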
a88ec15ba8c738c9f672373a3ca59b92ac2594ba
|
db/shot_attempt.py
|
db/shot_attempt.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
from db.common import Base
from db.specific_event import SpecificEvent
class ShotAttempt(Base, SpecificEvent):
__tablename__ = 'shot_attempts'
__autoload__ = True
STANDARD_ATTRS = [
"game_id", "team_id", "event_id", "player_id", "shot_attempt_type",
"plus_minus", "num_situation", "plr_situation", "actual", "score_diff"
]
def __init__(self, game_id, team_id, event_id, player_id, data_dict):
self.shot_attempt_id = uuid.uuid4().urn
self.game_id = game_id
self.team_id = team_id
self.event_id = event_id
self.player_id = player_id
for attr in self.STANDARD_ATTRS:
    if attr in data_dict:
        setattr(self, attr, data_dict[attr])
    else:
        if attr in ['actual']:
            setattr(self, attr, False)
        else:
            setattr(self, attr, None)
|
Add shot attempt item class definition
|
Add shot attempt item class definition
|
Python
|
mit
|
leaffan/pynhldb
|
Add shot attempt item class definition
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
from db.common import Base
from db.specific_event import SpecificEvent
class ShotAttempt(Base, SpecificEvent):
__tablename__ = 'shot_attempts'
__autoload__ = True
STANDARD_ATTRS = [
"game_id", "team_id", "event_id", "player_id", "shot_attempt_type",
"plus_minus", "num_situation", "plr_situation", "actual", "score_diff"
]
def __init__(self, game_id, team_id, event_id, player_id, data_dict):
self.shot_attempt_id = uuid.uuid4().urn
self.game_id = game_id
self.team_id = team_id
self.event_id = event_id
self.player_id = player_id
for attr in self.STANDARD_ATTRS:
    if attr in data_dict:
        setattr(self, attr, data_dict[attr])
    else:
        if attr in ['actual']:
            setattr(self, attr, False)
        else:
            setattr(self, attr, None)
|
<commit_before><commit_msg>Add shot attempt item class definition<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
from db.common import Base
from db.specific_event import SpecificEvent
class ShotAttempt(Base, SpecificEvent):
__tablename__ = 'shot_attempts'
__autoload__ = True
STANDARD_ATTRS = [
"game_id", "team_id", "event_id", "player_id", "shot_attempt_type",
"plus_minus", "num_situation", "plr_situation", "actual", "score_diff"
]
def __init__(self, game_id, team_id, event_id, player_id, data_dict):
self.shot_attempt_id = uuid.uuid4().urn
self.game_id = game_id
self.team_id = team_id
self.event_id = event_id
self.player_id = player_id
for attr in self.STANDARD_ATTRS:
    if attr in data_dict:
        setattr(self, attr, data_dict[attr])
    else:
        if attr in ['actual']:
            setattr(self, attr, False)
        else:
            setattr(self, attr, None)
|
Add shot attempt item class definition
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
from db.common import Base
from db.specific_event import SpecificEvent
class ShotAttempt(Base, SpecificEvent):
__tablename__ = 'shot_attempts'
__autoload__ = True
STANDARD_ATTRS = [
"game_id", "team_id", "event_id", "player_id", "shot_attempt_type",
"plus_minus", "num_situation", "plr_situation", "actual", "score_diff"
]
def __init__(self, game_id, team_id, event_id, player_id, data_dict):
self.shot_attempt_id = uuid.uuid4().urn
self.game_id = game_id
self.team_id = team_id
self.event_id = event_id
self.player_id = player_id
for attr in self.STANDARD_ATTRS:
    if attr in data_dict:
        setattr(self, attr, data_dict[attr])
    else:
        if attr in ['actual']:
            setattr(self, attr, False)
        else:
            setattr(self, attr, None)
|
<commit_before><commit_msg>Add shot attempt item class definition<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import uuid
from db.common import Base
from db.specific_event import SpecificEvent
class ShotAttempt(Base, SpecificEvent):
__tablename__ = 'shot_attempts'
__autoload__ = True
STANDARD_ATTRS = [
"game_id", "team_id", "event_id", "player_id", "shot_attempt_type",
"plus_minus", "num_situation", "plr_situation", "actual", "score_diff"
]
def __init__(self, game_id, team_id, event_id, player_id, data_dict):
self.shot_attempt_id = uuid.uuid4().urn
self.game_id = game_id
self.team_id = team_id
self.event_id = event_id
self.player_id = player_id
for attr in self.STANDARD_ATTRS:
    if attr in data_dict:
        setattr(self, attr, data_dict[attr])
    else:
        if attr in ['actual']:
            setattr(self, attr, False)
        else:
            setattr(self, attr, None)
|
|
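The constructor above follows a fill-from-dict-with-defaults pattern: every name in STANDARD_ATTRS is set either from the incoming dict or from a per-attribute fallback. The pattern in isolation, as a sketch with invented attribute names:

class Record(object):
    # Names the constructor is expected to populate.
    STANDARD_ATTRS = ['speed', 'distance', 'actual']

    def __init__(self, data_dict):
        for attr in self.STANDARD_ATTRS:
            if attr in data_dict:
                setattr(self, attr, data_dict[attr])
            elif attr == 'actual':
                setattr(self, attr, False)  # boolean flags default to False
            else:
                setattr(self, attr, None)   # everything else defaults to None

r = Record({'speed': 3})
print(r.speed, r.distance, r.actual)  # 3 None False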
b2dfa7ea44a0b9e061ffbb346fe9196ba96c2a44
|
nanshe_workflow/_reg_joblib.py
|
nanshe_workflow/_reg_joblib.py
|
import dask
import dask.distributed
import distributed
try:
import dask.distributed.joblib
except ImportError:
pass
try:
import distributed.joblib
except ImportError:
pass
import sklearn
import sklearn.externals
import sklearn.externals.joblib
|
Add backwards compatible Distributed Joblib hook
|
Add backwards compatible Distributed Joblib hook
Depending on the versions of Distributed, Joblib, and scikit-learn,
there are different strategies for registering the Joblib backend. Try
going with the standard Distributed technique first, which may fail for
Distributed 1.24.0+. In other cases, import `joblib` and
`sklearn.externals.joblib`, which should handle this for Dask when
Joblib 0.12.2+ and scikit-learn 0.20.0+ are present. This should ensure
that all three are aware of each other and can work correctly.
This dummy module is only useful internally. So mark it as such. It can
be dropped at any time without explanation once convenient.
Using this design, we can comfortably use this in place of the other
`import`s we were doing to achieve this effect.
|
Python
|
apache-2.0
|
nanshe-org/nanshe_workflow,DudLab/nanshe_workflow
|
Add backwards compatible Distributed Joblib hook
Depending on the versions of Distributed, Joblib, and scikit-learn,
there are different strategies for registering the Joblib backend. Try
going with the standard Distributed technique first, which may fail for
Distributed 1.24.0+. In other cases, import `joblib` and
`sklearn.externals.joblib`, which should handle this for Dask when
Joblib 0.12.2+ and scikit-learn 0.20.0+ are present. This should ensure
that all three are aware of each other and can work correctly.
This dummy module is only useful internally. So mark it as such. It can
be dropped at any time without explanation once convenient.
Using this design, we can comfortably use this in place of the other
`import`s we were doing to achieve this effect.
|
import dask
import dask.distributed
import distributed
try:
import dask.distributed.joblib
except ImportError:
pass
try:
import distributed.joblib
except ImportError:
pass
import sklearn
import sklearn.externals
import sklearn.externals.joblib
|
<commit_before><commit_msg>Add backwards compatible Distributed Joblib hook
Depending on the versions of Distributed, Joblib, and scikit-learn,
there are different strategies for registering the Joblib backend. Try
going with the standard Distributed technique first, which may fail for
Distributed 1.24.0+. In other cases, import `joblib` and
`sklearn.externals.joblib`, which should handle this for Dask when
Joblib 0.12.2+ and scikit-learn 0.20.0+ are present. This should ensure
that all three are aware of each other and can work correctly.
This dummy module is only useful internally. So mark it as such. It can
be dropped at any time without explanation once convenient.
Using this design, we can comfortably use this in place of the other
`import`s we were doing to achieve this effect.<commit_after>
|
import dask
import dask.distributed
import distributed
try:
import dask.distributed.joblib
except ImportError:
pass
try:
import distributed.joblib
except ImportError:
pass
import sklearn
import sklearn.externals
import sklearn.externals.joblib
|
Add backwards compatible Distributed Joblib hook
Depending on the versions of Distributed, Joblib, and scikit-learn,
there are different strategies for registering the Joblib backend. Try
going with the standard Distributed technique first, which may fail for
Distributed 1.24.0+. In other cases, import `joblib` and
`sklearn.externals.joblib`, which should handle this for Dask when
Joblib 0.12.2+ and scikit-learn 0.20.0+ are present. This should ensure
that all three are aware of each other and can work correctly.
This dummy module is only useful internally. So mark it as such. It can
be dropped at any time without explanation once convenient.
Using this design, we can comfortably use this in place of the other
`import`s we were doing to achieve this effect.
import dask
import dask.distributed
import distributed
try:
import dask.distributed.joblib
except ImportError:
pass
try:
import distributed.joblib
except ImportError:
pass
import sklearn
import sklearn.externals
import sklearn.externals.joblib
|
<commit_before><commit_msg>Add backwards compatible Distributed Joblib hook
Depending on the versions of Distributed, Joblib, and scikit-learn,
there are different strategies for registering the Joblib backend. Try
going with the standard Distributed technique first, which may fail for
Distributed 1.24.0+. In other cases, import `joblib` and
`sklearn.externals.joblib`, which should handle this for Dask when
Joblib 0.12.2+ and scikit-learn 0.20.0+ are present. This should ensure
that all three are aware of each other and can work correctly.
This dummy module is only useful internally. So mark it as such. It can
be dropped at any time without explanation once convenient.
Using this design, we can comfortably use this in place of the other
`import`s we were doing to achieve this effect.<commit_after>import dask
import dask.distributed
import distributed
try:
import dask.distributed.joblib
except ImportError:
pass
try:
import distributed.joblib
except ImportError:
pass
import sklearn
import sklearn.externals
import sklearn.externals.joblib
|
|
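The module above does its wiring purely through import side effects. With Joblib 0.12.2+ the Dask backend can also be selected explicitly at the call site; a sketch, assuming an in-process distributed scheduler is acceptable for the demonstration:

import joblib
from math import sqrt
from dask.distributed import Client

client = Client(processes=False)  # local scheduler, just for the demo

# Once distributed and joblib know about each other, 'dask' is a
# registered joblib backend name.
with joblib.parallel_backend('dask'):
    out = joblib.Parallel(n_jobs=2)(
        joblib.delayed(sqrt)(i) for i in range(10)
    )
print(out)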
e7299524720b4c30cfe4e8ebbc27eaed18d885b4
|
tests/test_upsert.py
|
tests/test_upsert.py
|
from django.db import models
from psqlextra import HStoreField
from psqlextra.query import ConflictAction
from .fake_model import get_fake_model
def test_upsert():
"""Tests whether simple upserts work correctly."""
model = get_fake_model({
'title': HStoreField(uniqueness=['key1']),
'cookies': models.CharField(max_length=255, null=True)
})
obj1 = (
model.objects
.upsert_and_get(
conflict_target=[('title', 'key1')],
fields=dict(
title={'key1': 'beer'},
cookies='cheers'
)
)
)
obj1.refresh_from_db()
assert obj1.title['key1'] == 'beer'
assert obj1.cookies == 'cheers'
obj2 = (
model.objects
.upsert_and_get(
conflict_target=[('title', 'key1')],
fields=dict(
title={'key1': 'beer'},
cookies='choco'
)
)
)
obj1.refresh_from_db()
obj2.refresh_from_db()
# assert both objects are the same
assert obj1.id == obj2.id
assert obj1.title['key1'] == 'beer'
assert obj1.cookies == 'choco'
assert obj2.title['key1'] == 'beer'
assert obj2.cookies == 'choco'
|
Add test for old upsert syntax
|
Add test for old upsert syntax
|
Python
|
mit
|
SectorLabs/django-postgres-extra
|
Add test for old upsert syntax
|
from django.db import models
from psqlextra import HStoreField
from psqlextra.query import ConflictAction
from .fake_model import get_fake_model
def test_upsert():
"""Tests whether simple upserts work correctly."""
model = get_fake_model({
'title': HStoreField(uniqueness=['key1']),
'cookies': models.CharField(max_length=255, null=True)
})
obj1 = (
model.objects
.upsert_and_get(
conflict_target=[('title', 'key1')],
fields=dict(
title={'key1': 'beer'},
cookies='cheers'
)
)
)
obj1.refresh_from_db()
assert obj1.title['key1'] == 'beer'
assert obj1.cookies == 'cheers'
obj2 = (
model.objects
.upsert_and_get(
conflict_target=[('title', 'key1')],
fields=dict(
title={'key1': 'beer'},
cookies='choco'
)
)
)
obj1.refresh_from_db()
obj2.refresh_from_db()
# assert both objects are the same
assert obj1.id == obj2.id
assert obj1.title['key1'] == 'beer'
assert obj1.cookies == 'choco'
assert obj2.title['key1'] == 'beer'
assert obj2.cookies == 'choco'
|
<commit_before><commit_msg>Add test for old upsert syntax<commit_after>
|
from django.db import models
from psqlextra import HStoreField
from psqlextra.query import ConflictAction
from .fake_model import get_fake_model
def test_upsert():
"""Tests whether simple upserts work correctly."""
model = get_fake_model({
'title': HStoreField(uniqueness=['key1']),
'cookies': models.CharField(max_length=255, null=True)
})
obj1 = (
model.objects
.upsert_and_get(
conflict_target=[('title', 'key1')],
fields=dict(
title={'key1': 'beer'},
cookies='cheers'
)
)
)
obj1.refresh_from_db()
assert obj1.title['key1'] == 'beer'
assert obj1.cookies == 'cheers'
obj2 = (
model.objects
.upsert_and_get(
conflict_target=[('title', 'key1')],
fields=dict(
title={'key1': 'beer'},
cookies='choco'
)
)
)
obj1.refresh_from_db()
obj2.refresh_from_db()
# assert both objects are the same
assert obj1.id == obj2.id
assert obj1.title['key1'] == 'beer'
assert obj1.cookies == 'choco'
assert obj2.title['key1'] == 'beer'
assert obj2.cookies == 'choco'
|
Add test for old upsert syntax
from django.db import models
from psqlextra import HStoreField
from psqlextra.query import ConflictAction
from .fake_model import get_fake_model
def test_upsert():
"""Tests whether simple upserts work correctly."""
model = get_fake_model({
'title': HStoreField(uniqueness=['key1']),
'cookies': models.CharField(max_length=255, null=True)
})
obj1 = (
model.objects
.upsert_and_get(
conflict_target=[('title', 'key1')],
fields=dict(
title={'key1': 'beer'},
cookies='cheers'
)
)
)
obj1.refresh_from_db()
assert obj1.title['key1'] == 'beer'
assert obj1.cookies == 'cheers'
obj2 = (
model.objects
.upsert_and_get(
conflict_target=[('title', 'key1')],
fields=dict(
title={'key1': 'beer'},
cookies='choco'
)
)
)
obj1.refresh_from_db()
obj2.refresh_from_db()
# assert both objects are the same
assert obj1.id == obj2.id
assert obj1.title['key1'] == 'beer'
assert obj1.cookies == 'choco'
assert obj2.title['key1'] == 'beer'
assert obj2.cookies == 'choco'
|
<commit_before><commit_msg>Add test for old upsert syntax<commit_after>from django.db import models
from psqlextra import HStoreField
from psqlextra.query import ConflictAction
from .fake_model import get_fake_model
def test_upsert():
"""Tests whether simple upserts work correctly."""
model = get_fake_model({
'title': HStoreField(uniqueness=['key1']),
'cookies': models.CharField(max_length=255, null=True)
})
obj1 = (
model.objects
.upsert_and_get(
conflict_target=[('title', 'key1')],
fields=dict(
title={'key1': 'beer'},
cookies='cheers'
)
)
)
obj1.refresh_from_db()
assert obj1.title['key1'] == 'beer'
assert obj1.cookies == 'cheers'
obj2 = (
model.objects
.upsert_and_get(
conflict_target=[('title', 'key1')],
fields=dict(
title={'key1': 'beer'},
cookies='choco'
)
)
)
obj1.refresh_from_db()
obj2.refresh_from_db()
# assert both objects are the same
assert obj1.id == obj2.id
assert obj1.title['key1'] == 'beer'
assert obj1.cookies == 'choco'
assert obj2.title['key1'] == 'beer'
assert obj2.cookies == 'choco'
|
|
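Under PostgreSQL, upsert_and_get corresponds to a native INSERT ... ON CONFLICT DO UPDATE ... RETURNING statement against the unique expression index that HStoreField(uniqueness=['key1']) creates. Roughly the SQL behind the second call, as a sketch with an invented table name and connection string:

import psycopg2

conn = psycopg2.connect('dbname=example')  # hypothetical DSN
with conn, conn.cursor() as cur:
    # Insert, or update on conflict, and hand back the row in one round trip.
    cur.execute(
        """
        INSERT INTO app_mymodel (title, cookies)
        VALUES (hstore('key1', %s), %s)
        ON CONFLICT ((title -> 'key1'))
        DO UPDATE SET cookies = EXCLUDED.cookies
        RETURNING id, cookies
        """,
        ('beer', 'choco'),
    )
    print(cur.fetchone())  # same row as the first insert, cookies updated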
58cd27f4daa921a63d0a80c31f5ff1bf73cb1992
|
lintcode/Medium/040_Implement_Queue_by_Two_Stacks.py
|
lintcode/Medium/040_Implement_Queue_by_Two_Stacks.py
|
class MyQueue:
def __init__(self):
self.stack1 = []
self.stack2 = []
def push(self, element):
# write your code here
self.stack1.append(element)
def top(self):
# write your code here
# return the top element
return self.stack1[0]
def pop(self):
# write your code here
# pop and return the top element
res = None
while (self.stack1):
ele = self.stack1.pop(-1)
if (self.stack1):
self.stack2.append(ele)
else:
res = ele
while (self.stack2):
ele = self.stack2.pop(-1)
self.stack1.append(ele)
return res
|
Add solution to lintcode question 40
|
Add solution to lintcode question 40
|
Python
|
mit
|
Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode
|
Add solution to lintcode question 40
|
class MyQueue:
def __init__(self):
self.stack1 = []
self.stack2 = []
def push(self, element):
# write your code here
self.stack1.append(element)
def top(self):
# write your code here
# return the top element
return self.stack1[0]
def pop(self):
# write your code here
# pop and return the top element
res = None
while (self.stack1):
ele = self.stack1.pop(-1)
if (self.stack1):
self.stack2.append(ele)
else:
res = ele
while (self.stack2):
ele = self.stack2.pop(-1)
self.stack1.append(ele)
return res
|
<commit_before><commit_msg>Add solution to lintcode question 40<commit_after>
|
class MyQueue:
def __init__(self):
self.stack1 = []
self.stack2 = []
def push(self, element):
# write your code here
self.stack1.append(element)
def top(self):
# write your code here
# return the top element
return self.stack1[0]
def pop(self):
# write your code here
# pop and return the top element
res = None
while (self.stack1):
ele = self.stack1.pop(-1)
if (self.stack1):
self.stack2.append(ele)
else:
res = ele
while (self.stack2):
ele = self.stack2.pop(-1)
self.stack1.append(ele)
return res
|
Add solution to lintcode question 40
class MyQueue:
def __init__(self):
self.stack1 = []
self.stack2 = []
def push(self, element):
# write your code here
self.stack1.append(element)
def top(self):
# write your code here
# return the top element
return self.stack1[0]
def pop(self):
# write your code here
# pop and return the top element
res = None
while (self.stack1):
ele = self.stack1.pop(-1)
if (self.stack1):
self.stack2.append(ele)
else:
res = ele
while (self.stack2):
ele = self.stack2.pop(-1)
self.stack1.append(ele)
return res
|
<commit_before><commit_msg>Add solution to lintcode question 40<commit_after>class MyQueue:
def __init__(self):
self.stack1 = []
self.stack2 = []
def push(self, element):
# write your code here
self.stack1.append(element)
def top(self):
# write your code here
# return the top element
return self.stack1[0]
def pop(self):
# write your code here
# pop and return the top element
res = None
while (self.stack1):
ele = self.stack1.pop(-1)
if (self.stack1):
self.stack2.append(ele)
else:
res = ele
while (self.stack2):
ele = self.stack2.pop(-1)
self.stack1.append(ele)
return res
|
|
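The pop above rebuilds stack1 after every call, so each pop costs O(n). The classic two-stack variant defers the transfer until the output stack is empty, which makes pop O(1) amortized; a sketch:

class AmortizedQueue(object):
    def __init__(self):
        self.inbox = []   # receives every push
        self.outbox = []  # serves pops in FIFO order

    def push(self, element):
        self.inbox.append(element)

    def pop(self):
        if not self.outbox:
            # Reverse the inbox into the outbox; each element moves at
            # most once in each direction over its lifetime.
            while self.inbox:
                self.outbox.append(self.inbox.pop())
        return self.outbox.pop()

q = AmortizedQueue()
for x in (1, 2, 3):
    q.push(x)
print(q.pop(), q.pop(), q.pop())  # 1 2 3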
893c11200043d0775e38ef065359806e8162e30c
|
zou/app/services/custom_actions_service.py
|
zou/app/services/custom_actions_service.py
|
from zou.app.models.custom_action import CustomAction
from zou.app.utils import cache, fields
def clear_custom_action_cache():
cache.cache.delete_memoized(get_custom_actions)
@cache.memoize_function(120)
def get_custom_actions():
return fields.serialize_models(CustomAction.get_all())
|
Add service module for custom actions
|
Add service module for custom actions
|
Python
|
agpl-3.0
|
cgwire/zou
|
Add service module for custom actions
|
from zou.app.models.custom_action import CustomAction
from zou.app.utils import cache, fields
def clear_custom_action_cache():
cache.cache.delete_memoized(get_custom_actions)
@cache.memoize_function(120)
def get_custom_actions():
return fields.serialize_models(CustomAction.get_all())
|
<commit_before><commit_msg>Add service module for custom actions<commit_after>
|
from zou.app.models.custom_action import CustomAction
from zou.app.utils import cache, fields
def clear_custom_action_cache():
cache.cache.delete_memoized(get_custom_actions)
@cache.memoize_function(120)
def get_custom_actions():
return fields.serialize_models(CustomAction.get_all())
|
Add service module for custom actions
from zou.app.models.custom_action import CustomAction
from zou.app.utils import cache, fields
def clear_custom_action_cache():
cache.cache.delete_memoized(get_custom_actions)
@cache.memoize_function(120)
def get_custom_actions():
return fields.serialize_models(CustomAction.get_all())
|
<commit_before><commit_msg>Add service module for custom actions<commit_after>from zou.app.models.custom_action import CustomAction
from zou.app.utils import cache, fields
def clear_custom_action_cache():
cache.cache.delete_memoized(get_custom_actions)
@cache.memoize_function(120)
def get_custom_actions():
return fields.serialize_models(CustomAction.get_all())
|
|
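The two functions form the usual memoize-and-invalidate pair: reads are served from a 120-second cache, and every code path that writes custom actions is expected to call the clear function. The same contract without the app's cache machinery, sketched with functools.lru_cache and an in-memory stand-in for the database:

import functools

_ACTIONS = [{'id': 1, 'name': 'export'}]  # stand-in for the CustomAction table

@functools.lru_cache(maxsize=1)
def get_custom_actions():
    return list(_ACTIONS)  # expensive read, cached after the first call

def clear_custom_action_cache():
    get_custom_actions.cache_clear()  # call this from every write path

print(get_custom_actions())
_ACTIONS.append({'id': 2, 'name': 'archive'})
clear_custom_action_cache()
print(get_custom_actions())  # now includes the new action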
b5de0f73962d770ef9325619ae724768da581014
|
test/test_frontend/test_modes/test_zoom.py
|
test/test_frontend/test_modes/test_zoom.py
|
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4.QtTest import QTest
from pyqode.core import frontend, style
from pyqode.core.frontend import modes
editor = None
mode = None
def setup_module():
global editor, mode
editor = frontend.CodeEdit()
mode = modes.ZoomMode()
frontend.install_mode(editor, mode)
frontend.open_file(editor, __file__)
editor.show()
QTest.qWait(500)
def teardown_module():
global editor
frontend.stop_server(editor)
del editor
def test_enabled():
global mode
assert mode.enabled
mode.enabled = False
mode.enabled = True
def test_key_events():
global editor
zoom = editor.font_size
QTest.keyPress(editor, '+', QtCore.Qt.ControlModifier)
assert editor.font_size > zoom
QTest.keyPress(editor, '0', QtCore.Qt.ControlModifier)
assert editor.font_size == zoom
QTest.keyPress(editor, '-', QtCore.Qt.ControlModifier)
assert editor.font_size < zoom
editor.wheelEvent(QtGui.QWheelEvent(
QtCore.QPoint(10, 10), 1, QtCore.Qt.MidButton,
QtCore.Qt.ControlModifier))
editor.wheelEvent(QtGui.QWheelEvent(
QtCore.QPoint(10, 10), -1, QtCore.Qt.MidButton,
QtCore.Qt.ControlModifier))
|
Add tests for zoom mode
|
Add tests for zoom mode
|
Python
|
mit
|
pyQode/pyqode.core,pyQode/pyqode.core,zwadar/pyqode.core
|
Add tests for zoom mode
|
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4.QtTest import QTest
from pyqode.core import frontend, style
from pyqode.core.frontend import modes
editor = None
mode = None
def setup_module():
global editor, mode
editor = frontend.CodeEdit()
mode = modes.ZoomMode()
frontend.install_mode(editor, mode)
frontend.open_file(editor, __file__)
editor.show()
QTest.qWait(500)
def teardown_module():
global editor
frontend.stop_server(editor)
del editor
def test_enabled():
global mode
assert mode.enabled
mode.enabled = False
mode.enabled = True
def test_key_events():
global editor
zoom = editor.font_size
QTest.keyPress(editor, '+', QtCore.Qt.ControlModifier)
assert editor.font_size > zoom
QTest.keyPress(editor, '0', QtCore.Qt.ControlModifier)
assert editor.font_size == zoom
QTest.keyPress(editor, '-', QtCore.Qt.ControlModifier)
assert editor.font_size < zoom
editor.wheelEvent(QtGui.QWheelEvent(
QtCore.QPoint(10, 10), 1, QtCore.Qt.MidButton,
QtCore.Qt.ControlModifier))
editor.wheelEvent(QtGui.QWheelEvent(
QtCore.QPoint(10, 10), -1, QtCore.Qt.MidButton,
QtCore.Qt.ControlModifier))
|
<commit_before><commit_msg>Add tests for zoom mode<commit_after>
|
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4.QtTest import QTest
from pyqode.core import frontend, style
from pyqode.core.frontend import modes
editor = None
mode = None
def setup_module():
global editor, mode
editor = frontend.CodeEdit()
mode = modes.ZoomMode()
frontend.install_mode(editor, mode)
frontend.open_file(editor, __file__)
editor.show()
QTest.qWait(500)
def teardown_module():
global editor
frontend.stop_server(editor)
del editor
def test_enabled():
global mode
assert mode.enabled
mode.enabled = False
mode.enabled = True
def test_key_events():
global editor
zoom = editor.font_size
QTest.keyPress(editor, '+', QtCore.Qt.ControlModifier)
assert editor.font_size > zoom
QTest.keyPress(editor, '0', QtCore.Qt.ControlModifier)
assert editor.font_size == zoom
QTest.keyPress(editor, '-', QtCore.Qt.ControlModifier)
assert editor.font_size < zoom
editor.wheelEvent(QtGui.QWheelEvent(
QtCore.QPoint(10, 10), 1, QtCore.Qt.MidButton,
QtCore.Qt.ControlModifier))
editor.wheelEvent(QtGui.QWheelEvent(
QtCore.QPoint(10, 10), -1, QtCore.Qt.MidButton,
QtCore.Qt.ControlModifier))
|
Add tests for zoom mode
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4.QtTest import QTest
from pyqode.core import frontend, style
from pyqode.core.frontend import modes
editor = None
mode = None
def setup_module():
global editor, mode
editor = frontend.CodeEdit()
mode = modes.ZoomMode()
frontend.install_mode(editor, mode)
frontend.open_file(editor, __file__)
editor.show()
QTest.qWait(500)
def teardown_module():
global editor
frontend.stop_server(editor)
del editor
def test_enabled():
global mode
assert mode.enabled
mode.enabled = False
mode.enabled = True
def test_key_events():
global editor
zoom = editor.font_size
QTest.keyPress(editor, '+', QtCore.Qt.ControlModifier)
assert editor.font_size > zoom
QTest.keyPress(editor, '0', QtCore.Qt.ControlModifier)
assert editor.font_size == zoom
QTest.keyPress(editor, '-', QtCore.Qt.ControlModifier)
assert editor.font_size < zoom
editor.wheelEvent(QtGui.QWheelEvent(
QtCore.QPoint(10, 10), 1, QtCore.Qt.MidButton,
QtCore.Qt.ControlModifier))
editor.wheelEvent(QtGui.QWheelEvent(
QtCore.QPoint(10, 10), -1, QtCore.Qt.MidButton,
QtCore.Qt.ControlModifier))
|
<commit_before><commit_msg>Add tests for zoom mode<commit_after>from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4.QtTest import QTest
from pyqode.core import frontend, style
from pyqode.core.frontend import modes
editor = None
mode = None
def setup_module():
global editor, mode
editor = frontend.CodeEdit()
mode = modes.ZoomMode()
frontend.install_mode(editor, mode)
frontend.open_file(editor, __file__)
editor.show()
QTest.qWait(500)
def teardown_module():
global editor
frontend.stop_server(editor)
del editor
def test_enabled():
global mode
assert mode.enabled
mode.enabled = False
mode.enabled = True
def test_key_events():
global editor
zoom = editor.font_size
QTest.keyPress(editor, '+', QtCore.Qt.ControlModifier)
assert editor.font_size > zoom
QTest.keyPress(editor, '0', QtCore.Qt.ControlModifier)
assert editor.font_size == zoom
QTest.keyPress(editor, '-', QtCore.Qt.ControlModifier)
assert editor.font_size < zoom
editor.wheelEvent(QtGui.QWheelEvent(
QtCore.QPoint(10, 10), 1, QtCore.Qt.MidButton,
QtCore.Qt.ControlModifier))
editor.wheelEvent(QtGui.QWheelEvent(
QtCore.QPoint(10, 10), -1, QtCore.Qt.MidButton,
QtCore.Qt.ControlModifier))
|
|
4918c909a23479be344eb52c40e1461fa94bd330
|
two_factor/migrations/0006_phonedevice_key_default.py
|
two_factor/migrations/0006_phonedevice_key_default.py
|
from django.db import migrations, models
import two_factor.models
class Migration(migrations.Migration):
dependencies = [
('two_factor', '0005_auto_20160224_0450'),
]
operations = [
migrations.AlterField(
model_name='phonedevice',
name='key',
field=models.CharField(default=two_factor.models.random_hex_str, help_text='Hex-encoded secret key', max_length=40, validators=[two_factor.models.key_validator]),
),
]
|
Add migration for byte string fix
|
Add migration for byte string fix
Changing the `PhoneDevice.key` default requires a model state migration.
See: 268c0d6 (Merge pull request #281 from Ameriks/byte_string_fix)
|
Python
|
mit
|
Bouke/django-two-factor-auth,Bouke/django-two-factor-auth
|
Add migration for byte string fix
Changing the `PhoneDevice.key` default requires a model state migration.
See: 268c0d6 (Merge pull request #281 from Ameriks/byte_string_fix)
|
from django.db import migrations, models
import two_factor.models
class Migration(migrations.Migration):
dependencies = [
('two_factor', '0005_auto_20160224_0450'),
]
operations = [
migrations.AlterField(
model_name='phonedevice',
name='key',
field=models.CharField(default=two_factor.models.random_hex_str, help_text='Hex-encoded secret key', max_length=40, validators=[two_factor.models.key_validator]),
),
]
|
<commit_before><commit_msg>Add migration for byte string fix
Changing the `PhoneDevice.key` default requires a model state migration.
See: 268c0d6 (Merge pull request #281 from Ameriks/byte_string_fix)<commit_after>
|
from django.db import migrations, models
import two_factor.models
class Migration(migrations.Migration):
dependencies = [
('two_factor', '0005_auto_20160224_0450'),
]
operations = [
migrations.AlterField(
model_name='phonedevice',
name='key',
field=models.CharField(default=two_factor.models.random_hex_str, help_text='Hex-encoded secret key', max_length=40, validators=[two_factor.models.key_validator]),
),
]
|
Add migration for byte string fix
Changing the `PhoneDevice.key` default requires a model state migration.
See: 268c0d6 (Merge pull request #281 from Ameriks/byte_string_fix)
from django.db import migrations, models
import two_factor.models
class Migration(migrations.Migration):
dependencies = [
('two_factor', '0005_auto_20160224_0450'),
]
operations = [
migrations.AlterField(
model_name='phonedevice',
name='key',
field=models.CharField(default=two_factor.models.random_hex_str, help_text='Hex-encoded secret key', max_length=40, validators=[two_factor.models.key_validator]),
),
]
|
<commit_before><commit_msg>Add migration for byte string fix
Changing the `PhoneDevice.key` default requires a model state migration.
See: 268c0d6 (Merge pull request #281 from Ameriks/byte_string_fix)<commit_after>from django.db import migrations, models
import two_factor.models
class Migration(migrations.Migration):
dependencies = [
('two_factor', '0005_auto_20160224_0450'),
]
operations = [
migrations.AlterField(
model_name='phonedevice',
name='key',
field=models.CharField(default=two_factor.models.random_hex_str, help_text='Hex-encoded secret key', max_length=40, validators=[two_factor.models.key_validator]),
),
]
|
|
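As the message notes, a changed field default never touches existing rows, but it does change the model state Django records, so makemigrations emits an AlterField. A sketch of verifying programmatically that models and migrations agree, assuming a recent Django and DJANGO_SETTINGS_MODULE pointing at a configured project:

import django
django.setup()

from django.core.management import call_command

# Raises SystemExit if the models have changes not captured by a migration.
call_command('makemigrations', 'two_factor', '--check', '--dry-run')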
a22355b129538cf7a7e5184c02f392952cd0b623
|
radio/sqlviews.py
|
radio/sqlviews.py
|
from django.db import connection
def create_repeater_tx_view():
"""
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS radio_repeater_tx_v;
CREATE OR REPLACE VIEW radio_repeater_tx_v AS
SELECT a.site_name,
a.last_inspected,
a.sss_display,
a.sss_description,
b.name as district,
a.channel_number,
a.point,
a.link_description,
ST_AsText(a.link_point) as link_point,
a.tx_frequency,
a.ctcss_tx,
a.nac_tx,
a.tx_antenna_height,
a.tx_power,
a.tx_antenna_gain,
a.output_color,
a.output_radius,
a.output_clutter
FROM radio_repeater a join radio_district b on a.district_id = b.id
''')
def create_repeater_rx_view():
"""
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS radio_repeater_rx_v;
CREATE OR REPLACE VIEW radio_repeater_rx_v AS
SELECT a.site_name,
a.last_inspected,
a.sss_display,
a.sss_description,
b.name as district,
a.channel_number,
a.point,
a.link_description,
ST_AsText(a.link_point) as link_point,
a.rx_frequency,
a.ctcss_rx,
a.nac_rx,
a.rx_antenna_height,
a.rx_power,
a.rx_antenna_gain,
a.output_color,
a.output_radius,
a.output_clutter
FROM radio_repeater a join radio_district b on a.district_id = b.id
''')
def create_all_views():
create_repeater_tx_view()
create_repeater_rx_view()
|
Create view for repeater tx and repeater rx
|
Create view for repeater tx and repeater rx
|
Python
|
bsd-3-clause
|
ropable/resource_tracking,ropable/resource_tracking,ropable/resource_tracking
|
Create view for repeater tx and repeater rx
|
from django.db import connection
def create_repeater_tx_view():
"""
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS radio_repeater_tx_v;
CREATE OR REPLACE VIEW radio_repeater_tx_v AS
SELECT a.site_name,
a.last_inspected,
a.sss_display,
a.sss_description,
b.name as district,
a.channel_number,
a.point,
a.link_description,
ST_AsText(a.link_point) as link_point,
a.tx_frequency,
a.ctcss_tx,
a.nac_tx,
a.tx_antenna_height,
a.tx_power,
a.tx_antenna_gain,
a.output_color,
a.output_radius,
a.output_clutter
FROM radio_repeater a join radio_district b on a.district_id = b.id
''')
def create_repeater_rx_view():
"""
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS radio_repeater_rx_v;
CREATE OR REPLACE VIEW radio_repeater_rx_v AS
SELECT a.site_name,
a.last_inspected,
a.sss_display,
a.sss_description,
b.name as district,
a.channel_number,
a.point,
a.link_description,
ST_AsText(a.link_point) as link_point,
a.rx_frequency,
a.ctcss_rx,
a.nac_rx,
a.rx_antenna_height,
a.rx_power,
a.rx_antenna_gain,
a.output_color,
a.output_radius,
a.output_clutter
FROM radio_repeater a join radio_district b on a.district_id = b.id
''')
def create_all_views():
create_repeater_tx_view()
create_repeater_rx_view()
|
<commit_before><commit_msg>Create view for repeater tx and repeater rx<commit_after>
|
from django.db import connection
def create_repeater_tx_view():
"""
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS radio_repeater_tx_v;
CREATE OR REPLACE VIEW radio_repeater_tx_v AS
SELECT a.site_name,
a.last_inspected,
a.sss_display,
a.sss_description,
b.name as district,
a.channel_number,
a.point,
a.link_description,
ST_AsText(a.link_point) as link_point,
a.tx_frequency,
a.ctcss_tx,
a.nac_tx,
a.tx_antenna_height,
a.tx_power,
a.tx_antenna_gain,
a.output_color,
a.output_radius,
a.output_clutter
FROM radio_repeater a join radio_district b on a.district_id = b.id
''')
def create_repeater_rx_view():
"""
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS radio_repeater_rx_v;
CREATE OR REPLACE VIEW radio_repeater_rx_v AS
SELECT a.site_name,
a.last_inspected,
a.sss_display,
a.sss_description,
b.name as district,
a.channel_number,
a.point,
a.link_description,
ST_AsText(a.link_point) as link_point,
a.rx_frequency,
a.ctcss_rx,
a.nac_rx,
a.rx_antenna_height,
a.rx_power,
a.rx_antenna_gain,
a.output_color,
a.output_radius,
a.output_clutter
FROM radio_repeater a join radio_district b on a.district_id = b.id
''')
def create_all_views():
create_repeater_tx_view()
create_repeater_rx_view()
|
Create view for repeater tx and repeater rx
from django.db import connection
def create_repeater_tx_view():
"""
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS radio_repeater_tx_v;
CREATE OR REPLACE VIEW radio_repeater_tx_v AS
SELECT a.site_name,
a.last_inspected,
a.sss_display,
a.sss_description,
b.name as district,
a.channel_number,
a.point,
a.link_description,
ST_AsText(a.link_point) as link_point,
a.tx_frequency,
a.ctcss_tx,
a.nac_tx,
a.tx_antenna_height,
a.tx_power,
a.tx_antenna_gain,
a.output_color,
a.output_radius,
a.output_clutter
FROM radio_repeater a join radio_district b on a.district_id = b.id
''')
def create_repeater_rx_view():
"""
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS radio_repeater_rx_v;
CREATE OR REPLACE VIEW radio_repeater_rx_v AS
SELECT a.site_name,
a.last_inspected,
a.sss_display,
a.sss_description,
b.name as district,
a.channel_number,
a.point,
a.link_description,
ST_AsText(a.link_point) as link_point,
a.rx_frequency,
a.ctcss_rx,
a.nac_rx,
a.rx_antenna_height,
a.rx_power,
a.rx_antenna_gain,
a.output_color,
a.output_radius,
a.output_clutter
FROM radio_repeater a join radio_district b on a.district_id = b.id
''')
def create_all_views():
create_repeater_tx_view()
create_repeater_rx_view()
|
<commit_before><commit_msg>Create view for repeater tx and repeater rx<commit_after>from django.db import connection
def create_repeater_tx_view():
"""
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS radio_repeater_tx_v;
CREATE OR REPLACE VIEW radio_repeater_tx_v AS
SELECT a.site_name,
a.last_inspected,
a.sss_display,
a.sss_description,
b.name as district,
a.channel_number,
a.point,
a.link_description,
ST_AsText(a.link_point) as link_point,
a.tx_frequency,
a.ctcss_tx,
a.nac_tx,
a.tx_antenna_height,
a.tx_power,
a.tx_antenna_gain,
a.output_color,
a.output_radius,
a.output_clutter
FROM radio_repeater a join radio_district b on a.district_id = b.id
''')
def create_repeater_rx_view():
"""
"""
from django.db import connection
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS radio_repeater_rx_v;
CREATE OR REPLACE VIEW radio_repeater_rx_v AS
SELECT a.site_name,
a.last_inspected,
a.sss_display,
a.sss_description,
b.name as district,
a.channel_number,
a.point,
a.link_description,
ST_AsText(a.link_point) as link_point,
a.rx_frequency,
a.ctcss_rx,
a.nac_rx,
a.rx_antenna_height,
a.rx_power,
a.rx_antenna_gain,
a.output_color,
a.output_radius,
a.output_clutter
FROM radio_repeater a join radio_district b on a.district_id = b.id
''')
def create_all_views():
create_repeater_tx_view()
create_repeater_rx_view()
|
|
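Because each statement starts with DROP VIEW IF EXISTS, create_all_views is safe to re-run and can be wired into a data migration so the views track the schema. A sketch, with the migration dependency invented:

from django.db import migrations

def forwards(apps, schema_editor):
    from radio.sqlviews import create_all_views
    create_all_views()

def backwards(apps, schema_editor):
    schema_editor.execute('DROP VIEW IF EXISTS radio_repeater_tx_v')
    schema_editor.execute('DROP VIEW IF EXISTS radio_repeater_rx_v')

class Migration(migrations.Migration):
    dependencies = [('radio', '0001_initial')]  # hypothetical dependency
    operations = [migrations.RunPython(forwards, backwards)]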
68eea54f3147decd466be16cccc03ab5ec8504bb
|
leetcode/116-Populating-Next-Right-Pointers-in-Each-Node/PopulNxtRhtPnterinEachNode_001_rec.py
|
leetcode/116-Populating-Next-Right-Pointers-in-Each-Node/PopulNxtRhtPnterinEachNode_001_rec.py
|
# Definition for binary tree with next pointer.
# class TreeLinkNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# self.next = None
class Solution(object):
def connect(self, root):
"""
:type root: TreeLinkNode
:rtype: nothing
"""
if root is not None:
l, r = root.left, root.right
self.connect(l)
self.connect(r)
while l is not None:
l.next = r
l = l.right
r = r.left
|
Create Populate Nxt Ptr in each node REC
|
Create Populate Nxt Ptr in each node REC
|
Python
|
mit
|
Chasego/codirit,cc13ny/Allin,cc13ny/Allin,cc13ny/algo,Chasego/codi,cc13ny/algo,Chasego/codirit,cc13ny/algo,cc13ny/Allin,Chasego/codirit,Chasego/codirit,Chasego/codi,cc13ny/algo,cc13ny/Allin,Chasego/cod,Chasego/cod,Chasego/codi,Chasego/codi,Chasego/cod,cc13ny/Allin,Chasego/codi,Chasego/cod,cc13ny/algo,Chasego/cod,Chasego/codirit
|
Create Populate Nxt Ptr in each node REC
|
# Definition for binary tree with next pointer.
# class TreeLinkNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# self.next = None
class Solution(object):
def connect(self, root):
"""
:type root: TreeLinkNode
:rtype: nothing
"""
if root is not None:
l, r = root.left, root.right
self.connect(l)
self.connect(r)
while l is not None:
l.next = r
l = l.right
r = r.left
|
<commit_before><commit_msg>Create Populate Nxt Ptr in each node REC<commit_after>
|
# Definition for binary tree with next pointer.
# class TreeLinkNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# self.next = None
class Solution(object):
def connect(self, root):
"""
:type root: TreeLinkNode
:rtype: nothing
"""
if root is not None:
l, r = root.left, root.right
self.connect(l)
self.connect(r)
while l is not None:
l.next = r
l = l.right
r = r.left
|
Create Populate Nxt Ptr in each node REC
# Definition for binary tree with next pointer.
# class TreeLinkNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# self.next = None
class Solution(object):
def connect(self, root):
"""
:type root: TreeLinkNode
:rtype: nothing
"""
if root is not None:
l, r = root.left, root.right
self.connect(l)
self.connect(r)
while l is not None:
l.next = r
l = l.right
r = r.left
|
<commit_before><commit_msg>Create Populate Nxt Ptr in each node REC<commit_after># Definition for binary tree with next pointer.
# class TreeLinkNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# self.next = None
class Solution(object):
def connect(self, root):
"""
:type root: TreeLinkNode
:rtype: nothing
"""
if root is not None:
l, r = root.left, root.right
self.connect(l)
self.connect(r)
while l is not None:
l.next = r
l = l.right
r = r.left
|
|
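The recursion links each parent's children and then stitches the seam between subtrees by walking the left child's right spine against the right child's left spine. For a perfect binary tree the same result is reachable iteratively in O(1) extra space, using next pointers already set on the level above; a sketch:

class TreeLinkNode(object):
    def __init__(self, x):
        self.val = x
        self.left = self.right = self.next = None

def connect_iterative(root):
    leftmost = root
    while leftmost and leftmost.left:
        node = leftmost
        while node:
            node.left.next = node.right  # siblings under one parent
            # bridge to the cousin via the parent's next pointer
            node.right.next = node.next.left if node.next else None
            node = node.next
        leftmost = leftmost.left  # drop down one level

root = TreeLinkNode(1)
root.left, root.right = TreeLinkNode(2), TreeLinkNode(3)
connect_iterative(root)
print(root.left.next.val)  # 3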
6f975b364301349bb19ea254b66f5d67008f4fdd
|
csunplugged/utils/errors/QueryParameterMultipleValuesError.py
|
csunplugged/utils/errors/QueryParameterMultipleValuesError.py
|
"""Exception for multiple values supplied for a single-valued query parameter."""
class QueryParameterMultipleValuesError(Exception):
"""Exception for a GET query parameter that was given more than one value."""
def __init__(self, parameter, values):
"""Initialise exception.
Args:
parameter: The query parameter for the exception (str).
values: The multiple values supplied for the parameter (list).
"""
super().__init__()
self.parameter = parameter
self.values = values
def __str__(self):
"""Override default error string.
Returns:
Error message naming the parameter and the values it was given.
"""
text = "Parameter '{}' must only have one value, but multiple were given ({})."
return text.format(self.parameter, self.values)
|
Add new exception for >1 value provided for a single valued parameter
|
Add new exception for >1 value provided for a single valued parameter
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
Add new exception for >1 value provided for a single valued parameter
|
"""Exception for multiple values supplied for a single-valued query parameter."""
class QueryParameterMultipleValuesError(Exception):
"""Exception for a GET query parameter that was given more than one value."""
def __init__(self, parameter, values):
"""Initialise exception.
Args:
parameter: The query parameter for the exception (str).
values: The multiple values supplied for the parameter (list).
"""
super().__init__()
self.parameter = parameter
self.values = values
def __str__(self):
"""Override default error string.
Returns:
Error message naming the parameter and the values it was given.
"""
text = "Parameter '{}' must only have one value, but multiple were given ({})."
return text.format(self.parameter, self.values)
|
<commit_before><commit_msg>Add new exception for >1 value provided for a single valued parameter<commit_after>
|
"""Exception for multiple values supplied for a single-valued query parameter."""
class QueryParameterMultipleValuesError(Exception):
"""Exception for a GET query parameter that was given more than one value."""
def __init__(self, parameter, values):
"""Initialise exception.
Args:
parameter: The query parameter for the exception (str).
values: The multiple values supplied for the parameter (list).
"""
super().__init__()
self.parameter = parameter
self.values = values
def __str__(self):
"""Override default error string.
Returns:
Error message naming the parameter and the values it was given.
"""
text = "Parameter '{}' must only have one value, but multiple were given ({})."
return text.format(self.parameter, self.values)
|
Add new exception for >1 value provided for a single valued parameter
"""Exception for multiple values supplied for a single-valued query parameter."""
class QueryParameterMultipleValuesError(Exception):
"""Exception for a GET query parameter that was given more than one value."""
def __init__(self, parameter, values):
"""Initialise exception.
Args:
parameter: The query parameter for the exception (str).
values: The multiple values supplied for the parameter (list).
"""
super().__init__()
self.parameter = parameter
self.values = values
def __str__(self):
"""Override default error string.
Returns:
Error message naming the parameter and the values it was given.
"""
text = "Parameter '{}' must only have one value, but multiple were given ({})."
return text.format(self.parameter, self.values)
|
<commit_before><commit_msg>Add new exception for >1 value provided for a single valued parameter<commit_after>"""Exception for multiple values supplied for a single-valued query parameter."""
class QueryParameterMultipleValuesError(Exception):
"""Exception for a GET query parameter that was given more than one value."""
def __init__(self, parameter, values):
"""Initialise exception.
Args:
parameter: The query parameter for the exception (str).
values: The multiple values supplied for the parameter (list).
"""
super().__init__()
self.parameter = parameter
self.values = values
def __str__(self):
"""Override default error string.
Returns:
Error message naming the parameter and the values it was given.
"""
text = "Parameter '{}' must only have one value, but multiple were given ({})."
return text.format(self.parameter, self.values)
|
|
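At a call site the exception pairs naturally with Django's QueryDict.getlist, which returns every value supplied for a repeated parameter. A sketch, with the helper and the QueryDict stand-in invented for illustration; it assumes QueryParameterMultipleValuesError from the module above is in scope:

class _FakeQueryDict(dict):
    """Minimal stand-in for Django's QueryDict, just for the demo."""
    def getlist(self, key):
        return self.get(key, [])

def get_single_parameter(query_dict, parameter):
    values = query_dict.getlist(parameter)
    if len(values) > 1:
        raise QueryParameterMultipleValuesError(parameter, values)
    return values[0] if values else None

try:
    get_single_parameter(_FakeQueryDict({'mode': ['a', 'b']}), 'mode')
except QueryParameterMultipleValuesError as exc:
    print(exc)  # Parameter 'mode' must only have one value, ...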
cee38843bcf4c628b1c1adc6014dbae7ad2e60c0
|
acq4/devices/Scanner/scan_program/tests/test_spiral.py
|
acq4/devices/Scanner/scan_program/tests/test_spiral.py
|
from __future__ import division
import numpy as np
from acq4.devices.Scanner.scan_program.spiral import SpiralScan
def test_spiral():
r1 = 10e-6
r2 = 20e-6
a1 = 1.
a2 = 30.
ss = SpiralScan((r1, r2), (a1, a2))
# check that analytically computed path length matches numerically computed
# paths
l1 = ss.length()
npts = ss.path(10000, uniform=False)
dif = npts[1:] - npts[:-1]
l2 = ((dif**2).sum(axis=1)**0.5).sum()
assert np.allclose(l1, l2)
upts = ss.path(10000, uniform=True)
dif = upts[1:] - upts[:-1]
ulengths = (dif**2).sum(axis=1)**0.5
l3 = ulengths.sum()
assert np.allclose(l1, l3)
assert ulengths.std() / ulengths.mean() < 1e-5
# check that uniform spacing actually works
assert np.allclose(upts[0], npts[0])
assert np.allclose(upts[-1], npts[-1])
assert np.allclose(ulengths, l1 / (len(upts)-1))
|
Add unit tests for spiral scan
|
Add unit tests for spiral scan
|
Python
|
mit
|
acq4/acq4,mgraupe/acq4,campagnola/acq4,meganbkratz/acq4,meganbkratz/acq4,acq4/acq4,acq4/acq4,mgraupe/acq4,tropp/acq4,mgraupe/acq4,meganbkratz/acq4,pbmanis/acq4,pbmanis/acq4,acq4/acq4,pbmanis/acq4,mgraupe/acq4,campagnola/acq4,tropp/acq4,campagnola/acq4,pbmanis/acq4,tropp/acq4,campagnola/acq4,meganbkratz/acq4,tropp/acq4,mgraupe/acq4
|
Add unit tests for spiral scan
|
from __future__ import division
import numpy as np
from acq4.devices.Scanner.scan_program.spiral import SpiralScan
def test_spiral():
r1 = 10e-6
r2 = 20e-6
a1 = 1.
a2 = 30.
ss = SpiralScan((r1, r2), (a1, a2))
# check that analytically computed path length matches numerically computed
# paths
l1 = ss.length()
npts = ss.path(10000, uniform=False)
dif = npts[1:] - npts[:-1]
l2 = ((dif**2).sum(axis=1)**0.5).sum()
assert np.allclose(l1, l2)
upts = ss.path(10000, uniform=True)
dif = upts[1:] - upts[:-1]
ulengths = (dif**2).sum(axis=1)**0.5
l3 = ulengths.sum()
assert np.allclose(l1, l3)
assert ulengths.std() / ulengths.mean() < 1e-5
# check that uniform spacing actually works
assert np.allclose(upts[0], npts[0])
assert np.allclose(upts[-1], npts[-1])
assert np.allclose(ulengths, l1 / (len(upts)-1))
|
<commit_before><commit_msg>Add unit tests for spiral scan<commit_after>
|
from __future__ import division
import numpy as np
from acq4.devices.Scanner.scan_program.spiral import SpiralScan
def test_spiral():
r1 = 10e-6
r2 = 20e-6
a1 = 1.
a2 = 30.
ss = SpiralScan((r1, r2), (a1, a2))
# check that analytically computed path length matches numerically computed
# paths
l1 = ss.length()
npts = ss.path(10000, uniform=False)
dif = npts[1:] - npts[:-1]
l2 = ((dif**2).sum(axis=1)**0.5).sum()
assert np.allclose(l1, l2)
upts = ss.path(10000, uniform=True)
dif = upts[1:] - upts[:-1]
ulengths = (dif**2).sum(axis=1)**0.5
l3 = ulengths.sum()
assert np.allclose(l1, l3)
assert ulengths.std() / ulengths.mean() < 1e-5
# check that uniform spacing actually works
assert np.allclose(upts[0], npts[0])
assert np.allclose(upts[-1], npts[-1])
assert np.allclose(ulengths, l1 / (len(upts)-1))
|
Add unit tests for spiral scan
from __future__ import division
import numpy as np
from acq4.devices.Scanner.scan_program.spiral import SpiralScan
def test_spiral():
r1 = 10e-6
r2 = 20e-6
a1 = 1.
a2 = 30.
ss = SpiralScan((r1, r2), (a1, a2))
# check that analytically computed path length matches numerically computed
# paths
l1 = ss.length()
npts = ss.path(10000, uniform=False)
dif = npts[1:] - npts[:-1]
l2 = ((dif**2).sum(axis=1)**0.5).sum()
assert np.allclose(l1, l2)
upts = ss.path(10000, uniform=True)
dif = upts[1:] - upts[:-1]
ulengths = (dif**2).sum(axis=1)**0.5
l3 = ulengths.sum()
assert np.allclose(l1, l3)
assert ulengths.std() / ulengths.mean() < 1e-5
# check that uniform spacing actually works
assert np.allclose(upts[0], npts[0])
assert np.allclose(upts[-1], npts[-1])
assert np.allclose(ulengths, l1 / (len(upts)-1))
|
<commit_before><commit_msg>Add unit tests for spiral scan<commit_after>from __future__ import division
import numpy as np
from acq4.devices.Scanner.scan_program.spiral import SpiralScan
def test_spiral():
r1 = 10e-6
r2 = 20e-6
a1 = 1.
a2 = 30.
ss = SpiralScan((r1, r2), (a1, a2))
# check that analytically computed path length matches numerically computed
# paths
l1 = ss.length()
npts = ss.path(10000, uniform=False)
dif = npts[1:] - npts[:-1]
l2 = ((dif**2).sum(axis=1)**0.5).sum()
assert np.allclose(l1, l2)
upts = ss.path(10000, uniform=True)
dif = upts[1:] - upts[:-1]
ulengths = (dif**2).sum(axis=1)**0.5
l3 = ulengths.sum()
assert np.allclose(l1, l3)
assert ulengths.std() / ulengths.mean() < 1e-5
# check that uniform spacing actually works
assert np.allclose(upts[0], npts[0])
assert np.allclose(upts[-1], npts[-1])
assert np.allclose(ulengths, l1 / (len(upts)-1))
|
|
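The test's core technique is general: sample the parametric curve densely, sum the chord lengths, and compare against the closed-form length. The same check on a curve whose length is known exactly, a unit circle, as a standalone sketch:

import numpy as np

t = np.linspace(0, 2 * np.pi, 100000)
pts = np.column_stack([np.cos(t), np.sin(t)])

dif = pts[1:] - pts[:-1]                         # chords between samples
numeric = ((dif ** 2).sum(axis=1) ** 0.5).sum()  # polyline length

analytic = 2 * np.pi  # exact circumference of the unit circle
assert np.allclose(numeric, analytic, rtol=1e-6)
print(numeric, analytic)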
6524be7830e1db3b7accbc95fb70318a66101c8e
|
cpro/migrations/0016_auto_20171216_1606.py
|
cpro/migrations/0016_auto_20171216_1606.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cpro', '0015_auto_20170217_0801'),
]
operations = [
migrations.AlterField(
model_name='card',
name='i_skill',
field=models.PositiveIntegerField(null=True, verbose_name='Skill', choices=[(0, 'Lesser Perfect Lock'), (1, 'Greater Perfect Lock'), (2, 'Extreme Perfect Lock'), (3, 'Combo Lock'), (4, 'Healer'), (5, 'Life Lock'), (6, 'Combo Bonus'), (7, 'Perfect Score Bonus'), (8, 'Overload'), (9, 'Score Boost'), (10, 'All Round'), (11, 'Concentration'), (12, 'Skill Boost'), (13, 'Cute/Cool/Passion Focus')]),
preserve_default=True,
),
]
|
Add 2 skill types: Skill Boost and Cute/Cool/Passion Focus
|
Add 2 skill types: Skill Boost and Cute/Cool/Passion Focus
|
Python
|
apache-2.0
|
SchoolIdolTomodachi/CinderellaProducers,SchoolIdolTomodachi/CinderellaProducers
|
Add 2 skill types: Skill Boost and Cute/Cool/Passion Focus
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cpro', '0015_auto_20170217_0801'),
]
operations = [
migrations.AlterField(
model_name='card',
name='i_skill',
field=models.PositiveIntegerField(null=True, verbose_name='Skill', choices=[(0, 'Lesser Perfect Lock'), (1, 'Greater Perfect Lock'), (2, 'Extreme Perfect Lock'), (3, 'Combo Lock'), (4, 'Healer'), (5, 'Life Lock'), (6, 'Combo Bonus'), (7, 'Perfect Score Bonus'), (8, 'Overload'), (9, 'Score Boost'), (10, 'All Round'), (11, 'Concentration'), (12, 'Skill Boost'), (13, 'Cute/Cool/Passion Focus')]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add 2 skill types: Skill Boost and Cute/Cool/Passion Focus<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cpro', '0015_auto_20170217_0801'),
]
operations = [
migrations.AlterField(
model_name='card',
name='i_skill',
field=models.PositiveIntegerField(null=True, verbose_name='Skill', choices=[(0, 'Lesser Perfect Lock'), (1, 'Greater Perfect Lock'), (2, 'Extreme Perfect Lock'), (3, 'Combo Lock'), (4, 'Healer'), (5, 'Life Lock'), (6, 'Combo Bonus'), (7, 'Perfect Score Bonus'), (8, 'Overload'), (9, 'Score Boost'), (10, 'All Round'), (11, 'Concentration'), (12, 'Skill Boost'), (13, 'Cute/Cool/Passion Focus')]),
preserve_default=True,
),
]
|
Add 2 skill types: Skill Boost and Cute/Cool/Passion Focus
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cpro', '0015_auto_20170217_0801'),
]
operations = [
migrations.AlterField(
model_name='card',
name='i_skill',
field=models.PositiveIntegerField(null=True, verbose_name='Skill', choices=[(0, 'Lesser Perfect Lock'), (1, 'Greater Perfect Lock'), (2, 'Extreme Perfect Lock'), (3, 'Combo Lock'), (4, 'Healer'), (5, 'Life Lock'), (6, 'Combo Bonus'), (7, 'Perfect Score Bonus'), (8, 'Overload'), (9, 'Score Boost'), (10, 'All Round'), (11, 'Concentration'), (12, 'Skill Boost'), (13, 'Cute/Cool/Passion Focus')]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add 2 skill types: Skill Boost and Cute/Cool/Passion Focus<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cpro', '0015_auto_20170217_0801'),
]
operations = [
migrations.AlterField(
model_name='card',
name='i_skill',
field=models.PositiveIntegerField(null=True, verbose_name='Skill', choices=[(0, 'Lesser Perfect Lock'), (1, 'Greater Perfect Lock'), (2, 'Extreme Perfect Lock'), (3, 'Combo Lock'), (4, 'Healer'), (5, 'Life Lock'), (6, 'Combo Bonus'), (7, 'Perfect Score Bonus'), (8, 'Overload'), (9, 'Score Boost'), (10, 'All Round'), (11, 'Concentration'), (12, 'Skill Boost'), (13, 'Cute/Cool/Passion Focus')]),
preserve_default=True,
),
]
|
|
f772eb5b606ade1d1ec9b8e16c554b715634aba8
|
traits/util/tests/test_deprecated.py
|
traits/util/tests/test_deprecated.py
|
#------------------------------------------------------------------------------
# Copyright (c) 2005-2014, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#------------------------------------------------------------------------------
import unittest
from traits.testing.api import UnittestTools
from traits.util.api import deprecated
@deprecated("Addition is deprecated; use subtraction instead.")
def my_deprecated_addition(x, y):
return x + y
@deprecated("Broken code. Use something else.")
def my_bad_function():
1 / 0
class ClassWithDeprecatedBits(object):
@deprecated('bits are deprecated; use bytes')
def bits(self):
return 42
@deprecated('bytes are deprecated too. Use base 10.')
def bytes(self, required_arg, *args, **kwargs):
return required_arg, args, kwargs
class TestDeprecated(unittest.TestCase, UnittestTools):
def test_deprecated_function(self):
with self.assertDeprecated():
result = my_deprecated_addition(42, 1729)
self.assertEqual(result, 1771)
def test_deprecated_exception_raising_function(self):
with self.assertRaises(ZeroDivisionError):
with self.assertDeprecated():
my_bad_function()
def test_deprecated_method(self):
obj = ClassWithDeprecatedBits()
with self.assertDeprecated():
result = obj.bits()
self.assertEqual(result, 42)
def test_deprecated_method_with_fancy_signature(self):
obj = ClassWithDeprecatedBits()
with self.assertDeprecated():
result = obj.bytes(3, 27, 65, name='Boris', age=-3.2)
self.assertEqual(
result, (3, (27, 65), {'name': 'Boris', 'age': -3.2}))
|
Add tests for 'deprecated' decorator with functions and methods.
|
Add tests for 'deprecated' decorator with functions and methods.
|
Python
|
bsd-3-clause
|
burnpanck/traits,burnpanck/traits
|
Add tests for 'deprecated' decorator with functions and methods.
|
#------------------------------------------------------------------------------
# Copyright (c) 2005-2014, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#------------------------------------------------------------------------------
import unittest
from traits.testing.api import UnittestTools
from traits.util.api import deprecated
@deprecated("Addition is deprecated; use subtraction instead.")
def my_deprecated_addition(x, y):
return x + y
@deprecated("Broken code. Use something else.")
def my_bad_function():
1 / 0
class ClassWithDeprecatedBits(object):
@deprecated('bits are deprecated; use bytes')
def bits(self):
return 42
@deprecated('bytes are deprecated too. Use base 10.')
def bytes(self, required_arg, *args, **kwargs):
return required_arg, args, kwargs
class TestDeprecated(unittest.TestCase, UnittestTools):
def test_deprecated_function(self):
with self.assertDeprecated():
result = my_deprecated_addition(42, 1729)
self.assertEqual(result, 1771)
def test_deprecated_exception_raising_function(self):
with self.assertRaises(ZeroDivisionError):
with self.assertDeprecated():
my_bad_function()
def test_deprecated_method(self):
obj = ClassWithDeprecatedBits()
with self.assertDeprecated():
result = obj.bits()
self.assertEqual(result, 42)
def test_deprecated_method_with_fancy_signature(self):
obj = ClassWithDeprecatedBits()
with self.assertDeprecated():
result = obj.bytes(3, 27, 65, name='Boris', age=-3.2)
self.assertEqual(
result, (3, (27, 65), {'name': 'Boris', 'age': -3.2}))
|
<commit_before><commit_msg>Add tests for 'deprecated' decorator with functions and methods.<commit_after>
|
#------------------------------------------------------------------------------
# Copyright (c) 2005-2014, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#------------------------------------------------------------------------------
import unittest
from traits.testing.api import UnittestTools
from traits.util.api import deprecated
@deprecated("Addition is deprecated; use subtraction instead.")
def my_deprecated_addition(x, y):
return x + y
@deprecated("Broken code. Use something else.")
def my_bad_function():
1 / 0
class ClassWithDeprecatedBits(object):
@deprecated('bits are deprecated; use bytes')
def bits(self):
return 42
@deprecated('bytes are deprecated too. Use base 10.')
def bytes(self, required_arg, *args, **kwargs):
return required_arg, args, kwargs
class TestDeprecated(unittest.TestCase, UnittestTools):
def test_deprecated_function(self):
with self.assertDeprecated():
result = my_deprecated_addition(42, 1729)
self.assertEqual(result, 1771)
def test_deprecated_exception_raising_function(self):
with self.assertRaises(ZeroDivisionError):
with self.assertDeprecated():
my_bad_function()
def test_deprecated_method(self):
obj = ClassWithDeprecatedBits()
with self.assertDeprecated():
result = obj.bits()
self.assertEqual(result, 42)
def test_deprecated_method_with_fancy_signature(self):
obj = ClassWithDeprecatedBits()
with self.assertDeprecated():
result = obj.bytes(3, 27, 65, name='Boris', age=-3.2)
self.assertEqual(
result, (3, (27, 65), {'name': 'Boris', 'age': -3.2}))
|
Add tests for 'deprecated' decorator with functions and methods.#------------------------------------------------------------------------------
# Copyright (c) 2005-2014, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#------------------------------------------------------------------------------
import unittest
from traits.testing.api import UnittestTools
from traits.util.api import deprecated
@deprecated("Addition is deprecated; use subtraction instead.")
def my_deprecated_addition(x, y):
return x + y
@deprecated("Broken code. Use something else.")
def my_bad_function():
1 / 0
class ClassWithDeprecatedBits(object):
@deprecated('bits are deprecated; use bytes')
def bits(self):
return 42
@deprecated('bytes are deprecated too. Use base 10.')
def bytes(self, required_arg, *args, **kwargs):
return required_arg, args, kwargs
class TestDeprecated(unittest.TestCase, UnittestTools):
def test_deprecated_function(self):
with self.assertDeprecated():
result = my_deprecated_addition(42, 1729)
self.assertEqual(result, 1771)
def test_deprecated_exception_raising_function(self):
with self.assertRaises(ZeroDivisionError):
with self.assertDeprecated():
my_bad_function()
def test_deprecated_method(self):
obj = ClassWithDeprecatedBits()
with self.assertDeprecated():
result = obj.bits()
self.assertEqual(result, 42)
def test_deprecated_method_with_fancy_signature(self):
obj = ClassWithDeprecatedBits()
with self.assertDeprecated():
result = obj.bytes(3, 27, 65, name='Boris', age=-3.2)
self.assertEqual(
result, (3, (27, 65), {'name': 'Boris', 'age': -3.2}))
|
<commit_before><commit_msg>Add tests for 'deprecated' decorator with functions and methods.<commit_after>#------------------------------------------------------------------------------
# Copyright (c) 2005-2014, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#------------------------------------------------------------------------------
import unittest
from traits.testing.api import UnittestTools
from traits.util.api import deprecated
@deprecated("Addition is deprecated; use subtraction instead.")
def my_deprecated_addition(x, y):
return x + y
@deprecated("Broken code. Use something else.")
def my_bad_function():
1 / 0
class ClassWithDeprecatedBits(object):
@deprecated('bits are deprecated; use bytes')
def bits(self):
return 42
@deprecated('bytes are deprecated too. Use base 10.')
def bytes(self, required_arg, *args, **kwargs):
return required_arg, args, kwargs
class TestDeprecated(unittest.TestCase, UnittestTools):
def test_deprecated_function(self):
with self.assertDeprecated():
result = my_deprecated_addition(42, 1729)
self.assertEqual(result, 1771)
def test_deprecated_exception_raising_function(self):
with self.assertRaises(ZeroDivisionError):
with self.assertDeprecated():
my_bad_function()
def test_deprecated_method(self):
obj = ClassWithDeprecatedBits()
with self.assertDeprecated():
result = obj.bits()
self.assertEqual(result, 42)
def test_deprecated_method_with_fancy_signature(self):
obj = ClassWithDeprecatedBits()
with self.assertDeprecated():
result = obj.bytes(3, 27, 65, name='Boris', age=-3.2)
self.assertEqual(
result, (3, (27, 65), {'name': 'Boris', 'age': -3.2}))
|
|
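For orientation, a minimal sketch of how a 'deprecated' decorator like the one exercised above could be implemented; the exact warning category and wrapping are assumptions, not the actual traits.util code:

import functools
import warnings

def deprecated(message):
    # Hypothetical factory: each call to the wrapped function emits *message*
    # as a DeprecationWarning, which is what assertDeprecated-style helpers
    # typically listen for.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            warnings.warn(message, DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)
        return wrapper
    return decorator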
ca9654586c03dc746219b4e9dd6742ed9eca989d
|
source/bark/logger.py
|
source/bark/logger.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import copy
import bark
from .log import Log
class Logger(Log):
'''Helper for emitting logs.
A logger can be used to preset common information (such as a name) and then
emit :py:class:`~bark.log.Log` records with that information already
present.
'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Logger, self).__init__(**kw)
def log(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
log = copy.deepcopy(self)
log.update(**kw)
log['message'] = message
# Call global handle method.
bark.handle(log)
|
Add Logger class that acts as a convenient holder of common log information.
|
Add Logger class that acts as a convenient holder of common log information.
Also deals with emitting logs by calling global handle.
|
Python
|
apache-2.0
|
4degrees/mill,4degrees/sawmill
|
Add Logger class that acts as a convenient holder of common log information.
Also deals with emitting logs by calling global handle.
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import copy
import bark
from .log import Log
class Logger(Log):
'''Helper for emitting logs.
A logger can be used to preset common information (such as a name) and then
emit :py:class:`~bark.log.Log` records with that information already
present.
'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Logger, self).__init__(**kw)
def log(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
log = copy.deepcopy(self)
log.update(**kw)
log['message'] = message
# Call global handle method.
bark.handle(log)
|
<commit_before><commit_msg>Add Logger class that acts as a convenient holder of common log information.
Also deals with emitting logs by calling global handle.<commit_after>
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import copy
import bark
from .log import Log
class Logger(Log):
'''Helper for emitting logs.
A logger can be used to preset common information (such as a name) and then
emit :py:class:`~bark.log.Log` records with that information already
present.
'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Logger, self).__init__(**kw)
def log(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
log = copy.deepcopy(self)
log.update(**kw)
log['message'] = message
# Call global handle method.
bark.handle(log)
|
Add Logger class that acts as a convenient holder of common log information.
Also deals with emitting logs by calling global handle.# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import copy
import bark
from .log import Log
class Logger(Log):
'''Helper for emitting logs.
A logger can be used to preset common information (such as a name) and then
emit :py:class:`~bark.log.Log` records with that information already
present.
'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Logger, self).__init__(**kw)
def log(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
log = copy.deepcopy(self)
log.update(**kw)
log['message'] = message
# Call global handle method.
bark.handle(log)
|
<commit_before><commit_msg>Add Logger class that acts as a convenient holder of common log information.
Also deals with emitting logs by calling global handle.<commit_after># :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import copy
import bark
from .log import Log
class Logger(Log):
'''Helper for emitting logs.
A logger can be used to preset common information (such as a name) and then
emit :py:class:`~bark.log.Log` records with that information already
present.
'''
def __init__(self, name, **kw):
'''Initialise logger with identifying *name*.'''
kw['name'] = name
super(Logger, self).__init__(**kw)
def log(self, message, **kw):
'''Emit a :py:class:`~bark.log.Log` record.
A copy of this logger's information is made and then merged with the
passed in *kw* arguments before being emitted.
'''
log = copy.deepcopy(self)
log.update(**kw)
log['message'] = message
# Call global handle method.
bark.handle(log)
|
|
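A short usage sketch of the Logger above. The import path is inferred from the file location and the keyword handling from the dict-style access in log(); both are assumptions rather than documented API:

from bark.logger import Logger

# Preset common information once; every emitted log carries it.
logger = Logger('my.app', environment='production')

# Merges the extra keyword data, sets the message, and hands the
# record to the global bark.handle dispatcher.
logger.log('Server started', port=8080)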
112a661139dfb85b4b3de346f569b3eb28a76e48
|
tests/test_memory.py
|
tests/test_memory.py
|
'Test of memory allocation'
import numpy as np
from nose.tools import assert_raises
from videocore.assembler import qpu, assemble
from videocore.driver import Driver, DriverError, \
DEFAULT_DATA_AREA_SIZE, DEFAULT_CODE_AREA_SIZE
def test_maximum_alloc():
with Driver() as drv:
size = DEFAULT_DATA_AREA_SIZE
a = drv.alloc(shape = size, dtype = np.uint8)
assert size == a.nbytes
def test_too_large_alloc():
with Driver() as drv:
size = DEFAULT_DATA_AREA_SIZE + 1
assert_raises(DriverError, drv.alloc, shape = size, dtype = np.uint8)
@qpu
def one_nop(asm):
nop()
def test_maximum_code():
with Driver() as drv:
code_one_nop = assemble(one_nop)
code = code_one_nop * (DEFAULT_CODE_AREA_SIZE // 8)
assert len(code) == DEFAULT_CODE_AREA_SIZE
prog = drv.program(code)
assert prog.size == DEFAULT_CODE_AREA_SIZE
def test_too_large_code():
with Driver() as drv:
code_one_nop = assemble(one_nop)
code = code_one_nop * (DEFAULT_CODE_AREA_SIZE // 8 + 1)
assert_raises(DriverError, drv.program, code)
|
Add a test for memory allocation
|
Add a test for memory allocation
|
Python
|
mit
|
nineties/py-videocore
|
Add a test for memory allocation
|
'Test of memory allocation'
import numpy as np
from nose.tools import assert_raises
from videocore.assembler import qpu, assemble
from videocore.driver import Driver, DriverError, \
DEFAULT_DATA_AREA_SIZE, DEFAULT_CODE_AREA_SIZE
def test_maximum_alloc():
with Driver() as drv:
size = DEFAULT_DATA_AREA_SIZE
a = drv.alloc(shape = size, dtype = np.uint8)
assert size == a.nbytes
def test_too_large_alloc():
with Driver() as drv:
size = DEFAULT_DATA_AREA_SIZE + 1
assert_raises(DriverError, drv.alloc, shape = size, dtype = np.uint8)
@qpu
def one_nop(asm):
nop()
def test_maximum_code():
with Driver() as drv:
code_one_nop = assemble(one_nop)
code = code_one_nop * (DEFAULT_CODE_AREA_SIZE // 8)
assert len(code) == DEFAULT_CODE_AREA_SIZE
prog = drv.program(code)
assert prog.size == DEFAULT_CODE_AREA_SIZE
def test_too_large_code():
with Driver() as drv:
code_one_nop = assemble(one_nop)
code = code_one_nop * (DEFAULT_CODE_AREA_SIZE // 8 + 1)
assert_raises(DriverError, drv.program, code)
|
<commit_before><commit_msg>Add a test for memory allocation<commit_after>
|
'Test of memory allocation'
import numpy as np
from nose.tools import assert_raises
from videocore.assembler import qpu, assemble
from videocore.driver import Driver, DriverError, \
DEFAULT_DATA_AREA_SIZE, DEFAULT_CODE_AREA_SIZE
def test_maximum_alloc():
with Driver() as drv:
size = DEFAULT_DATA_AREA_SIZE
a = drv.alloc(shape = size, dtype = np.uint8)
assert size == a.nbytes
def test_too_large_alloc():
with Driver() as drv:
size = DEFAULT_DATA_AREA_SIZE + 1
assert_raises(DriverError, drv.alloc, shape = size, dtype = np.uint8)
@qpu
def one_nop(asm):
nop()
def test_maximum_code():
with Driver() as drv:
code_one_nop = assemble(one_nop)
code = code_one_nop * (DEFAULT_CODE_AREA_SIZE // 8)
assert len(code) == DEFAULT_CODE_AREA_SIZE
prog = drv.program(code)
assert prog.size == DEFAULT_CODE_AREA_SIZE
def test_too_large_code():
with Driver() as drv:
code_one_nop = assemble(one_nop)
code = code_one_nop * (DEFAULT_CODE_AREA_SIZE // 8 + 1)
assert_raises(DriverError, drv.program, code)
|
Add a test for memory allocation'Test of memory allocation'
import numpy as np
from nose.tools import assert_raises
from videocore.assembler import qpu, assemble
from videocore.driver import Driver, DriverError, \
DEFAULT_DATA_AREA_SIZE, DEFAULT_CODE_AREA_SIZE
def test_maximum_alloc():
with Driver() as drv:
size = DEFAULT_DATA_AREA_SIZE
a = drv.alloc(shape = size, dtype = np.uint8)
assert size == a.nbytes
def test_too_large_alloc():
with Driver() as drv:
size = DEFAULT_DATA_AREA_SIZE + 1
assert_raises(DriverError, drv.alloc, shape = size, dtype = np.uint8)
@qpu
def one_nop(asm):
nop()
def test_maximum_code():
with Driver() as drv:
code_one_nop = assemble(one_nop)
code = code_one_nop * (DEFAULT_CODE_AREA_SIZE // 8)
assert len(code) == DEFAULT_CODE_AREA_SIZE
prog = drv.program(code)
assert prog.size == DEFAULT_CODE_AREA_SIZE
def test_too_large_code():
with Driver() as drv:
code_one_nop = assemble(one_nop)
code = code_one_nop * (DEFAULT_CODE_AREA_SIZE // 8 + 1)
assert_raises(DriverError, drv.program, code)
|
<commit_before><commit_msg>Add a test for memory allocation<commit_after>'Test of memory allocation'
import numpy as np
from nose.tools import assert_raises
from videocore.assembler import qpu, assemble
from videocore.driver import Driver, DriverError, \
DEFAULT_DATA_AREA_SIZE, DEFAULT_CODE_AREA_SIZE
def test_maximum_alloc():
with Driver() as drv:
size = DEFAULT_DATA_AREA_SIZE
a = drv.alloc(shape = size, dtype = np.uint8)
assert size == a.nbytes
def test_too_large_alloc():
with Driver() as drv:
size = DEFAULT_DATA_AREA_SIZE + 1
assert_raises(DriverError, drv.alloc, shape = size, dtype = np.uint8)
@qpu
def one_nop(asm):
nop()
def test_maximum_code():
with Driver() as drv:
code_one_nop = assemble(one_nop)
code = code_one_nop * (DEFAULT_CODE_AREA_SIZE // 8)
assert len(code) == DEFAULT_CODE_AREA_SIZE
prog = drv.program(code)
assert prog.size == DEFAULT_CODE_AREA_SIZE
def test_too_large_code():
with Driver() as drv:
code_one_nop = assemble(one_nop)
code = code_one_nop * (DEFAULT_CODE_AREA_SIZE // 8 + 1)
assert_raises(DriverError, drv.program, code)
|
|
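The DEFAULT_CODE_AREA_SIZE // 8 arithmetic in these tests relies on each assembled QPU instruction being 8 bytes wide (VideoCore IV instructions are 64-bit). A quick sanity check of that assumption using the same helpers:

from videocore.assembler import qpu, assemble

@qpu
def one_nop(asm):
    nop()

# A single nop should assemble to exactly one 64-bit instruction.
assert len(assemble(one_nop)) == 8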
8945ecc79c0233147a9c516b2530be958503841c
|
tests/test_typing.py
|
tests/test_typing.py
|
"""tests/test_typing.py.
Tests to ensure hug interacts as expected with the Python3.5+ typing module
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from typing import Optional
import hug
def test_annotation_support(hug_api):
"""Test to ensure it is possible to use a typing object to annotate a hug endpoint"""
@hug.get(api=hug_api)
def echo(text: Optional[str]=None):
return text or 'missing'
assert hug.test.get(hug_api, 'echo').data == 'missing'
assert hug.test.get(hug_api, 'echo', text='not missing').data == 'not missing'
|
Add test for typing module
|
Add test for typing module
|
Python
|
mit
|
timothycrosley/hug,timothycrosley/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,timothycrosley/hug,MuhammadAlkarouri/hug
|
Add test for typing module
|
"""tests/test_typing.py.
Tests to ensure hug interacts as expected with the Python3.5+ typing module
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from typing import Optional
import hug
def test_annotation_support(hug_api):
"""Test to ensure it is possible to use a typing object to annotate a hug endpoint"""
@hug.get(api=hug_api)
def echo(text: Optional[str]=None):
return text or 'missing'
assert hug.test.get(hug_api, 'echo').data == 'missing'
assert hug.test.get(hug_api, 'echo', text='not missing').data == 'not missing'
|
<commit_before><commit_msg>Add test for typing module<commit_after>
|
"""tests/test_typing.py.
Tests to ensure hug interacts as expected with the Python3.5+ typing module
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from typing import Optional
import hug
def test_annotation_support(hug_api):
"""Test to ensure it is possible to use a typing object to annotate a hug endpoint"""
@hug.get(api=hug_api)
def echo(text: Optional[str]=None):
return text or 'missing'
assert hug.test.get(hug_api, 'echo').data == 'missing'
assert hug.test.get(hug_api, 'echo', text='not missing').data == 'not missing'
|
Add test for typing module"""tests/test_typing.py.
Tests to ensure hug interacts as expected with the Python3.5+ typing module
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from typing import Optional
import hug
def test_annotation_support(hug_api):
"""Test to ensure it is possible to use a typing object to annotate a hug endpoint"""
@hug.get(api=hug_api)
def echo(text: Optional[str]=None):
return text or 'missing'
assert hug.test.get(hug_api, 'echo').data == 'missing'
assert hug.test.get(hug_api, 'echo', text='not missing').data == 'not missing'
|
<commit_before><commit_msg>Add test for typing module<commit_after>"""tests/test_typing.py.
Tests to ensure hug interacts as expected with the Python3.5+ typing module
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from typing import Optional
import hug
def test_annotation_support(hug_api):
"""Test to ensure it is possible to use a typing object to annotate a hug endpoint"""
@hug.get(api=hug_api)
def echo(text: Optional[str]=None):
return text or 'missing'
assert hug.test.get(hug_api, 'echo').data == 'missing'
assert hug.test.get(hug_api, 'echo', text='not missing').data == 'not missing'
|
|
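For context, annotations such as Optional[str] are ordinary objects that a framework can inspect at runtime. A minimal standard-library illustration (this is not hug's internal code):

import typing

def echo(text: typing.Optional[str] = None):
    return text or 'missing'

hints = typing.get_type_hints(echo)
# Optional[str] is shorthand for Union[str, None].
assert hints['text'] == typing.Optional[str]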
418e948a3ac3708f96d2137553e81480e8d01d1b
|
spdx/tv_to_rdf.py
|
spdx/tv_to_rdf.py
|
#!/usr/bin/env python
# Copyright (C) 2017 BMW AG
# Author: Thomas Hafner
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import codecs
from spdx.parsers.tagvalue import Parser
from spdx.parsers.loggers import StandardLogger
from spdx.parsers.tagvaluebuilders import Builder
from spdx.writers.rdf import write_document
def tv_to_rdf(infile_name, outfile_name):
"""Converts a SPDX file from tag/value format to RDF format."""
parser = Parser(Builder(), StandardLogger())
parser.build()
with open(infile_name) as infile:
data = infile.read()
document, error = parser.parse(data)
if not error:
with open(outfile_name, mode='w') as outfile:
write_document(document, outfile)
else:
print 'Errors encountered while parsing tag/value file.'
messages = []
document.validate(messages)
print '\n'.join(messages)
if __name__ == '__main__':
tv_to_rdf(*sys.argv[1:])
|
Add convenience function to convert from tag/value format to RDF.
|
Add convenience function to convert from tag/value format to RDF.
Signed-off-by: Philippe Ombredanne <ca95c4a6a4931f366cbdaf5878c5016609417d37@nexb.com>
|
Python
|
apache-2.0
|
spdx/tools-python
|
Add convenience function to convert from tag/value format to RDF.
Signed-off-by: Philippe Ombredanne <ca95c4a6a4931f366cbdaf5878c5016609417d37@nexb.com>
|
#!/usr/bin/env python
# Copyright (C) 2017 BMW AG
# Author: Thomas Hafner
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import codecs
from spdx.parsers.tagvalue import Parser
from spdx.parsers.loggers import StandardLogger
from spdx.parsers.tagvaluebuilders import Builder
from spdx.writers.rdf import write_document
def tv_to_rdf(infile_name, outfile_name):
"""Converts a SPDX file from tag/value format to RDF format."""
parser = Parser(Builder(), StandardLogger())
parser.build()
with open(infile_name) as infile:
data = infile.read()
document, error = parser.parse(data)
if not error:
with open(outfile_name, mode='w') as outfile:
write_document(document, outfile)
else:
print 'Errors encountered while parsing tag/value file.'
messages = []
document.validate(messages)
print '\n'.join(messages)
if __name__ == '__main__':
tv_to_rdf(*sys.argv[1:])
|
<commit_before><commit_msg>Add convenience function to convert from tag/value format to RDF.
Signed-off-by: Philippe Ombredanne <ca95c4a6a4931f366cbdaf5878c5016609417d37@nexb.com><commit_after>
|
#!/usr/bin/env python
# Copyright (C) 2017 BMW AG
# Author: Thomas Hafner
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import codecs
from spdx.parsers.tagvalue import Parser
from spdx.parsers.loggers import StandardLogger
from spdx.parsers.tagvaluebuilders import Builder
from spdx.writers.rdf import write_document
def tv_to_rdf(infile_name, outfile_name):
"""Converts a SPDX file from tag/value format to RDF format."""
parser = Parser(Builder(), StandardLogger())
parser.build()
with open(infile_name) as infile:
data = infile.read()
document, error = parser.parse(data)
if not error:
with open(outfile_name, mode='w') as outfile:
write_document(document, outfile)
else:
print 'Errors encountered while parsing tag/value file.'
messages = []
document.validate(messages)
print '\n'.join(messages)
if __name__ == '__main__':
tv_to_rdf(*sys.argv[1:])
|
Add convenience function to convert from tag/value format to RDF.
Signed-off-by: Philippe Ombredanne <ca95c4a6a4931f366cbdaf5878c5016609417d37@nexb.com>#!/usr/bin/env python
# Copyright (C) 2017 BMW AG
# Author: Thomas Hafner
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import codecs
from spdx.parsers.tagvalue import Parser
from spdx.parsers.loggers import StandardLogger
from spdx.parsers.tagvaluebuilders import Builder
from spdx.writers.rdf import write_document
def tv_to_rdf(infile_name, outfile_name):
"""Converts a SPDX file from tag/value format to RDF format."""
parser = Parser(Builder(), StandardLogger())
parser.build()
with open(infile_name) as infile:
data = infile.read()
document, error = parser.parse(data)
if not error:
with open(outfile_name, mode='w') as outfile:
write_document(document, outfile)
else:
print 'Errors encountered while parsing tag/value file.'
messages = []
document.validate(messages)
print '\n'.join(messages)
if __name__ == '__main__':
tv_to_rdf(*sys.argv[1:])
|
<commit_before><commit_msg>Add convenience function to convert from tag/value format to RDF.
Signed-off-by: Philippe Ombredanne <ca95c4a6a4931f366cbdaf5878c5016609417d37@nexb.com><commit_after>#!/usr/bin/env python
# Copyright (C) 2017 BMW AG
# Author: Thomas Hafner
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import codecs
from spdx.parsers.tagvalue import Parser
from spdx.parsers.loggers import StandardLogger
from spdx.parsers.tagvaluebuilders import Builder
from spdx.writers.rdf import write_document
def tv_to_rdf(infile_name, outfile_name):
"""Converts a SPDX file from tag/value format to RDF format."""
parser = Parser(Builder(), StandardLogger())
parser.build()
with open(infile_name) as infile:
data = infile.read()
document, error = parser.parse(data)
if not error:
with open(outfile_name, mode='w') as outfile:
write_document(document, outfile)
else:
print 'Errors encountered while parsing tag/value file.'
messages = []
document.validate(messages)
print '\n'.join(messages)
if __name__ == '__main__':
tv_to_rdf(*sys.argv[1:])
|
|
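Because the module guards on __main__, the converter can be used either from the command line or programmatically; a small sketch of the latter, with placeholder file names:

from spdx.tv_to_rdf import tv_to_rdf

# 'document.spdx' and 'document.rdf' are placeholders for real paths.
tv_to_rdf('document.spdx', 'document.rdf')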
4f82eae6efab831f6a6bd1e6c4a2ee28b452a2cd
|
tests/test_local_sync_client.py
|
tests/test_local_sync_client.py
|
# -*- coding: utf-8 -*-
import os
import tempfile
from s3backup.local_sync_client import LocalSyncClient
class TestLocalSyncClient(object):
def test_keys(self):
local_dir = tempfile.mkdtemp()
for object_name in ('foo', 'bar'):
file_name = '{}/{}'.format(local_dir, object_name)
with open(file_name, 'w'):
os.utime(file_name, None)
sync_client = LocalSyncClient(local_dir)
assert set(sync_client.keys()) == {'foo', 'bar'}
|
Add basic test for LocalSyncClient
|
Add basic test for LocalSyncClient
|
Python
|
mit
|
MichaelAquilina/s3backup,MichaelAquilina/s3backup
|
Add basic test for LocalSyncClient
|
# -*- coding: utf-8 -*-
import os
import tempfile
from s3backup.local_sync_client import LocalSyncClient
class TestLocalSyncClient(object):
def test_keys(self):
local_dir = tempfile.mkdtemp()
for object_name in ('foo', 'bar'):
file_name = '{}/{}'.format(local_dir, object_name)
with open(file_name, 'w'):
os.utime(file_name, None)
sync_client = LocalSyncClient(local_dir)
assert set(sync_client.keys()) == {'foo', 'bar'}
|
<commit_before><commit_msg>Add basic test for LocalSyncClient<commit_after>
|
# -*- coding: utf-8 -*-
import os
import tempfile
from s3backup.local_sync_client import LocalSyncClient
class TestLocalSyncClient(object):
def test_keys(self):
local_dir = tempfile.mkdtemp()
for object_name in ('foo', 'bar'):
file_name = '{}/{}'.format(local_dir, object_name)
with open(file_name, 'w'):
os.utime(file_name, None)
sync_client = LocalSyncClient(local_dir)
assert set(sync_client.keys()) == {'foo', 'bar'}
|
Add basic test for LocalSyncClient# -*- coding: utf-8 -*-
import os
import tempfile
from s3backup.local_sync_client import LocalSyncClient
class TestLocalSyncClient(object):
def test_keys(self):
local_dir = tempfile.mkdtemp()
for object_name in ('foo', 'bar'):
file_name = '{}/{}'.format(local_dir, object_name)
with open(file_name, 'w'):
os.utime(file_name, None)
sync_client = LocalSyncClient(local_dir)
assert set(sync_client.keys()) == {'foo', 'bar'}
|
<commit_before><commit_msg>Add basic test for LocalSyncClient<commit_after># -*- coding: utf-8 -*-
import os
import tempfile
from s3backup.local_sync_client import LocalSyncClient
class TestLocalSyncClient(object):
def test_keys(self):
local_dir = tempfile.mkdtemp()
for object_name in ('foo', 'bar'):
file_name = '{}/{}'.format(local_dir, object_name)
with open(file_name, 'w'):
os.utime(file_name, None)
sync_client = LocalSyncClient(local_dir)
assert set(sync_client.keys()) == {'foo', 'bar'}
|
|
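The LocalSyncClient under test is not part of this record; a minimal sketch that would satisfy the test, assuming keys() simply lists file names in the directory (the real s3backup class likely tracks more state):

import os

class LocalSyncClient(object):
    def __init__(self, local_dir):
        self.local_dir = local_dir

    def keys(self):
        # One key per file in the managed directory.
        return os.listdir(self.local_dir)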
80d410dc40250ef9da1f733551bd04cca84f3723
|
sri21_vs_vmx.py
|
sri21_vs_vmx.py
|
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Load the machine
ap.machines.load('SRI21')
sri21_pvs = set()
# Find unique pv names for SRI21
BPMS = ap.getElements('BPM')
for BPM in range(len(BPMS)):
pv_name = BPMS[BPM].pv()[0].split(':')[0]
sri21_pvs.add(pv_name)
# Load the machine
ap.machines.load('VMX')
vmx_pvs = set()
# Find unique pv names for VMX
BPMS = ap.getElements('BPM')
for BPM in range(len(BPMS)):
pv_name = BPMS[BPM].pv()[0].split(':')[0]
vmx_pvs.add(pv_name)
print 'All PVs in \'SRI21\' that are not in \'VMX\''
print sri21_pvs - vmx_pvs
print 'All PVs in \'VMX\' that are not in \'SRI21\''
print vmx_pvs - sri21_pvs
|
Modify caget method to convert to values using a list
|
Modify caget method to convert to values using a list
Also refactoring some of the variable names.
|
Python
|
apache-2.0
|
razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects
|
Modify caget method to convert to values using a list
Also refactoring some of the variable names.
|
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Load the machine
ap.machines.load('SRI21')
sri21_pvs = set()
# Find unique pv names for SRI21
BPMS = ap.getElements('BPM')
for BPM in range(len(BPMS)):
pv_name = BPMS[BPM].pv()[0].split(':')[0]
sri21_pvs.add(pv_name)
# Load the machine
ap.machines.load('VMX')
vmx_pvs = set()
# Find unique pv names for VMX
BPMS = ap.getElements('BPM')
for BPM in range(len(BPMS)):
pv_name = BPMS[BPM].pv()[0].split(':')[0]
vmx_pvs.add(pv_name)
print 'All PVs in \'SRI21\' that are not in \'VMX\''
print sri21_pvs - vmx_pvs
print 'All PVs in \'VMX\' that are not in \'SRI21\''
print vmx_pvs - sri21_pvs
|
<commit_before><commit_msg>Modify caget method to convert to values using a list
Also refactoring some of the variable names.<commit_after>
|
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Load the machine
ap.machines.load('SRI21')
sri21_pvs = set()
# Find unique pv names for SRI21
BPMS = ap.getElements('BPM')
for BPM in range(len(BPMS)):
pv_name = BPMS[BPM].pv()[0].split(':')[0]
sri21_pvs.add(pv_name)
# Load the machine
ap.machines.load('VMX')
vmx_pvs = set()
# Find unique pv names for VMX
BPMS = ap.getElements('BPM')
for BPM in range(len(BPMS)):
pv_name = BPMS[BPM].pv()[0].split(':')[0]
vmx_pvs.add(pv_name)
print 'All PVs in \'SRI21\' that are not in \'VMX\''
print sri21_pvs - vmx_pvs
print 'All PVs in \'VMX\' that are not in \'SRI21\''
print vmx_pvs - sri21_pvs
|
Modify caget method to convert to values using a list
Also refactoring some of the variable names.import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Load the machine
ap.machines.load('SRI21')
sri21_pvs = set()
# Find unique pv names for SRI21
BPMS = ap.getElements('BPM')
for BPM in range(len(BPMS)):
pv_name = BPMS[BPM].pv()[0].split(':')[0]
sri21_pvs.add(pv_name)
# Load the machine
ap.machines.load('VMX')
vmx_pvs = set()
# Find unique pv names for VMX
BPMS = ap.getElements('BPM')
for BPM in range(len(BPMS)):
pv_name = BPMS[BPM].pv()[0].split(':')[0]
vmx_pvs.add(pv_name)
print 'All PVs in \'SRI21\' that are not in \'VMX\''
print sri21_pvs - vmx_pvs
print 'All PVs in \'VMX\' that are not in \'SRI21\''
print vmx_pvs - sri21_pvs
|
<commit_before><commit_msg>Modify caget method to convert to values using a list
Also refactoring some of the variable names.<commit_after>import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Load the machine
ap.machines.load('SRI21')
sri21_pvs = set()
# Find unique pv names for SRI21
BPMS = ap.getElements('BPM')
for BPM in range(len(BPMS)):
pv_name = BPMS[BPM].pv()[0].split(':')[0]
sri21_pvs.add(pv_name)
# Load the machine
ap.machines.load('VMX')
vmx_pvs = set()
# Find unique pv names for VMX
BPMS = ap.getElements('BPM')
for BPM in range(len(BPMS)):
pv_name = BPMS[BPM].pv()[0].split(':')[0]
vmx_pvs.add(pv_name)
print 'All PVs in \'SRI21\' that are not in \'VMX\''
print sri21_pvs - vmx_pvs
print 'All PVs in \'VMX\' that are not in \'SRI21\''
print vmx_pvs - sri21_pvs
|
|
8307c3f1e580393ffe433ce10d84a09273be11f5
|
tests/correlations/test_utils.py
|
tests/correlations/test_utils.py
|
# -*- coding: utf-8 -*-
import datetime
import pytest
from components.correlations.utils import call_attributes
FIELDS = ['started']
def test_successful_attribute_call():
class ProperlyAttributed(object):
started = datetime.datetime.now()
obj = ProperlyAttributed()
assert call_attributes(obj, FIELDS)
def test_failing_attribute_call():
class ImproperlyAttributed(object):
timestamp = datetime.datetime.now()
obj = ImproperlyAttributed()
with pytest.raises(AttributeError):
call_attributes(obj, FIELDS)
|
Test the call_attribute utility method.
|
Test the call_attribute utility method.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
Test the call_attribute utility method.
|
# -*- coding: utf-8 -*-
import datetime
import pytest
from components.correlations.utils import call_attributes
FIELDS = ['started']
def test_successful_attribute_call():
class ProperlyAttributed(object):
started = datetime.datetime.now()
obj = ProperlyAttributed()
assert call_attributes(obj, FIELDS)
def test_failing_attribute_call():
class ImproperlyAttributed(object):
timestamp = datetime.datetime.now()
obj = ImproperlyAttributed()
with pytest.raises(AttributeError):
call_attributes(obj, FIELDS)
|
<commit_before><commit_msg>Test the call_attribute utility method.<commit_after>
|
# -*- coding: utf-8 -*-
import datetime
import pytest
from components.correlations.utils import call_attributes
FIELDS = ['started']
def test_successful_attribute_call():
class ProperlyAttributed(object):
started = datetime.datetime.now()
obj = ProperlyAttributed()
assert call_attributes(obj, FIELDS)
def test_failing_attribute_call():
class ImproperlyAttributed(object):
timestamp = datetime.datetime.now()
obj = ImproperlyAttributed()
with pytest.raises(AttributeError):
call_attributes(obj, FIELDS)
|
Test the call_attribute utility method.# -*- coding: utf-8 -*-
import datetime
import pytest
from components.correlations.utils import call_attributes
FIELDS = ['started']
def test_successful_attribute_call():
class ProperlyAttributed(object):
started = datetime.datetime.now()
obj = ProperlyAttributed()
assert call_attributes(obj, FIELDS)
def test_failing_attribute_call():
class ImproperlyAttributed(object):
timestamp = datetime.datetime.now()
obj = ImproperlyAttributed()
with pytest.raises(AttributeError):
call_attributes(obj, FIELDS)
|
<commit_before><commit_msg>Test the call_attribute utility method.<commit_after># -*- coding: utf-8 -*-
import datetime
import pytest
from components.correlations.utils import call_attributes
FIELDS = ['started']
def test_successful_attribute_call():
class ProperlyAttributed(object):
started = datetime.datetime.now()
obj = ProperlyAttributed()
assert call_attributes(obj, FIELDS)
def test_failing_attribute_call():
class ImproperlyAttributed(object):
timestamp = datetime.datetime.now()
obj = ImproperlyAttributed()
with pytest.raises(AttributeError):
call_attributes(obj, FIELDS)
|
|
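call_attributes itself is not included in this record. A sketch consistent with both tests, assuming it resolves each field by plain attribute access so that a missing field propagates AttributeError:

def call_attributes(obj, fields):
    # getattr without a default raises AttributeError for missing fields,
    # which is exactly what test_failing_attribute_call expects; present
    # fields yield a non-empty (truthy) list.
    return [getattr(obj, field) for field in fields]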
e40b9fb2ffaf91c11e167f6691738cc523eb9717
|
samples/com/icallinterceptor.py
|
samples/com/icallinterceptor.py
|
import windows
import windows.generated_def as gdef
from windows import winproxy
# POC of ICallInterceptor
# Based on works by Pavel Yosifovich
# http://blogs.microsoft.co.il/pavely/2018/02/28/intercepting-com-objects-with-cogetinterceptor/
# TODO: clean / comment
windows.com.init()
target = gdef.INetFwPolicy2
fakefirewall = gdef.INetFwPolicy2()
interceptor = gdef.ICallInterceptor()
winproxy.CoGetInterceptor(target.IID, None, interceptor.IID, interceptor)
real_firewall = windows.system.network.firewall
class MySink(windows.com.COMImplementation):
IMPLEMENT = gdef.ICallFrameEvents
def OnCall(self, this, frame):
this = gdef.ICallFrameEvents(this) # TODO: auto-translate this ?
frame = gdef.ICallFrame(frame)
print(this)
print(frame)
name = gdef.PWSTR()
name2 = gdef.PWSTR()
frame.GetNames(name, name2)
stack = frame.GetStackLocation()
print(name)
print(name2)
x = gdef.CALLFRAMEPARAMINFO()
ci = gdef.CALLFRAMEINFO()
y = windows.com.ImprovedVariant()
frame.GetParamInfo(1, x)
frame.GetParam(1, y)
frame.GetInfo(ci)
windows.utils.sprint(x)
# vbool = windows.current_process.read_dword(stack + 8)
# You can use this to call the real function :)
frame.Invoke(real_firewall)
frame.SetReturnValue(1234)
print("COM COM MON PYTHON :D")
return 0
xsink = MySink()
interceptor.RegisterSink(xsink)
interceptor.QueryInterface(fakefirewall.IID, fakefirewall)
enabled = gdef.VARIANT_BOOL()
res = fakefirewall.get_FirewallEnabled(2, enabled)
print("return value = {0}".format(res))
print("enabled = {0}".format(enabled))
|
Add quick POC of ICallInterceptor to COM samples
|
Add quick POC of ICallInterceptor to COM samples
|
Python
|
bsd-3-clause
|
hakril/PythonForWindows
|
Add quick POC of ICallInterceptor to COM samples
|
import windows
import windows.generated_def as gdef
from windows import winproxy
# POC of ICallInterceptor
# Based on works by Pavel Yosifovich
# http://blogs.microsoft.co.il/pavely/2018/02/28/intercepting-com-objects-with-cogetinterceptor/
# TODO: clean / comment
windows.com.init()
target = gdef.INetFwPolicy2
fakefirewall = gdef.INetFwPolicy2()
interceptor = gdef.ICallInterceptor()
winproxy.CoGetInterceptor(target.IID, None, interceptor.IID, interceptor)
real_firewall = windows.system.network.firewall
class MySink(windows.com.COMImplementation):
IMPLEMENT = gdef.ICallFrameEvents
def OnCall(self, this, frame):
this = gdef.ICallFrameEvents(this) # TODO: auto-translate this ?
frame = gdef.ICallFrame(frame)
print(this)
print(frame)
name = gdef.PWSTR()
name2 = gdef.PWSTR()
frame.GetNames(name, name2)
stack = frame.GetStackLocation()
print(name)
print(name2)
x = gdef.CALLFRAMEPARAMINFO()
ci = gdef.CALLFRAMEINFO()
y = windows.com.ImprovedVariant()
frame.GetParamInfo(1, x)
frame.GetParam(1, y)
frame.GetInfo(ci)
windows.utils.sprint(x)
# vbool = windows.current_process.read_dword(stack + 8)
# You can use this to call the real function :)
frame.Invoke(real_firewall)
frame.SetReturnValue(1234)
print("COM COM MON PYTHON :D")
return 0
xsink = MySink()
interceptor.RegisterSink(xsink)
interceptor.QueryInterface(fakefirewall.IID, fakefirewall)
enabled = gdef.VARIANT_BOOL()
res = fakefirewall.get_FirewallEnabled(2, enabled)
print("return value = {0}".format(res))
print("enabled = {0}".format(enabled))
|
<commit_before><commit_msg>Add quick POC of ICallInterceptor to COM samples<commit_after>
|
import windows
import windows.generated_def as gdef
from windows import winproxy
# POC of ICallInterceptor
# Based on works by Pavel Yosifovich
# http://blogs.microsoft.co.il/pavely/2018/02/28/intercepting-com-objects-with-cogetinterceptor/
# TODO: clean / comment
windows.com.init()
target = gdef.INetFwPolicy2
fakefirewall = gdef.INetFwPolicy2()
interceptor = gdef.ICallInterceptor()
winproxy.CoGetInterceptor(target.IID, None, interceptor.IID, interceptor)
real_firewall = windows.system.network.firewall
class MySink(windows.com.COMImplementation):
IMPLEMENT = gdef.ICallFrameEvents
def OnCall(self, this, frame):
this = gdef.ICallFrameEvents(this) # TODO: auto-translate this ?
frame = gdef.ICallFrame(frame)
print(this)
print(frame)
name = gdef.PWSTR()
name2 = gdef.PWSTR()
frame.GetNames(name, name2)
stack = frame.GetStackLocation()
print(name)
print(name2)
x = gdef.CALLFRAMEPARAMINFO()
ci = gdef.CALLFRAMEINFO()
y = windows.com.ImprovedVariant()
frame.GetParamInfo(1, x)
frame.GetParam(1, y)
frame.GetInfo(ci)
windows.utils.sprint(x)
# vbool = windows.current_process.read_dword(stack + 8)
# You can use this to call the real function :)
frame.Invoke(real_firewall)
frame.SetReturnValue(1234)
print("COM COM MON PYTHON :D")
return 0
xsink = MySink()
interceptor.RegisterSink(xsink)
interceptor.QueryInterface(fakefirewall.IID, fakefirewall)
enabled = gdef.VARIANT_BOOL()
res = fakefirewall.get_FirewallEnabled(2, enabled)
print("return value = {0}".format(res))
print("enabled = {0}".format(enabled))
|
Add quick POC of ICallInterceptor to COM samplesimport windows
import windows.generated_def as gdef
from windows import winproxy
# POC of ICallInterceptor
# Based on works by Pavel Yosifovich
# http://blogs.microsoft.co.il/pavely/2018/02/28/intercepting-com-objects-with-cogetinterceptor/
# TODO: clean / comment
windows.com.init()
target = gdef.INetFwPolicy2
fakefirewall = gdef.INetFwPolicy2()
interceptor = gdef.ICallInterceptor()
winproxy.CoGetInterceptor(target.IID, None, interceptor.IID, interceptor)
real_firewall = windows.system.network.firewall
class MySink(windows.com.COMImplementation):
IMPLEMENT = gdef.ICallFrameEvents
def OnCall(self, this, frame):
this = gdef.ICallFrameEvents(this) # TODO: auto-translate this ?
frame = gdef.ICallFrame(frame)
print(this)
print(frame)
name = gdef.PWSTR()
name2 = gdef.PWSTR()
frame.GetNames(name, name2)
stack = frame.GetStackLocation()
print(name)
print(name2)
x = gdef.CALLFRAMEPARAMINFO()
ci = gdef.CALLFRAMEINFO()
y = windows.com.ImprovedVariant()
frame.GetParamInfo(1, x)
frame.GetParam(1, y)
frame.GetInfo(ci)
windows.utils.sprint(x)
# vbool = windows.current_process.read_dword(stack + 8)
# You can use this to call the real function :)
frame.Invoke(real_firewall)
frame.SetReturnValue(1234)
print("COM COM MON PYTHON :D")
return 0
xsink = MySink()
interceptor.RegisterSink(xsink)
interceptor.QueryInterface(fakefirewall.IID, fakefirewall)
enabled = gdef.VARIANT_BOOL()
res = fakefirewall.get_FirewallEnabled(2, enabled)
print("return value = {0}".format(res))
print("enabled = {0}".format(enabled))
|
<commit_before><commit_msg>Add quick POC of ICallInterceptor to COM samples<commit_after>import windows
import windows.generated_def as gdef
from windows import winproxy
# POC of ICallInterceptor
# Based on works by Pavel Yosifovich
# http://blogs.microsoft.co.il/pavely/2018/02/28/intercepting-com-objects-with-cogetinterceptor/
# TODO: clean / comment
windows.com.init()
target = gdef.INetFwPolicy2
fakefirewall = gdef.INetFwPolicy2()
interceptor = gdef.ICallInterceptor()
winproxy.CoGetInterceptor(target.IID, None, interceptor.IID, interceptor)
real_firewall = windows.system.network.firewall
class MySink(windows.com.COMImplementation):
IMPLEMENT = gdef.ICallFrameEvents
def OnCall(self, this, frame):
this = gdef.ICallFrameEvents(this) # TODO: auto-translate this ?
frame = gdef.ICallFrame(frame)
print(this)
print(frame)
name = gdef.PWSTR()
name2 = gdef.PWSTR()
frame.GetNames(name, name2)
stack = frame.GetStackLocation()
print(name)
print(name2)
x = gdef.CALLFRAMEPARAMINFO()
ci = gdef.CALLFRAMEINFO()
y = windows.com.ImprovedVariant()
frame.GetParamInfo(1, x)
frame.GetParam(1, y)
frame.GetInfo(ci)
windows.utils.sprint(x)
# vbool = windows.current_process.read_dword(stack + 8)
# You can use this to call the real function :)
frame.Invoke(real_firewall)
frame.SetReturnValue(1234)
print("COM COM MON PYTHON :D")
return 0
xsink = MySink()
interceptor.RegisterSink(xsink)
interceptor.QueryInterface(fakefirewall.IID, fakefirewall)
enabled = gdef.VARIANT_BOOL()
res = fakefirewall.get_FirewallEnabled(2, enabled)
print("return value = {0}".format(res))
print("enabled = {0}".format(enabled))
|
|
23e2fa314ca6e912ad74e7c1bad6005da15d8186
|
scripts/print_view_hierarchy.py
|
scripts/print_view_hierarchy.py
|
"""Prints the current view hierarchy.
Usage: pv
"""
def print_view_hierarchy(debugger, command, result, internal_dict):
debugger.HandleCommand('po [[UIWindow keyWindow] recursiveDescription]')
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
|
Add print view hierarchy command.
|
Add print view hierarchy command.
|
Python
|
mit
|
mrhappyasthma/HappyDebugging,mrhappyasthma/happydebugging
|
Add print view hierarchy command.
|
"""Prints the current view hierarchy.
Usage: pv
"""
def print_view_hierarchy(debugger, command, result, internal_dict):
debugger.HandleCommand('po [[UIWindow keyWindow] recursiveDescription]')
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
|
<commit_before><commit_msg>Add print view hierarchy command.<commit_after>
|
"""Prints the current view hierarchy.
Usage: pv
"""
def print_view_hierarchy(debugger, command, result, internal_dict):
debugger.HandleCommand('po [[UIWindow keyWindow] recursiveDescription]')
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
|
Add print view hierarchy command."""Prints the current view hierarchy.
Usage: pv
"""
def print_view_hierarchy(debugger, command, result, internal_dict):
debugger.HandleCommand('po [[UIWindow keyWindow] recursiveDescription]')
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
|
<commit_before><commit_msg>Add print view hierarchy command.<commit_after>"""Prints the current view hierarchy.
Usage: pv
"""
def print_view_hierarchy(debugger, command, result, internal_dict):
debugger.HandleCommand('po [[UIWindow keyWindow] recursiveDescription]')
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f print_view_hierarchy.print_view_hierarchy pv')
|
|
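A hypothetical LLDB session showing the command in use; the script path is a placeholder:

(lldb) command script import /path/to/print_view_hierarchy.py
(lldb) pv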
22821de77f2c9ca16ed95b7042f8e2e266c6afcb
|
astrobin/management/commands/message_all.py
|
astrobin/management/commands/message_all.py
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
import persistent_messages
class Command(BaseCommand):
help = "Sends a message to all users."
def handle(self, *args, **options):
if len(args) < 2:
print "Need two arbuments: subject and body."
subject = args[0]
body = args[1]
sender = User.objects.get(username = 'astrobin')
for recipient in User.objects.all():
if recipient.username != 'astrobin':
persistent_messages.add_message_without_storage(
recipient,
sender,
persistent_messages.SUCCESS,
body,
subject = subject)
|
Add command to message everybody.
|
Add command to message everybody.
|
Python
|
agpl-3.0
|
astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin
|
Add command to message everybody.
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
import persistent_messages
class Command(BaseCommand):
help = "Sends a message to all users."
def handle(self, *args, **options):
if len(args) < 2:
print "Need two arbuments: subject and body."
subject = args[0]
body = args[1]
sender = User.objects.get(username = 'astrobin')
for recipient in User.objects.all():
if recipient.username != 'astrobin':
persistent_messages.add_message_without_storage(
recipient,
sender,
persistent_messages.SUCCESS,
body,
subject = subject)
|
<commit_before><commit_msg>Add command to message everybody.<commit_after>
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
import persistent_messages
class Command(BaseCommand):
help = "Sends a message to all users."
def handle(self, *args, **options):
if len(args) < 2:
print "Need two arbuments: subject and body."
subject = args[0]
body = args[1]
sender = User.objects.get(username = 'astrobin')
for recipient in User.objects.all():
if recipient.username != 'astrobin':
persistent_messages.add_message_without_storage(
recipient,
sender,
persistent_messages.SUCCESS,
body,
subject = subject)
|
Add command to message everybody.from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
import persistent_messages
class Command(BaseCommand):
help = "Sends a message to all users."
def handle(self, *args, **options):
if len(args) < 2:
print "Need two arbuments: subject and body."
subject = args[0]
body = args[1]
sender = User.objects.get(username = 'astrobin')
for recipient in User.objects.all():
if recipient.username != 'astrobin':
persistent_messages.add_message_without_storage(
recipient,
sender,
persistent_messages.SUCCESS,
body,
subject = subject)
|
<commit_before><commit_msg>Add command to message everybody.<commit_after>from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
import persistent_messages
class Command(BaseCommand):
help = "Sends a message to all users."
def handle(self, *args, **options):
if len(args) < 2:
print "Need two arbuments: subject and body."
subject = args[0]
body = args[1]
sender = User.objects.get(username = 'astrobin')
for recipient in User.objects.all():
if recipient.username != 'astrobin':
persistent_messages.add_message_without_storage(
recipient,
sender,
persistent_messages.SUCCESS,
body,
subject = subject)
|
|
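As a Django management command, the module name doubles as the command name; a hypothetical invocation with placeholder subject and body:

python manage.py message_all "Scheduled maintenance" "AstroBin will be unavailable for one hour tonight."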
df5572a6780fca1d657b622f231edd4e5948613e
|
bot/storage/api.py
|
bot/storage/api.py
|
from bot.storage.async.scheduler import StorageScheduler
from bot.storage.data_source.data_source import StorageDataSource
class StorageApi:
def __init__(self, data_source: StorageDataSource, scheduler: StorageScheduler):
self.data_source = data_source
self.scheduler = scheduler
scheduler.set_context_manager(data_source.context_manager())
self.init()
def init(self):
self._no_result(self.__init, "init")
def __init(self):
self.data_source.init()
def _no_result(self, func: callable, name: str):
self.scheduler.schedule_no_result(func, name)
def _with_result(self, func: callable, name: str):
return self.scheduler.schedule_with_result(func, name)
|
Create StorageApi inspired by clock-bot one
|
Create StorageApi inspired by clock-bot one
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
Create StorageApi inspired by clock-bot one
|
from bot.storage.async.scheduler import StorageScheduler
from bot.storage.data_source.data_source import StorageDataSource
class StorageApi:
def __init__(self, data_source: StorageDataSource, scheduler: StorageScheduler):
self.data_source = data_source
self.scheduler = scheduler
scheduler.set_context_manager(data_source.context_manager())
self.init()
def init(self):
self._no_result(self.__init, "init")
def __init(self):
self.data_source.init()
def _no_result(self, func: callable, name: str):
self.scheduler.schedule_no_result(func, name)
def _with_result(self, func: callable, name: str):
return self.scheduler.schedule_with_result(func, name)
|
<commit_before><commit_msg>Create StorageApi inspired by clock-bot one<commit_after>
|
from bot.storage.async.scheduler import StorageScheduler
from bot.storage.data_source.data_source import StorageDataSource
class StorageApi:
def __init__(self, data_source: StorageDataSource, scheduler: StorageScheduler):
self.data_source = data_source
self.scheduler = scheduler
scheduler.set_context_manager(data_source.context_manager())
self.init()
def init(self):
self._no_result(self.__init, "init")
def __init(self):
self.data_source.init()
def _no_result(self, func: callable, name: str):
self.scheduler.schedule_no_result(func, name)
def _with_result(self, func: callable, name: str):
return self.scheduler.schedule_with_result(func, name)
|
Create StorageApi inspired by clock-bot one
from bot.storage.async.scheduler import StorageScheduler
from bot.storage.data_source.data_source import StorageDataSource
class StorageApi:
def __init__(self, data_source: StorageDataSource, scheduler: StorageScheduler):
self.data_source = data_source
self.scheduler = scheduler
scheduler.set_context_manager(data_source.context_manager())
self.init()
def init(self):
self._no_result(self.__init, "init")
def __init(self):
self.data_source.init()
def _no_result(self, func: callable, name: str):
self.scheduler.schedule_no_result(func, name)
def _with_result(self, func: callable, name: str):
return self.scheduler.schedule_with_result(func, name)
|
<commit_before><commit_msg>Create StorageApi inspired by clock-bot one<commit_after>from bot.storage.async.scheduler import StorageScheduler
from bot.storage.data_source.data_source import StorageDataSource
class StorageApi:
def __init__(self, data_source: StorageDataSource, scheduler: StorageScheduler):
self.data_source = data_source
self.scheduler = scheduler
scheduler.set_context_manager(data_source.context_manager())
self.init()
def init(self):
self._no_result(self.__init, "init")
def __init(self):
self.data_source.init()
def _no_result(self, func: callable, name: str):
self.scheduler.schedule_no_result(func, name)
def _with_result(self, func: callable, name: str):
return self.scheduler.schedule_with_result(func, name)
|
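A minimal usage sketch for StorageApi, with synchronous test doubles standing in for the real StorageScheduler and StorageDataSource (both fakes are ours, not part of the commit; the real scheduler presumably defers work instead of running it inline):
from bot.storage.api import StorageApi

class FakeScheduler:
    # stands in for bot.storage.async.scheduler.StorageScheduler
    def set_context_manager(self, context_manager):
        self.context_manager = context_manager
    def schedule_no_result(self, func, name):
        func()  # run inline for the sketch
    def schedule_with_result(self, func, name):
        return func()

class FakeDataSource:
    # stands in for bot.storage.data_source.data_source.StorageDataSource
    def init(self):
        self.ready = True
    def context_manager(self):
        return None

api = StorageApi(FakeDataSource(), FakeScheduler())  # __init__ schedules init()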
|
233290d578aad1fde80999e75ccc29353ddaf834
|
zerver/management/commands/purge_queue.py
|
zerver/management/commands/purge_queue.py
|
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from django.core.management import CommandError
from zerver.lib.queue import SimpleQueueClient
import logging
class Command(BaseCommand):
args = "<queue name>"
help = "Discards all messages from the given queue"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Wrong number of arguments")
queue_name = args[0]
queue = SimpleQueueClient()
queue.drain_queue(queue_name)
|
Add a management command to purge a rabbitmq queue
|
Add a management command to purge a rabbitmq queue
This is useful in debugging when you just want to discard all the
messages in a queue because they have the wrong structure.
(imported from commit 8559ac74f11841430b4d0c801d5506ebcb74c3eb)
|
Python
|
apache-2.0
|
kokoar/zulip,codeKonami/zulip,developerfm/zulip,zwily/zulip,dattatreya303/zulip,aliceriot/zulip,glovebx/zulip,blaze225/zulip,dawran6/zulip,littledogboy/zulip,JPJPJPOPOP/zulip,bastianh/zulip,littledogboy/zulip,wdaher/zulip,glovebx/zulip,grave-w-grave/zulip,brockwhittaker/zulip,souravbadami/zulip,akuseru/zulip,RobotCaleb/zulip,rishig/zulip,xuanhan863/zulip,guiquanz/zulip,jeffcao/zulip,DazWorrall/zulip,amyliu345/zulip,proliming/zulip,brainwane/zulip,mohsenSy/zulip,easyfmxu/zulip,suxinde2009/zulip,dhcrzf/zulip,dwrpayne/zulip,rishig/zulip,mdavid/zulip,ipernet/zulip,KingxBanana/zulip,PaulPetring/zulip,natanovia/zulip,tommyip/zulip,shubhamdhama/zulip,moria/zulip,praveenaki/zulip,ahmadassaf/zulip,dhcrzf/zulip,LeeRisk/zulip,aakash-cr7/zulip,developerfm/zulip,hafeez3000/zulip,hj3938/zulip,codeKonami/zulip,timabbott/zulip,jeffcao/zulip,j831/zulip,Batterfii/zulip,littledogboy/zulip,firstblade/zulip,Frouk/zulip,MariaFaBella85/zulip,MariaFaBella85/zulip,zofuthan/zulip,stamhe/zulip,jainayush975/zulip,zwily/zulip,deer-hope/zulip,xuanhan863/zulip,sup95/zulip,thomasboyt/zulip,qq1012803704/zulip,jeffcao/zulip,yocome/zulip,susansls/zulip,mansilladev/zulip,jrowan/zulip,swinghu/zulip,zachallaun/zulip,mansilladev/zulip,KJin99/zulip,timabbott/zulip,hustlzp/zulip,Suninus/zulip,lfranchi/zulip,zulip/zulip,natanovia/zulip,aliceriot/zulip,glovebx/zulip,wangdeshui/zulip,yuvipanda/zulip,proliming/zulip,umkay/zulip,Qgap/zulip,avastu/zulip,shubhamdhama/zulip,DazWorrall/zulip,yocome/zulip,wweiradio/zulip,bastianh/zulip,suxinde2009/zulip,showell/zulip,synicalsyntax/zulip,hengqujushi/zulip,MayB/zulip,niftynei/zulip,willingc/zulip,proliming/zulip,jeffcao/zulip,umkay/zulip,jonesgithub/zulip,ipernet/zulip,schatt/zulip,suxinde2009/zulip,atomic-labs/zulip,huangkebo/zulip,sonali0901/zulip,shaunstanislaus/zulip,dwrpayne/zulip,christi3k/zulip,christi3k/zulip,wavelets/zulip,synicalsyntax/zulip,zacps/zulip,mohsenSy/zulip,babbage/zulip,m1ssou/zulip,suxinde2009/zulip,technicalpickles/zulip,synicalsyntax/zulip,luyifan/zulip,shubhamdhama/zulip,Vallher/zulip,moria/zulip,bssrdf/zulip,paxapy/zulip,alliejones/zulip,LAndreas/zulip,JanzTam/zulip,schatt/zulip,wangdeshui/zulip,kou/zulip,ryansnowboarder/zulip,bitemyapp/zulip,jeffcao/zulip,rht/zulip,themass/zulip,Diptanshu8/zulip,SmartPeople/zulip,saitodisse/zulip,developerfm/zulip,tiansiyuan/zulip,bastianh/zulip,vabs22/zulip,qq1012803704/zulip,firstblade/zulip,hayderimran7/zulip,Vallher/zulip,tiansiyuan/zulip,arpitpanwar/zulip,aps-sids/zulip,Diptanshu8/zulip,noroot/zulip,KingxBanana/zulip,Qgap/zulip,nicholasbs/zulip,jphilipsen05/zulip,alliejones/zulip,gkotian/zulip,wangdeshui/zulip,MayB/zulip,KJin99/zulip,itnihao/zulip,shrikrishnaholla/zulip,pradiptad/zulip,ApsOps/zulip,amyliu345/zulip,willingc/zulip,Frouk/zulip,suxinde2009/zulip,luyifan/zulip,dnmfarrell/zulip,akuseru/zulip,johnny9/zulip,hafeez3000/zulip,bowlofstew/zulip,reyha/zulip,akuseru/zulip,sharmaeklavya2/zulip,amanharitsh123/zulip,dxq-git/zulip,johnnygaddarr/zulip,calvinleenyc/zulip,wavelets/zulip,kaiyuanheshang/zulip,zwily/zulip,glovebx/zulip,firstblade/zulip,gkotian/zulip,amallia/zulip,dxq-git/zulip,hustlzp/zulip,hackerkid/zulip,swinghu/zulip,PhilSk/zulip,andersk/zulip,jeffcao/zulip,itnihao/zulip,schatt/zulip,JanzTam/zulip,yuvipanda/zulip,andersk/zulip,jonesgithub/zulip,wweiradio/zulip,AZtheAsian/zulip,mahim97/zulip,samatdav/zulip,aps-sids/zulip,so0k/zulip,amanharitsh123/zulip,praveenaki/zulip,so0k/zulip,babbage/zulip,ApsOps/zulip,paxapy/zulip,xuxiao/zulip,jeffcao/zulip,umkay/zulip,zorojean/zulip,EasonYi/zulip,j831/zulip,seapasulli/zulip,k
okoar/zulip,grave-w-grave/zulip,umkay/zulip,wweiradio/zulip,vakila/zulip,rishig/zulip,noroot/zulip,rishig/zulip,shaunstanislaus/zulip,isht3/zulip,bitemyapp/zulip,hayderimran7/zulip,reyha/zulip,ipernet/zulip,showell/zulip,vikas-parashar/zulip,grave-w-grave/zulip,aliceriot/zulip,Qgap/zulip,shrikrishnaholla/zulip,mansilladev/zulip,SmartPeople/zulip,dotcool/zulip,verma-varsha/zulip,dwrpayne/zulip,hustlzp/zulip,mohsenSy/zulip,m1ssou/zulip,SmartPeople/zulip,JanzTam/zulip,DazWorrall/zulip,proliming/zulip,JPJPJPOPOP/zulip,zofuthan/zulip,babbage/zulip,hengqujushi/zulip,rishig/zulip,vikas-parashar/zulip,EasonYi/zulip,firstblade/zulip,shaunstanislaus/zulip,MariaFaBella85/zulip,dhcrzf/zulip,zofuthan/zulip,guiquanz/zulip,armooo/zulip,saitodisse/zulip,bowlofstew/zulip,Gabriel0402/zulip,kokoar/zulip,gigawhitlocks/zulip,jonesgithub/zulip,jackrzhang/zulip,joyhchen/zulip,hustlzp/zulip,ericzhou2008/zulip,tdr130/zulip,wdaher/zulip,peguin40/zulip,karamcnair/zulip,yocome/zulip,udxxabp/zulip,jimmy54/zulip,thomasboyt/zulip,sup95/zulip,shrikrishnaholla/zulip,swinghu/zulip,AZtheAsian/zulip,shaunstanislaus/zulip,souravbadami/zulip,Galexrt/zulip,themass/zulip,PhilSk/zulip,amallia/zulip,esander91/zulip,aliceriot/zulip,vakila/zulip,andersk/zulip,proliming/zulip,jackrzhang/zulip,souravbadami/zulip,gkotian/zulip,Frouk/zulip,thomasboyt/zulip,dattatreya303/zulip,sonali0901/zulip,mahim97/zulip,Juanvulcano/zulip,swinghu/zulip,ApsOps/zulip,zorojean/zulip,dhcrzf/zulip,hustlzp/zulip,nicholasbs/zulip,zulip/zulip,ahmadassaf/zulip,sharmaeklavya2/zulip,Batterfii/zulip,hj3938/zulip,aliceriot/zulip,vikas-parashar/zulip,dwrpayne/zulip,he15his/zulip,arpitpanwar/zulip,dattatreya303/zulip,ApsOps/zulip,armooo/zulip,peiwei/zulip,tommyip/zulip,andersk/zulip,christi3k/zulip,Juanvulcano/zulip,dnmfarrell/zulip,suxinde2009/zulip,JPJPJPOPOP/zulip,eastlhu/zulip,gkotian/zulip,ryanbackman/zulip,udxxabp/zulip,j831/zulip,ericzhou2008/zulip,joyhchen/zulip,mdavid/zulip,arpith/zulip,avastu/zulip,hustlzp/zulip,hackerkid/zulip,AZtheAsian/zulip,kou/zulip,sup95/zulip,aakash-cr7/zulip,jimmy54/zulip,levixie/zulip,sharmaeklavya2/zulip,jerryge/zulip,verma-varsha/zulip,EasonYi/zulip,zhaoweigg/zulip,zofuthan/zulip,paxapy/zulip,zachallaun/zulip,souravbadami/zulip,samatdav/zulip,ikasumiwt/zulip,armooo/zulip,brockwhittaker/zulip,blaze225/zulip,wdaher/zulip,ericzhou2008/zulip,Drooids/zulip,deer-hope/zulip,dotcool/zulip,joshisa/zulip,adnanh/zulip,Cheppers/zulip,avastu/zulip,shrikrishnaholla/zulip,joyhchen/zulip,karamcnair/zulip,timabbott/zulip,mahim97/zulip,seapasulli/zulip,dwrpayne/zulip,Galexrt/zulip,esander91/zulip,so0k/zulip,niftynei/zulip,krtkmj/zulip,JPJPJPOPOP/zulip,calvinleenyc/zulip,avastu/zulip,voidException/zulip,amyliu345/zulip,zulip/zulip,amyliu345/zulip,seapasulli/zulip,aps-sids/zulip,Vallher/zulip,zachallaun/zulip,MayB/zulip,joshisa/zulip,timabbott/zulip,timabbott/zulip,atomic-labs/zulip,vakila/zulip,guiquanz/zulip,arpitpanwar/zulip,Vallher/zulip,sharmaeklavya2/zulip,kou/zulip,zulip/zulip,yocome/zulip,voidException/zulip,Suninus/zulip,LAndreas/zulip,xuanhan863/zulip,Gabriel0402/zulip,tbutter/zulip,mdavid/zulip,xuxiao/zulip,showell/zulip,seapasulli/zulip,udxxabp/zulip,LAndreas/zulip,KingxBanana/zulip,he15his/zulip,johnnygaddarr/zulip,natanovia/zulip,guiquanz/zulip,KJin99/zulip,noroot/zulip,jackrzhang/zulip,zachallaun/zulip,tiansiyuan/zulip,ericzhou2008/zulip,MayB/zulip,thomasboyt/zulip,KJin99/zulip,guiquanz/zulip,alliejones/zulip,bastianh/zulip,eastlhu/zulip,Juanvulcano/zulip,hustlzp/zulip,peguin40/zulip,dwrpayne/zulip,fw1121/zulip,Jianchun1/zulip,johnnyg
addarr/zulip,joyhchen/zulip,wavelets/zulip,ufosky-server/zulip,levixie/zulip,lfranchi/zulip,joshisa/zulip,bitemyapp/zulip,adnanh/zulip,stamhe/zulip,bitemyapp/zulip,fw1121/zulip,ufosky-server/zulip,schatt/zulip,jimmy54/zulip,dawran6/zulip,ahmadassaf/zulip,moria/zulip,tiansiyuan/zulip,punchagan/zulip,babbage/zulip,so0k/zulip,huangkebo/zulip,johnny9/zulip,kokoar/zulip,lfranchi/zulip,xuanhan863/zulip,umkay/zulip,krtkmj/zulip,gigawhitlocks/zulip,vikas-parashar/zulip,proliming/zulip,dotcool/zulip,saitodisse/zulip,hayderimran7/zulip,wangdeshui/zulip,technicalpickles/zulip,EasonYi/zulip,eeshangarg/zulip,Galexrt/zulip,brainwane/zulip,tbutter/zulip,pradiptad/zulip,ahmadassaf/zulip,kokoar/zulip,Cheppers/zulip,adnanh/zulip,atomic-labs/zulip,samatdav/zulip,niftynei/zulip,bowlofstew/zulip,mdavid/zulip,hackerkid/zulip,zhaoweigg/zulip,reyha/zulip,Galexrt/zulip,SmartPeople/zulip,dxq-git/zulip,tommyip/zulip,shubhamdhama/zulip,tommyip/zulip,synicalsyntax/zulip,tbutter/zulip,udxxabp/zulip,KingxBanana/zulip,alliejones/zulip,avastu/zulip,themass/zulip,Jianchun1/zulip,calvinleenyc/zulip,Qgap/zulip,yuvipanda/zulip,willingc/zulip,niftynei/zulip,paxapy/zulip,j831/zulip,qq1012803704/zulip,ryansnowboarder/zulip,dawran6/zulip,pradiptad/zulip,zhaoweigg/zulip,ikasumiwt/zulip,suxinde2009/zulip,brockwhittaker/zulip,dattatreya303/zulip,Suninus/zulip,Suninus/zulip,yocome/zulip,arpith/zulip,brockwhittaker/zulip,Drooids/zulip,jessedhillon/zulip,shrikrishnaholla/zulip,shubhamdhama/zulip,huangkebo/zulip,vakila/zulip,ashwinirudrappa/zulip,christi3k/zulip,levixie/zulip,mansilladev/zulip,swinghu/zulip,zacps/zulip,dattatreya303/zulip,xuxiao/zulip,Juanvulcano/zulip,glovebx/zulip,zorojean/zulip,zofuthan/zulip,pradiptad/zulip,levixie/zulip,bluesea/zulip,dhcrzf/zulip,voidException/zulip,tbutter/zulip,krtkmj/zulip,dwrpayne/zulip,arpith/zulip,ryanbackman/zulip,codeKonami/zulip,xuxiao/zulip,willingc/zulip,andersk/zulip,udxxabp/zulip,developerfm/zulip,kaiyuanheshang/zulip,joyhchen/zulip,arpith/zulip,mohsenSy/zulip,zachallaun/zulip,zhaoweigg/zulip,verma-varsha/zulip,stamhe/zulip,vaidap/zulip,mansilladev/zulip,arpitpanwar/zulip,cosmicAsymmetry/zulip,Diptanshu8/zulip,jerryge/zulip,JPJPJPOPOP/zulip,johnnygaddarr/zulip,bluesea/zulip,eeshangarg/zulip,sharmaeklavya2/zulip,arpitpanwar/zulip,dnmfarrell/zulip,RobotCaleb/zulip,firstblade/zulip,wdaher/zulip,KingxBanana/zulip,Galexrt/zulip,ashwinirudrappa/zulip,aps-sids/zulip,Batterfii/zulip,armooo/zulip,alliejones/zulip,blaze225/zulip,wdaher/zulip,zorojean/zulip,he15his/zulip,zwily/zulip,ahmadassaf/zulip,avastu/zulip,m1ssou/zulip,moria/zulip,blaze225/zulip,themass/zulip,sonali0901/zulip,mahim97/zulip,jessedhillon/zulip,jessedhillon/zulip,voidException/zulip,Gabriel0402/zulip,Batterfii/zulip,Jianchun1/zulip,m1ssou/zulip,ipernet/zulip,saitodisse/zulip,umkay/zulip,tdr130/zulip,cosmicAsymmetry/zulip,easyfmxu/zulip,ashwinirudrappa/zulip,jphilipsen05/zulip,EasonYi/zulip,hackerkid/zulip,showell/zulip,voidException/zulip,babbage/zulip,hj3938/zulip,aakash-cr7/zulip,zwily/zulip,eeshangarg/zulip,eeshangarg/zulip,jonesgithub/zulip,amanharitsh123/zulip,Frouk/zulip,dawran6/zulip,vaidap/zulip,glovebx/zulip,kou/zulip,Juanvulcano/zulip,hengqujushi/zulip,itnihao/zulip,peiwei/zulip,levixie/zulip,kaiyuanheshang/zulip,isht3/zulip,huangkebo/zulip,codeKonami/zulip,natanovia/zulip,karamcnair/zulip,Frouk/zulip,blaze225/zulip,Batterfii/zulip,cosmicAsymmetry/zulip,susansls/zulip,ApsOps/zulip,dnmfarrell/zulip,amanharitsh123/zulip,yocome/zulip,timabbott/zulip,zacps/zulip,synicalsyntax/zulip,sharmaeklavya2/zulip,LeeRisk/zulip,Robot
Caleb/zulip,samatdav/zulip,jessedhillon/zulip,peguin40/zulip,armooo/zulip,littledogboy/zulip,hafeez3000/zulip,hengqujushi/zulip,rht/zulip,swinghu/zulip,tiansiyuan/zulip,eeshangarg/zulip,punchagan/zulip,bitemyapp/zulip,noroot/zulip,so0k/zulip,ryanbackman/zulip,Drooids/zulip,vakila/zulip,Drooids/zulip,xuanhan863/zulip,hj3938/zulip,rht/zulip,Qgap/zulip,esander91/zulip,ipernet/zulip,itnihao/zulip,Gabriel0402/zulip,Vallher/zulip,alliejones/zulip,JanzTam/zulip,bssrdf/zulip,lfranchi/zulip,tdr130/zulip,Frouk/zulip,lfranchi/zulip,gigawhitlocks/zulip,brainwane/zulip,willingc/zulip,zwily/zulip,eastlhu/zulip,tbutter/zulip,SmartPeople/zulip,samatdav/zulip,mahim97/zulip,hj3938/zulip,TigorC/zulip,hayderimran7/zulip,jrowan/zulip,hayderimran7/zulip,udxxabp/zulip,akuseru/zulip,MayB/zulip,jimmy54/zulip,arpith/zulip,kou/zulip,christi3k/zulip,seapasulli/zulip,bssrdf/zulip,blaze225/zulip,calvinleenyc/zulip,calvinleenyc/zulip,wweiradio/zulip,LAndreas/zulip,verma-varsha/zulip,Galexrt/zulip,zorojean/zulip,esander91/zulip,peguin40/zulip,ikasumiwt/zulip,zhaoweigg/zulip,grave-w-grave/zulip,Vallher/zulip,PaulPetring/zulip,timabbott/zulip,jrowan/zulip,niftynei/zulip,aps-sids/zulip,noroot/zulip,ryansnowboarder/zulip,easyfmxu/zulip,peiwei/zulip,armooo/zulip,vabs22/zulip,zachallaun/zulip,sup95/zulip,rht/zulip,tbutter/zulip,zacps/zulip,jackrzhang/zulip,shrikrishnaholla/zulip,ryansnowboarder/zulip,KJin99/zulip,rht/zulip,wangdeshui/zulip,peguin40/zulip,esander91/zulip,RobotCaleb/zulip,peiwei/zulip,andersk/zulip,mohsenSy/zulip,hafeez3000/zulip,qq1012803704/zulip,sup95/zulip,thomasboyt/zulip,MariaFaBella85/zulip,wweiradio/zulip,he15his/zulip,noroot/zulip,shaunstanislaus/zulip,hengqujushi/zulip,hj3938/zulip,karamcnair/zulip,Diptanshu8/zulip,ahmadassaf/zulip,Gabriel0402/zulip,Qgap/zulip,thomasboyt/zulip,luyifan/zulip,mdavid/zulip,verma-varsha/zulip,punchagan/zulip,seapasulli/zulip,dxq-git/zulip,atomic-labs/zulip,dotcool/zulip,Juanvulcano/zulip,aliceriot/zulip,saitodisse/zulip,adnanh/zulip,TigorC/zulip,jackrzhang/zulip,atomic-labs/zulip,avastu/zulip,jainayush975/zulip,gigawhitlocks/zulip,ashwinirudrappa/zulip,synicalsyntax/zulip,LeeRisk/zulip,bluesea/zulip,easyfmxu/zulip,amallia/zulip,Jianchun1/zulip,hafeez3000/zulip,vaidap/zulip,amallia/zulip,johnny9/zulip,amyliu345/zulip,itnihao/zulip,ryanbackman/zulip,jrowan/zulip,Drooids/zulip,technicalpickles/zulip,vabs22/zulip,amallia/zulip,zofuthan/zulip,Qgap/zulip,kou/zulip,codeKonami/zulip,wavelets/zulip,joshisa/zulip,susansls/zulip,souravbadami/zulip,shaunstanislaus/zulip,gkotian/zulip,sonali0901/zulip,DazWorrall/zulip,nicholasbs/zulip,Galexrt/zulip,ericzhou2008/zulip,hayderimran7/zulip,punchagan/zulip,AZtheAsian/zulip,peiwei/zulip,amanharitsh123/zulip,ashwinirudrappa/zulip,dxq-git/zulip,paxapy/zulip,kokoar/zulip,guiquanz/zulip,eeshangarg/zulip,bluesea/zulip,PaulPetring/zulip,hackerkid/zulip,ipernet/zulip,bowlofstew/zulip,lfranchi/zulip,gigawhitlocks/zulip,luyifan/zulip,zulip/zulip,m1ssou/zulip,amyliu345/zulip,ericzhou2008/zulip,codeKonami/zulip,krtkmj/zulip,eeshangarg/zulip,aps-sids/zulip,deer-hope/zulip,wavelets/zulip,ikasumiwt/zulip,hengqujushi/zulip,developerfm/zulip,jphilipsen05/zulip,tdr130/zulip,RobotCaleb/zulip,fw1121/zulip,jainayush975/zulip,technicalpickles/zulip,souravbadami/zulip,ryanbackman/zulip,jessedhillon/zulip,easyfmxu/zulip,jrowan/zulip,jainayush975/zulip,aakash-cr7/zulip,christi3k/zulip,praveenaki/zulip,technicalpickles/zulip,ashwinirudrappa/zulip,punchagan/zulip,jackrzhang/zulip,kaiyuanheshang/zulip,KJin99/zulip,TigorC/zulip,vabs22/zulip,jonesgithub/zulip,PaulPetring
/zulip,praveenaki/zulip,bssrdf/zulip,m1ssou/zulip,codeKonami/zulip,johnny9/zulip,rht/zulip,ashwinirudrappa/zulip,wangdeshui/zulip,SmartPeople/zulip,gigawhitlocks/zulip,rishig/zulip,huangkebo/zulip,kokoar/zulip,moria/zulip,susansls/zulip,RobotCaleb/zulip,praveenaki/zulip,ryansnowboarder/zulip,amanharitsh123/zulip,eastlhu/zulip,gkotian/zulip,Gabriel0402/zulip,Frouk/zulip,johnnygaddarr/zulip,ikasumiwt/zulip,ufosky-server/zulip,aakash-cr7/zulip,joshisa/zulip,willingc/zulip,KJin99/zulip,TigorC/zulip,stamhe/zulip,sonali0901/zulip,bssrdf/zulip,jphilipsen05/zulip,EasonYi/zulip,DazWorrall/zulip,shubhamdhama/zulip,LeeRisk/zulip,tommyip/zulip,bowlofstew/zulip,sonali0901/zulip,PaulPetring/zulip,j831/zulip,so0k/zulip,AZtheAsian/zulip,LeeRisk/zulip,MariaFaBella85/zulip,PhilSk/zulip,Suninus/zulip,ufosky-server/zulip,udxxabp/zulip,nicholasbs/zulip,qq1012803704/zulip,akuseru/zulip,adnanh/zulip,stamhe/zulip,themass/zulip,dnmfarrell/zulip,vikas-parashar/zulip,willingc/zulip,PhilSk/zulip,ufosky-server/zulip,punchagan/zulip,calvinleenyc/zulip,atomic-labs/zulip,reyha/zulip,alliejones/zulip,deer-hope/zulip,nicholasbs/zulip,xuxiao/zulip,firstblade/zulip,jainayush975/zulip,fw1121/zulip,shaunstanislaus/zulip,zhaoweigg/zulip,technicalpickles/zulip,babbage/zulip,hj3938/zulip,nicholasbs/zulip,ericzhou2008/zulip,babbage/zulip,eastlhu/zulip,krtkmj/zulip,hackerkid/zulip,isht3/zulip,bluesea/zulip,bastianh/zulip,johnny9/zulip,fw1121/zulip,peguin40/zulip,verma-varsha/zulip,jackrzhang/zulip,he15his/zulip,mohsenSy/zulip,Cheppers/zulip,praveenaki/zulip,LeeRisk/zulip,kou/zulip,jonesgithub/zulip,saitodisse/zulip,johnny9/zulip,ryansnowboarder/zulip,yocome/zulip,jimmy54/zulip,tiansiyuan/zulip,susansls/zulip,jimmy54/zulip,PhilSk/zulip,PaulPetring/zulip,themass/zulip,zorojean/zulip,jonesgithub/zulip,littledogboy/zulip,brainwane/zulip,bastianh/zulip,jessedhillon/zulip,jphilipsen05/zulip,DazWorrall/zulip,natanovia/zulip,Cheppers/zulip,bssrdf/zulip,Jianchun1/zulip,easyfmxu/zulip,proliming/zulip,developerfm/zulip,seapasulli/zulip,littledogboy/zulip,dattatreya303/zulip,joshisa/zulip,zorojean/zulip,technicalpickles/zulip,brainwane/zulip,he15his/zulip,itnihao/zulip,bitemyapp/zulip,jerryge/zulip,tbutter/zulip,grave-w-grave/zulip,yuvipanda/zulip,luyifan/zulip,itnihao/zulip,isht3/zulip,m1ssou/zulip,dxq-git/zulip,jerryge/zulip,moria/zulip,xuanhan863/zulip,nicholasbs/zulip,atomic-labs/zulip,pradiptad/zulip,punchagan/zulip,ufosky-server/zulip,yuvipanda/zulip,levixie/zulip,karamcnair/zulip,hackerkid/zulip,dnmfarrell/zulip,bluesea/zulip,synicalsyntax/zulip,mansilladev/zulip,Vallher/zulip,hengqujushi/zulip,reyha/zulip,vaidap/zulip,j831/zulip,AZtheAsian/zulip,jphilipsen05/zulip,deer-hope/zulip,Gabriel0402/zulip,qq1012803704/zulip,esander91/zulip,JanzTam/zulip,voidException/zulip,moria/zulip,kaiyuanheshang/zulip,TigorC/zulip,cosmicAsymmetry/zulip,JPJPJPOPOP/zulip,LeeRisk/zulip,dawran6/zulip,zacps/zulip,niftynei/zulip,showell/zulip,noroot/zulip,tommyip/zulip,krtkmj/zulip,eastlhu/zulip,susansls/zulip,Diptanshu8/zulip,MayB/zulip,Drooids/zulip,xuxiao/zulip,stamhe/zulip,armooo/zulip,PhilSk/zulip,vaidap/zulip,showell/zulip,so0k/zulip,ryanbackman/zulip,glovebx/zulip,qq1012803704/zulip,vakila/zulip,huangkebo/zulip,DazWorrall/zulip,fw1121/zulip,bowlofstew/zulip,jimmy54/zulip,shubhamdhama/zulip,krtkmj/zulip,ipernet/zulip,natanovia/zulip,JanzTam/zulip,themass/zulip,jessedhillon/zulip,LAndreas/zulip,ikasumiwt/zulip,amallia/zulip,he15his/zulip,joyhchen/zulip,wavelets/zulip,ApsOps/zulip,aliceriot/zulip,vikas-parashar/zulip,swinghu/zulip,aps-sids/zulip,brockwhittake
r/zulip,Batterfii/zulip,brainwane/zulip,hafeez3000/zulip,adnanh/zulip,gigawhitlocks/zulip,ryansnowboarder/zulip,umkay/zulip,dawran6/zulip,zacps/zulip,MariaFaBella85/zulip,Cheppers/zulip,jainayush975/zulip,mdavid/zulip,johnny9/zulip,zhaoweigg/zulip,bastianh/zulip,JanzTam/zulip,akuseru/zulip,rishig/zulip,brockwhittaker/zulip,schatt/zulip,brainwane/zulip,tiansiyuan/zulip,bitemyapp/zulip,wavelets/zulip,saitodisse/zulip,jerryge/zulip,easyfmxu/zulip,joshisa/zulip,sup95/zulip,yuvipanda/zulip,cosmicAsymmetry/zulip,tommyip/zulip,dotcool/zulip,arpith/zulip,dotcool/zulip,karamcnair/zulip,pradiptad/zulip,schatt/zulip,MayB/zulip,vabs22/zulip,lfranchi/zulip,natanovia/zulip,Diptanshu8/zulip,hafeez3000/zulip,KingxBanana/zulip,arpitpanwar/zulip,esander91/zulip,wweiradio/zulip,isht3/zulip,dhcrzf/zulip,mahim97/zulip,Drooids/zulip,firstblade/zulip,Jianchun1/zulip,jerryge/zulip,wangdeshui/zulip,ikasumiwt/zulip,peiwei/zulip,littledogboy/zulip,TigorC/zulip,samatdav/zulip,voidException/zulip,grave-w-grave/zulip,PaulPetring/zulip,xuxiao/zulip,EasonYi/zulip,Suninus/zulip,jrowan/zulip,bowlofstew/zulip,wdaher/zulip,kaiyuanheshang/zulip,developerfm/zulip,wdaher/zulip,thomasboyt/zulip,huangkebo/zulip,arpitpanwar/zulip,dhcrzf/zulip,jerryge/zulip,dnmfarrell/zulip,levixie/zulip,praveenaki/zulip,aakash-cr7/zulip,mansilladev/zulip,ufosky-server/zulip,zulip/zulip,mdavid/zulip,ApsOps/zulip,LAndreas/zulip,gkotian/zulip,eastlhu/zulip,akuseru/zulip,showell/zulip,vabs22/zulip,vaidap/zulip,cosmicAsymmetry/zulip,adnanh/zulip,amallia/zulip,MariaFaBella85/zulip,yuvipanda/zulip,luyifan/zulip,stamhe/zulip,johnnygaddarr/zulip,bluesea/zulip,zulip/zulip,pradiptad/zulip,Cheppers/zulip,wweiradio/zulip,RobotCaleb/zulip,luyifan/zulip,Suninus/zulip,peiwei/zulip,shrikrishnaholla/zulip,reyha/zulip,schatt/zulip,LAndreas/zulip,kaiyuanheshang/zulip,deer-hope/zulip,tdr130/zulip,fw1121/zulip,zwily/zulip,Batterfii/zulip,xuanhan863/zulip,Cheppers/zulip,tdr130/zulip,bssrdf/zulip,paxapy/zulip,johnnygaddarr/zulip,hayderimran7/zulip,karamcnair/zulip,andersk/zulip,tdr130/zulip,isht3/zulip,ahmadassaf/zulip,zofuthan/zulip,guiquanz/zulip,vakila/zulip,rht/zulip,deer-hope/zulip,dotcool/zulip,zachallaun/zulip,dxq-git/zulip
|
Add a management command to purge a rabbitmq queue
This is useful in debugging when you just want to discard all the
messages in a queue because they have the wrong structure.
(imported from commit 8559ac74f11841430b4d0c801d5506ebcb74c3eb)
|
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from django.core.management import CommandError
from zerver.lib.queue import SimpleQueueClient
import logging
class Command(BaseCommand):
args = "<queue name>"
help = "Discards all messages from the given queue"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Wrong number of arguments")
queue_name = args[0]
queue = SimpleQueueClient()
queue.drain_queue(queue_name)
|
<commit_before><commit_msg>Add a management command to purge a rabbitmq queue
This is useful in debugging when you just want to discard all the
messages in a queue because they have the wrong structure.
(imported from commit 8559ac74f11841430b4d0c801d5506ebcb74c3eb)<commit_after>
|
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from django.core.management import CommandError
from zerver.lib.queue import SimpleQueueClient
import logging
class Command(BaseCommand):
args = "<queue name>"
help = "Discards all messages from the given queue"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Wrong number of arguments")
queue_name = args[0]
queue = SimpleQueueClient()
queue.drain_queue(queue_name)
|
Add a management command to purge a rabbitmq queue
This is useful in debugging when you just want to discard all the
messages in a queue because they have the wrong structure.
(imported from commit 8559ac74f11841430b4d0c801d5506ebcb74c3eb)
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from django.core.management import CommandError
from zerver.lib.queue import SimpleQueueClient
import logging
class Command(BaseCommand):
args = "<queue name>"
help = "Discards all messages from the given queue"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Wrong number of arguments")
queue_name = args[0]
queue = SimpleQueueClient()
queue.drain_queue(queue_name)
|
<commit_before><commit_msg>Add a management command to purge a rabbitmq queue
This is useful in debugging when you just want to discard all the
messages in a queue because they have the wrong structure.
(imported from commit 8559ac74f11841430b4d0c801d5506ebcb74c3eb)<commit_after>from __future__ import absolute_import
from django.core.management.base import BaseCommand
from django.core.management import CommandError
from zerver.lib.queue import SimpleQueueClient
import logging
class Command(BaseCommand):
args = "<queue name>"
help = "Discards all messages from the given queue"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Wrong number of arguments")
queue_name = args[0]
queue = SimpleQueueClient()
queue.drain_queue(queue_name)
|
|
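The command above plugs into Django's management machinery; a sketch of invoking it programmatically, where the queue name is purely illustrative:
from django.core.management import call_command
# Equivalent to: python manage.py purge_queue <queue name>
call_command('purge_queue', 'missedmessage_emails')  # placeholder queue name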
86f60a2acb592a6ac6260867f8c9d423fc25d9d8
|
stdnum/us/rtn.py
|
stdnum/us/rtn.py
|
# rtn.py - functions for handling banking routing transit numbers
#
# Copyright (C) 2014 Lifealike Ltd
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""RTN (Routing transport number).
The routing transport number is a nine digit number used in the US banking
system for processing deposits between banks.
The last digit is a checksum.
>>> calc_check_digit('11100002')
'5'
>>> validate('111000025')
'111000025'
>>> validate('11100002') # Not nine digits
Traceback (most recent call last):
...
InvalidLength: ..
>>> validate('11100002B') # Not all numeric
Traceback (most recent call last):
...
InvalidFormat: ..
>>> validate('112000025') # bad checksum
Traceback (most recent call last):
...
InvalidChecksum: ..
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the number to the minimal representation. This strips the
number of any surrounding whitespace."""
number = clean(number).strip()
return number
def calc_check_digit(number):
"""Calculate the check digit. The number passed should not have the
check digit included."""
digits = [int(c) for c in number]
checksum = (
7 * (digits[0] + digits[3] + digits[6]) +
3 * (digits[1] + digits[4] + digits[7]) +
9 * (digits[2] + digits[5])
) % 10
return str(checksum)
def validate(number):
"""Checks to see if the number provided is a valid routing number. This
checks the length and check digit."""
number = compact(number)
if not number.isdigit():
raise InvalidFormat()
if len(number) != 9:
raise InvalidLength()
if calc_check_digit(number[:-1]) != number[-1]:
raise InvalidChecksum()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid RTN. This checks the
length and check digit."""
try:
return bool(validate(number))
except ValidationError:
return False
|
Add US bank routing transit numbers
|
Add US bank routing transit numbers
|
Python
|
lgpl-2.1
|
tonyseek/python-stdnum,arthurdejong/python-stdnum,dchoruzy/python-stdnum,arthurdejong/python-stdnum,holvi/python-stdnum,holvi/python-stdnum,arthurdejong/python-stdnum,holvi/python-stdnum,t0mk/python-stdnum
|
Add US bank routing transit numbers
|
# rtn.py - functions for handling banking routing transit numbers
#
# Copyright (C) 2014 Lifealike Ltd
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""RTN (Routing transport number).
The routing transport number is a nine digit number used in the US banking
system for processing deposits between banks.
The last digit is a checksum.
>>> calc_check_digit('11100002')
'5'
>>> validate('111000025')
'111000025'
>>> validate('11100002') # Not nine digits
Traceback (most recent call last):
...
InvalidLength: ..
>>> validate('11100002B') # Not all numeric
Traceback (most recent call last):
...
InvalidFormat: ..
>>> validate('112000025') # bad checksum
Traceback (most recent call last):
...
InvalidChecksum: ..
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the number to the minimal representation. This strips the
number of any surrounding whitespace."""
number = clean(number).strip()
return number
def calc_check_digit(number):
"""Calculate the check digit. The number passed should not have the
check digit included."""
digits = [int(c) for c in number]
checksum = (
7 * (digits[0] + digits[3] + digits[6]) +
3 * (digits[1] + digits[4] + digits[7]) +
9 * (digits[2] + digits[5])
) % 10
return str(checksum)
def validate(number):
"""Checks to see if the number provided is a valid routing number. This
checks the length and check digit."""
number = compact(number)
if not number.isdigit():
raise InvalidFormat()
if len(number) != 9:
raise InvalidLength()
if calc_check_digit(number[:-1]) != number[-1]:
raise InvalidChecksum()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid RTN. This checks the
length and check digit."""
try:
return bool(validate(number))
except ValidationError:
return False
|
<commit_before><commit_msg>Add US bank routing transit numbers<commit_after>
|
# rtn.py - functions for handling banking routing transit numbers
#
# Copyright (C) 2014 Lifealike Ltd
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""RTN (Routing transport number).
The routing transport number is a nine digit number used in the US banking
system for processing deposits between banks.
The last digit is a checksum.
>>> calc_check_digit('11100002')
'5'
>>> validate('111000025')
'111000025'
>>> validate('11100002') # Not nine digits
Traceback (most recent call last):
...
InvalidLength: ..
>>> validate('11100002B') # Not all numeric
Traceback (most recent call last):
...
InvalidFormat: ..
>>> validate('112000025') # bad checksum
Traceback (most recent call last):
...
InvalidChecksum: ..
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the number to the minimal representation. This strips the
number of any surrounding whitespace."""
number = clean(number).strip()
return number
def calc_check_digit(number):
"""Calculate the check digit. The number passed should not have the
check digit included."""
digits = [int(c) for c in number]
checksum = (
7 * (digits[0] + digits[3] + digits[6]) +
3 * (digits[1] + digits[4] + digits[7]) +
9 * (digits[2] + digits[5])
) % 10
return str(checksum)
def validate(number):
"""Checks to see if the number provided is a valid routing number. This
checks the length and check digit."""
number = compact(number)
if not number.isdigit():
raise InvalidFormat()
if len(number) != 9:
raise InvalidLength()
if calc_check_digit(number[:-1]) != number[-1]:
raise InvalidChecksum()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid RTN. This checks the
length and check digit."""
try:
return bool(validate(number))
except ValidationError:
return False
|
Add US bank routing transit numbers
# rtn.py - functions for handling banking routing transit numbers
#
# Copyright (C) 2014 Lifealike Ltd
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""RTN (Routing transport number).
The routing transport number is a nine digit number used in the US banking
system for processing deposits between banks.
The last digit is a checksum.
>>> calc_check_digit('11100002')
'5'
>>> validate('111000025')
'111000025'
>>> validate('11100002') # Not nine digits
Traceback (most recent call last):
...
InvalidLength: ..
>>> validate('11100002B') # Not all numeric
Traceback (most recent call last):
...
InvalidFormat: ..
>>> validate('112000025') # bad checksum
Traceback (most recent call last):
...
InvalidChecksum: ..
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the number to the minimal representation. This strips the
number of any surrounding whitespace."""
number = clean(number).strip()
return number
def calc_check_digit(number):
"""Calculate the check digit. The number passed should not have the
check digit included."""
digits = [int(c) for c in number]
checksum = (
7 * (digits[0] + digits[3] + digits[6]) +
3 * (digits[1] + digits[4] + digits[7]) +
9 * (digits[2] + digits[5])
) % 10
return str(checksum)
def validate(number):
"""Checks to see if the number provided is a valid routing number. This
checks the length and check digit."""
number = compact(number)
if not number.isdigit():
raise InvalidFormat()
if len(number) != 9:
raise InvalidLength()
if calc_check_digit(number[:-1]) != number[-1]:
raise InvalidChecksum()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid RTN. This checks the
length and check digit."""
try:
return bool(validate(number))
except ValidationError:
return False
|
<commit_before><commit_msg>Add US bank routing transit numbers<commit_after># rtn.py - functions for handling banking routing transit numbers
#
# Copyright (C) 2014 Lifealike Ltd
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""RTN (Routing transport number).
The routing transport number is a nine digit number used in the US banking
system for processing deposits between banks.
The last digit is a checksum.
>>> calc_check_digit('11100002')
'5'
>>> validate('111000025')
'111000025'
>>> validate('11100002') # Not nine digits
Traceback (most recent call last):
...
InvalidLength: ..
>>> validate('11100002B') # Not all numeric
Traceback (most recent call last):
...
InvalidFormat: ..
>>> validate('112000025') # bad checksum
Traceback (most recent call last):
...
InvalidChecksum: ..
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the number to the minimal representation. This strips the
number of any surrounding whitespace."""
number = clean(number).strip()
return number
def calc_check_digit(number):
"""Calculate the check digit. The number passed should not have the
check digit included."""
digits = [int(c) for c in number]
checksum = (
7 * (digits[0] + digits[3] + digits[6]) +
3 * (digits[1] + digits[4] + digits[7]) +
9 * (digits[2] + digits[5])
) % 10
return str(checksum)
def validate(number):
"""Checks to see if the number provided is a valid routing number. This
checks the length and check digit."""
number = compact(number)
if not number.isdigit():
raise InvalidFormat()
if len(number) != 9:
raise InvalidLength()
if calc_check_digit(number[:-1]) != number[-1]:
raise InvalidChecksum()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid RTN. This checks the
length and check digit."""
try:
return bool(validate(number))
except ValidationError:
return False
|
|
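The doctests above already specify the behaviour; for readers, here is the same module exercised directly, with the check-digit arithmetic for '11100002' spelled out (the import path follows stdnum/us/rtn.py):
from stdnum.us import rtn
# 7*(1+0+0) + 3*(1+0+2) + 9*(1+0) = 25, and 25 % 10 == 5
print(rtn.calc_check_digit('11100002'))  # -> '5'
print(rtn.validate('111000025'))         # -> '111000025'
print(rtn.is_valid('112000025'))         # -> False (bad checksum)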
78ff64e41c7378200e99d26ea030ec12b70f0625
|
tests/test_io.py
|
tests/test_io.py
|
from __future__ import with_statement
from nose.tools import eq_
from fabric.io import OutputLooper
from fabric.context_managers import settings
def test_request_prompts():
"""
Test valid responses from prompts
"""
def run(txt, prompts):
with settings(prompts=prompts):
# try to fulfil the OutputLooper interface, only want to test
# _get_prompt_response. (str has a method upper)
ol = OutputLooper(str, 'upper', None, list(txt), None)
return ol._get_prompt_response()
prompts = {"prompt2": "response2",
"prompt1": "response1",
"prompt": "response"
}
eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
eq_(run("this is a prompt for promptx:", prompts), (None, None))
eq_(run("prompt for promp", prompts), (None, None))
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
import sys
from io import BytesIO
from nose.tools import eq_
from fabric.io import OutputLooper
from fabric.context_managers import hide, settings
from utils import mock_streams
def test_request_prompts():
"""
Test valid responses from prompts
"""
def run(txt, prompts):
with settings(prompts=prompts):
# try to fulfil the OutputLooper interface, only want to test
# _get_prompt_response. (str has a method upper)
ol = OutputLooper(str, 'upper', None, list(txt), None)
return ol._get_prompt_response()
prompts = {"prompt2": "response2",
"prompt1": "response1",
"prompt": "response"
}
eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
eq_(run("this is a prompt for promptx:", prompts), (None, None))
eq_(run("prompt for promp", prompts), (None, None))
@mock_streams('stdout')
def test_pip_progressbar_at_4096_byte_boundary_error():
"""
Test for unicode characters from the pip installation progress bar
causing a UnicodeDecodeError.
"""
expect = '█' * 4096
class Mock(object):
def __init__(self):
three_bytes = u'█'.encode('utf-8')
# 4096 comes from OutputLooper.read_size being hard-coded to 4096
self.source = BytesIO(three_bytes * 4096)
def get_unicode_bytes(self, size):
return self.source.read(size)
ol = OutputLooper(Mock(), 'get_unicode_bytes', sys.stdout, None, None)
with settings(hide('everything')):
ol.loop()
eq_(expect, sys.stdout.getvalue())
|
Add test which detects the UnicodeDecodeError from pip
|
Add test which detects the UnicodeDecodeError from pip
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe2 in position 4095: unexpected end of data
https://github.com/mathiasertl/fabric/issues/5
|
Python
|
bsd-2-clause
|
ploxiln/fabric,mathiasertl/fabric,rodrigc/fabric
|
from __future__ import with_statement
from nose.tools import eq_
from fabric.io import OutputLooper
from fabric.context_managers import settings
def test_request_prompts():
"""
Test valid responses from prompts
"""
def run(txt, prompts):
with settings(prompts=prompts):
# try to fulfil the OutputLooper interface, only want to test
# _get_prompt_response. (str has a method upper)
ol = OutputLooper(str, 'upper', None, list(txt), None)
return ol._get_prompt_response()
prompts = {"prompt2": "response2",
"prompt1": "response1",
"prompt": "response"
}
eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
eq_(run("this is a prompt for promptx:", prompts), (None, None))
eq_(run("prompt for promp", prompts), (None, None))
Add test which detects the UnicodeDecodeError from pip
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe2 in position 4095: unexpected end of data
https://github.com/mathiasertl/fabric/issues/5
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
import sys
from io import BytesIO
from nose.tools import eq_
from fabric.io import OutputLooper
from fabric.context_managers import hide, settings
from utils import mock_streams
def test_request_prompts():
"""
Test valid responses from prompts
"""
def run(txt, prompts):
with settings(prompts=prompts):
# try to fulfil the OutputLooper interface, only want to test
# _get_prompt_response. (str has a method upper)
ol = OutputLooper(str, 'upper', None, list(txt), None)
return ol._get_prompt_response()
prompts = {"prompt2": "response2",
"prompt1": "response1",
"prompt": "response"
}
eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
eq_(run("this is a prompt for promptx:", prompts), (None, None))
eq_(run("prompt for promp", prompts), (None, None))
@mock_streams('stdout')
def test_pip_progressbar_at_4096_byte_boundary_error():
"""
Test for unicode characters from the pip installation progress bar
causing a UnicodeDecodeError.
"""
expect = '█' * 4096
class Mock(object):
def __init__(self):
three_bytes = u'█'.encode('utf-8')
# 4096 comes from OutputLooper.read_size being hard-coded to 4096
self.source = BytesIO(three_bytes * 4096)
def get_unicode_bytes(self, size):
return self.source.read(size)
ol = OutputLooper(Mock(), 'get_unicode_bytes', sys.stdout, None, None)
with settings(hide('everything')):
ol.loop()
eq_(expect, sys.stdout.getvalue())
|
<commit_before>from __future__ import with_statement
from nose.tools import eq_
from fabric.io import OutputLooper
from fabric.context_managers import settings
def test_request_prompts():
"""
Test valid responses from prompts
"""
def run(txt, prompts):
with settings(prompts=prompts):
# try to fulfil the OutputLooper interface, only want to test
# _get_prompt_response. (str has a method upper)
ol = OutputLooper(str, 'upper', None, list(txt), None)
return ol._get_prompt_response()
prompts = {"prompt2": "response2",
"prompt1": "response1",
"prompt": "response"
}
eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
eq_(run("this is a prompt for promptx:", prompts), (None, None))
eq_(run("prompt for promp", prompts), (None, None))
<commit_msg>Add test which detects the UnicodeDecodeError from pip
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe2 in position 4095: unexpected end of data
https://github.com/mathiasertl/fabric/issues/5<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
import sys
from io import BytesIO
from nose.tools import eq_
from fabric.io import OutputLooper
from fabric.context_managers import hide, settings
from utils import mock_streams
def test_request_prompts():
"""
Test valid responses from prompts
"""
def run(txt, prompts):
with settings(prompts=prompts):
# try to fulfil the OutputLooper interface, only want to test
# _get_prompt_response. (str has a method upper)
ol = OutputLooper(str, 'upper', None, list(txt), None)
return ol._get_prompt_response()
prompts = {"prompt2": "response2",
"prompt1": "response1",
"prompt": "response"
}
eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
eq_(run("this is a prompt for promptx:", prompts), (None, None))
eq_(run("prompt for promp", prompts), (None, None))
@mock_streams('stdout')
def test_pip_progressbar_at_4096_byte_boundary_error():
"""
Test for unicode characters from the pip installation progress bar
causing a UnicodeDecodeError.
"""
expect = '█' * 4096
class Mock(object):
def __init__(self):
three_bytes = u'█'.encode('utf-8')
# 4096 comes from OutputLooper.read_size being hard-coded to 4096
self.source = BytesIO(three_bytes * 4096)
def get_unicode_bytes(self, size):
return self.source.read(size)
ol = OutputLooper(Mock(), 'get_unicode_bytes', sys.stdout, None, None)
with settings(hide('everything')):
ol.loop()
eq_(expect, sys.stdout.getvalue())
|
from __future__ import with_statement
from nose.tools import eq_
from fabric.io import OutputLooper
from fabric.context_managers import settings
def test_request_prompts():
"""
Test valid responses from prompts
"""
def run(txt, prompts):
with settings(prompts=prompts):
# try to fulfil the OutputLooper interface, only want to test
# _get_prompt_response. (str has a method upper)
ol = OutputLooper(str, 'upper', None, list(txt), None)
return ol._get_prompt_response()
prompts = {"prompt2": "response2",
"prompt1": "response1",
"prompt": "response"
}
eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
eq_(run("this is a prompt for promptx:", prompts), (None, None))
eq_(run("prompt for promp", prompts), (None, None))
Add test which detects the UnicodeDecodeError from pip
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe2 in position 4095: unexpected end of data
https://github.com/mathiasertl/fabric/issues/5
# -*- coding: utf-8 -*-
from __future__ import with_statement
import sys
from io import BytesIO
from nose.tools import eq_
from fabric.io import OutputLooper
from fabric.context_managers import hide, settings
from utils import mock_streams
def test_request_prompts():
"""
Test valid responses from prompts
"""
def run(txt, prompts):
with settings(prompts=prompts):
# try to fulfil the OutputLooper interface, only want to test
# _get_prompt_response. (str has a method upper)
ol = OutputLooper(str, 'upper', None, list(txt), None)
return ol._get_prompt_response()
prompts = {"prompt2": "response2",
"prompt1": "response1",
"prompt": "response"
}
eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
eq_(run("this is a prompt for promptx:", prompts), (None, None))
eq_(run("prompt for promp", prompts), (None, None))
@mock_streams('stdout')
def test_pip_progressbar_at_4096_byte_boundary_error():
"""
Test for unicode characters from the pip installation progress bar
causing a UnicodeDecodeError.
"""
expect = '█' * 4096
class Mock(object):
def __init__(self):
three_bytes = u'█'.encode('utf-8')
# 4096 comes from OutputLooper.read_size being hard-coded to 4096
self.source = BytesIO(three_bytes * 4096)
def get_unicode_bytes(self, size):
return self.source.read(size)
ol = OutputLooper(Mock(), 'get_unicode_bytes', sys.stdout, None, None)
with settings(hide('everything')):
ol.loop()
eq_(expect, sys.stdout.getvalue())
|
<commit_before>from __future__ import with_statement
from nose.tools import eq_
from fabric.io import OutputLooper
from fabric.context_managers import settings
def test_request_prompts():
"""
Test valid responses from prompts
"""
def run(txt, prompts):
with settings(prompts=prompts):
# try to fulfil the OutputLooper interface, only want to test
# _get_prompt_response. (str has a method upper)
ol = OutputLooper(str, 'upper', None, list(txt), None)
return ol._get_prompt_response()
prompts = {"prompt2": "response2",
"prompt1": "response1",
"prompt": "response"
}
eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
eq_(run("this is a prompt for promptx:", prompts), (None, None))
eq_(run("prompt for promp", prompts), (None, None))
<commit_msg>Add test which detects the UnicodeDecodeError from pip
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe2 in position 4095: unexpected end of data
https://github.com/mathiasertl/fabric/issues/5<commit_after># -*- coding: utf-8 -*-
from __future__ import with_statement
import sys
from io import BytesIO
from nose.tools import eq_
from fabric.io import OutputLooper
from fabric.context_managers import hide, settings
from utils import mock_streams
def test_request_prompts():
"""
Test valid responses from prompts
"""
def run(txt, prompts):
with settings(prompts=prompts):
# try to fulfil the OutputLooper interface, only want to test
# _get_prompt_response. (str has a method upper)
ol = OutputLooper(str, 'upper', None, list(txt), None)
return ol._get_prompt_response()
prompts = {"prompt2": "response2",
"prompt1": "response1",
"prompt": "response"
}
eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
eq_(run("this is a prompt for promptx:", prompts), (None, None))
eq_(run("prompt for promp", prompts), (None, None))
@mock_streams('stdout')
def test_pip_progressbar_at_4096_byte_boundary_error():
"""
Test for unicode characters from the pip installation progress bar
causing a UnicodeDecodeError.
"""
expect = '█' * 4096
class Mock(object):
def __init__(self):
three_bytes = u'█'.encode('utf-8')
# 4096 comes from OutputLooper.read_size being hard-coded to 4096
self.source = BytesIO(three_bytes * 4096)
def get_unicode_bytes(self, size):
return self.source.read(size)
ol = OutputLooper(Mock(), 'get_unicode_bytes', sys.stdout, None, None)
with settings(hide('everything')):
ol.loop()
eq_(expect, sys.stdout.getvalue())
|
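To see why the 4096-byte boundary in the test above bites, a standalone demonstration (no Fabric required): '█' is three bytes in UTF-8, so a 4096-byte read ends one byte into a character, and decoding the chunk on its own fails exactly as in the linked issue.
data = u'\u2588'.encode('utf-8') * 4096  # 12288 bytes of '█'
chunk = data[:4096]                      # 4096 % 3 == 1: ends mid-character
try:
    chunk.decode('utf-8')
except UnicodeDecodeError as exc:
    print(exc)  # 'unexpected end of data' at position 4095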
60bf87a9cf20d053c10ded99f2f3d39be785e8b8
|
make_obs_mask.py
|
make_obs_mask.py
|
import numpy as np
import aplpy
import matplotlib.pyplot as plt
from astropy.io import fits
from config import plottingDictionary
def mask_obs_area(rms_data):
rms_data[np.isfinite(rms_data)] = 1.
rms_data[np.isnan(rms_data)] = 0.
return rms_data
def make_obs_mask(region_list,file_ext):
for region in region_list:
rms_hdu = fits.open('{0}/{0}_NH3_11_{1}_mom0_QA_trim.fits'.format(region,file_ext))
# Set nan pixels to zero to create mask
obs_mask = mask_obs_area(rms_hdu[0].data)
new_hdu = fits.PrimaryHDU(obs_mask,rms_hdu[0].header)
new_hdu.writeto('{0}/{0}_NH3_11_{1}_obsMask.fits'.format(region,file_ext),clobber=True)
region_list = ['B18','NGC1333','L1688','OrionA']
file_ext = 'DR1_rebase3'
make_obs_mask(region_list,file_ext)
|
Create mask file outlining map extents
|
Create mask file outlining map extents
|
Python
|
mit
|
GBTAmmoniaSurvey/DR1_analysis,rfriesen/DR1_analysis
|
Create mask file outlining map extents
|
import numpy as np
import aplpy
import matplotlib.pyplot as plt
from astropy.io import fits
from config import plottingDictionary
def mask_obs_area(rms_data):
rms_data[np.isfinite(rms_data)] = 1.
rms_data[np.isnan(rms_data)] = 0.
return rms_data
def make_obs_mask(region_list,file_ext):
for region in region_list:
rms_hdu = fits.open('{0}/{0}_NH3_11_{1}_mom0_QA_trim.fits'.format(region,file_ext))
# Set nan pixels to zero to create mask
obs_mask = mask_obs_area(rms_hdu[0].data)
new_hdu = fits.PrimaryHDU(obs_mask,rms_hdu[0].header)
new_hdu.writeto('{0}/{0}_NH3_11_{1}_obsMask.fits'.format(region,file_ext),clobber=True)
region_list = ['B18','NGC1333','L1688','OrionA']
file_ext = 'DR1_rebase3'
make_obs_mask(region_list,file_ext)
|
<commit_before><commit_msg>Create mask file outlining map extents<commit_after>
|
import numpy as np
import aplpy
import matplotlib.pyplot as plt
from astropy.io import fits
from config import plottingDictionary
def mask_obs_area(rms_data):
rms_data[np.isfinite(rms_data)] = 1.
rms_data[np.isnan(rms_data)] = 0.
return rms_data
def make_obs_mask(region_list,file_ext):
for region in region_list:
rms_hdu = fits.open('{0}/{0}_NH3_11_{1}_mom0_QA_trim.fits'.format(region,file_ext))
# Set nan pixels to zero to create mask
obs_mask = mask_obs_area(rms_hdu[0].data)
new_hdu = fits.PrimaryHDU(obs_mask,rms_hdu[0].header)
new_hdu.writeto('{0}/{0}_NH3_11_{1}_obsMask.fits'.format(region,file_ext),clobber=True)
region_list = ['B18','NGC1333','L1688','OrionA']
file_ext = 'DR1_rebase3'
make_obs_mask(region_list,file_ext)
|
Create mask file outlining map extents
import numpy as np
import aplpy
import matplotlib.pyplot as plt
from astropy.io import fits
from config import plottingDictionary
def mask_obs_area(rms_data):
rms_data[np.isfinite(rms_data)] = 1.
rms_data[np.isnan(rms_data)] = 0.
return rms_data
def make_obs_mask(region_list,file_ext):
for region in region_list:
rms_hdu = fits.open('{0}/{0}_NH3_11_{1}_mom0_QA_trim.fits'.format(region,file_ext))
# Set nan pixels to zero to create mask
obs_mask = mask_obs_area(rms_hdu[0].data)
new_hdu = fits.PrimaryHDU(obs_mask,rms_hdu[0].header)
new_hdu.writeto('{0}/{0}_NH3_11_{1}_obsMask.fits'.format(region,file_ext),clobber=True)
region_list = ['B18','NGC1333','L1688','OrionA']
file_ext = 'DR1_rebase3'
make_obs_mask(region_list,file_ext)
|
<commit_before><commit_msg>Create mask file outlining map extents<commit_after>import numpy as np
import aplpy
import matplotlib.pyplot as plt
from astropy.io import fits
from config import plottingDictionary
def mask_obs_area(rms_data):
rms_data[np.isfinite(rms_data)] = 1.
rms_data[np.isnan(rms_data)] = 0.
return rms_data
def make_obs_mask(region_list,file_ext):
for region in region_list:
rms_hdu = fits.open('{0}/{0}_NH3_11_{1}_mom0_QA_trim.fits'.format(region,file_ext))
# Set nan pixels to zero to create mask
obs_mask = mask_obs_area(rms_hdu[0].data)
new_hdu = fits.PrimaryHDU(obs_mask,rms_hdu[0].header)
new_hdu.writeto('{0}/{0}_NH3_11_{1}_obsMask.fits'.format(region,file_ext),clobber=True)
region_list = ['B18','NGC1333','L1688','OrionA']
file_ext = 'DR1_rebase3'
make_obs_mask(region_list,file_ext)
|
|
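A quick sanity check one might run on the script's output; the path follows its own format string with region 'B18' and extension 'DR1_rebase3', and assumes that file has been written locally:
import numpy as np
from astropy.io import fits

mask = fits.getdata('B18/B18_NH3_11_DR1_rebase3_obsMask.fits')
print(np.unique(mask))  # expect [0., 1.]: 0 outside the map area, 1 where observed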
4b198dd9b4109888692cd95881a6e5ebe64bccee
|
conf/init_mongodb.py
|
conf/init_mongodb.py
|
import getpass
import hashlib
from pymongo import MongoClient
spider_db = MongoClient().spider
admin_present = False
for user in spider_db.auth.find():
if user["username"] == "admin":
admin_present = True
break
if not admin_present:
password1 = getpass.getpass("Give a password for the admin user: ")
password2 = getpass.getpass("Give the password again: ")
if password1 == password2:
user = {
'username': "admin",
'password': hashlib.sha256(password1.encode("utf-8")).hexdigest(),
'role': ["admin"],
}
spider_db.auth.insert_one(user)
print("Add 'admin' user with password supplied.")
else:
print("Passwords not corresponding...")
else:
print("'admin' user already created.")
|
Add initialisation script to add the admin user.
|
Add initialisation script to add the admin user.
|
Python
|
apache-2.0
|
asteroide/immo_spider,asteroide/immo_spider,asteroide/immo_spider,asteroide/immo_spider
|
Add initialisation script to add the admin user.
|
import getpass
import hashlib
from pymongo import MongoClient
spider_db = MongoClient().spider
admin_present = False
for user in spider_db.auth.find():
if user["username"] == "admin":
admin_present = True
break
if not admin_present:
password1 = getpass.getpass("Give a password for the admin user: ")
password2 = getpass.getpass("Give the password again: ")
if password1 == password2:
user = {
'username': "admin",
'password': hashlib.sha256(password1.encode("utf-8")).hexdigest(),
'role': ["admin"],
}
spider_db.auth.insert_one(user)
print("Add 'admin' user with password supplied.")
else:
print("Passwords not corresponding...")
else:
print("'admin' user already created.")
|
<commit_before><commit_msg>Add initialisation script to add the admin user.<commit_after>
|
import getpass
import hashlib
from pymongo import MongoClient
spider_db = MongoClient().spider
admin_present = False
for user in spider_db.auth.find():
if user["username"] == "admin":
admin_present = True
break
if not admin_present:
password1 = getpass.getpass("Give a password for the admin user: ")
password2 = getpass.getpass("Give the password again: ")
if password1 == password2:
user = {
'username': "admin",
'password': hashlib.sha256(password1.encode("utf-8")).hexdigest(),
'role': ["admin"],
}
spider_db.auth.insert_one(user)
print("Add 'admin' user with password supplied.")
else:
print("Passwords not corresponding...")
else:
print("'admin' user already created.")
|
Add initialisation script to add the admin user.import getpass
import hashlib
from pymongo import MongoClient
spider_db = MongoClient().spider
admin_present = False
for user in spider_db.auth.find():
if user["username"] == "admin":
admin_present = True
break
if not admin_present:
password1 = getpass.getpass("Give a password for the admin user: ")
password2 = getpass.getpass("Give the password again: ")
if password1 == password2:
user = {
'username': "admin",
'password': hashlib.sha256(password1.encode("utf-8")).hexdigest(),
'role': ["admin"],
}
spider_db.auth.insert_one(user)
print("Add 'admin' user with password supplied.")
else:
print("Passwords not corresponding...")
else:
print("'admin' user already created.")
|
<commit_before><commit_msg>Add initialisation script to add the admin user.<commit_after>import getpass
import hashlib
from pymongo import MongoClient
spider_db = MongoClient().spider
admin_present = False
for user in spider_db.auth.find():
if user["username"] == "admin":
admin_present = True
break
if not admin_present:
password1 = getpass.getpass("Give a password for the admin user: ")
password2 = getpass.getpass("Give the password again: ")
if password1 == password2:
user = {
'username': "admin",
'password': hashlib.sha256(password1.encode("utf-8")).hexdigest(),
'role': ["admin"],
}
spider_db.auth.insert_one(user)
print("Add 'admin' user with password supplied.")
else:
print("Passwords not corresponding...")
else:
print("'admin' user already created.")
|
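For context, a sketch of the matching login check against the document this script writes; check_login is a hypothetical helper, and note that an unsalted sha256 digest is a weak password store:

import hashlib
from pymongo import MongoClient

def check_login(username, password):
    # compare against the unsalted sha256 hex digest stored by the init script
    digest = hashlib.sha256(password.encode("utf-8")).hexdigest()
    user = MongoClient().spider.auth.find_one({"username": username})
    return user is not None and user["password"] == digest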
|
1018589a17484c5ed337b80a5887aef68df7adbb
|
util/hgfilesize.py
|
util/hgfilesize.py
|
from mercurial import context
from mercurial.i18n import _
'''
[extensions]
hgfilesize=~/m5/incoming/util/hgfilesize.py
[hooks]
pretxncommit = python:hgfilesize.limit_file_size
pretxnchangegroup = python:hgfilesize.limit_file_size
[limit_file_size]
maximum_file_size = 200000
'''
def limit_file_size(ui, repo, node=None, **kwargs):
'''forbid files over a given size'''
# default limit is 1 MB
limit = int(ui.config('limit_file_size', 'maximum_file_size', 1024*1024))
existing_tip = context.changectx(repo, node).rev()
new_tip = context.changectx(repo, 'tip').rev()
for rev in xrange(existing_tip, new_tip + 1):
ctx = context.changectx(repo, rev)
for f in ctx.files():
fctx = ctx.filectx(f)
if fctx.size() > limit:
ui.write(_('file %s of %s is too large: %d > %d\n') % \
(f, ctx, fctx.size(), limit))
return True # This is invalid
return False # Things are OK.
|
Add a hook to limit the size of any individual file
|
hooks: Add a hook to limit the size of any individual file
|
Python
|
bsd-3-clause
|
pombredanne/http-repo.gem5.org-gem5-,pombredanne/http-repo.gem5.org-gem5-,hoangt/tpzsimul.gem5,pombredanne/http-repo.gem5.org-gem5-,pombredanne/http-repo.gem5.org-gem5-,vovojh/gem5,pombredanne/http-repo.gem5.org-gem5-,vovojh/gem5,vovojh/gem5,hoangt/tpzsimul.gem5,hoangt/tpzsimul.gem5,hoangt/tpzsimul.gem5,hoangt/tpzsimul.gem5,vovojh/gem5,vovojh/gem5,pombredanne/http-repo.gem5.org-gem5-,vovojh/gem5,vovojh/gem5,hoangt/tpzsimul.gem5,pombredanne/http-repo.gem5.org-gem5-,hoangt/tpzsimul.gem5
|
hooks: Add a hook to limit the size of any individual file
|
from mercurial import context
from mercurial.i18n import _
'''
[extensions]
hgfilesize=~/m5/incoming/util/hgfilesize.py
[hooks]
pretxncommit = python:hgfilesize.limit_file_size
pretxnchangegroup = python:hgfilesize.limit_file_size
[limit_file_size]
maximum_file_size = 200000
'''
def limit_file_size(ui, repo, node=None, **kwargs):
'''forbid files over a given size'''
# default limit is 1 MB
limit = int(ui.config('limit_file_size', 'maximum_file_size', 1024*1024))
existing_tip = context.changectx(repo, node).rev()
new_tip = context.changectx(repo, 'tip').rev()
for rev in xrange(existing_tip, new_tip + 1):
ctx = context.changectx(repo, rev)
for f in ctx.files():
fctx = ctx.filectx(f)
if fctx.size() > limit:
ui.write(_('file %s of %s is too large: %d > %d\n') % \
(f, ctx, fctx.size(), limit))
return True # This is invalid
return False # Things are OK.
|
<commit_before><commit_msg>hooks: Add a hook to limit the size of any individual file<commit_after>
|
from mercurial import context
from mercurial.i18n import _
'''
[extensions]
hgfilesize=~/m5/incoming/util/hgfilesize.py
[hooks]
pretxncommit = python:hgfilesize.limit_file_size
pretxnchangegroup = python:hgfilesize.limit_file_size
[limit_file_size]
maximum_file_size = 200000
'''
def limit_file_size(ui, repo, node=None, **kwargs):
'''forbid files over a given size'''
# default limit is 1 MB
limit = int(ui.config('limit_file_size', 'maximum_file_size', 1024*1024))
existing_tip = context.changectx(repo, node).rev()
new_tip = context.changectx(repo, 'tip').rev()
for rev in xrange(existing_tip, new_tip + 1):
ctx = context.changectx(repo, rev)
for f in ctx.files():
fctx = ctx.filectx(f)
if fctx.size() > limit:
ui.write(_('file %s of %s is too large: %d > %d\n') % \
(f, ctx, fctx.size(), limit))
return True # This is invalid
return False # Things are OK.
|
hooks: Add a hook to limit the size of any individual filefrom mercurial import context
from mercurial.i18n import _
'''
[extensions]
hgfilesize=~/m5/incoming/util/hgfilesize.py
[hooks]
pretxncommit = python:hgfilesize.limit_file_size
pretxnchangegroup = python:hgfilesize.limit_file_size
[limit_file_size]
maximum_file_size = 200000
'''
def limit_file_size(ui, repo, node=None, **kwargs):
'''forbid files over a given size'''
# default limit is 1 MB
limit = int(ui.config('limit_file_size', 'maximum_file_size', 1024*1024))
existing_tip = context.changectx(repo, node).rev()
new_tip = context.changectx(repo, 'tip').rev()
for rev in xrange(existing_tip, new_tip + 1):
ctx = context.changectx(repo, rev)
for f in ctx.files():
fctx = ctx.filectx(f)
if fctx.size() > limit:
ui.write(_('file %s of %s is too large: %d > %d\n') % \
(f, ctx, fctx.size(), limit))
return True # This is invalid
return False # Things are OK.
|
<commit_before><commit_msg>hooks: Add a hook to limit the size of any individual file<commit_after>from mercurial import context
from mercurial.i18n import _
'''
[extensions]
hgfilesize=~/m5/incoming/util/hgfilesize.py
[hooks]
pretxncommit = python:hgfilesize.limit_file_size
pretxnchangegroup = python:hgfilesize.limit_file_size
[limit_file_size]
maximum_file_size = 200000
'''
def limit_file_size(ui, repo, node=None, **kwargs):
'''forbid files over a given size'''
# default limit is 1 MB
limit = int(ui.config('limit_file_size', 'maximum_file_size', 1024*1024))
existing_tip = context.changectx(repo, node).rev()
new_tip = context.changectx(repo, 'tip').rev()
for rev in xrange(existing_tip, new_tip + 1):
ctx = context.changectx(repo, rev)
for f in ctx.files():
fctx = ctx.filectx(f)
if fctx.size() > limit:
ui.write(_('file %s of %s is too large: %d > %d\n') % \
(f, ctx, fctx.size(), limit))
return True # This is invalid
return False # Things are OK.
|
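The hook's decision ultimately reduces to a size comparison, so it can be sanity-checked without a Mercurial repository; a minimal sketch with the predicate pulled out (names hypothetical):

def file_too_large(size, limit=1024 * 1024):
    """Core check performed by limit_file_size for each file context."""
    return size > limit

assert file_too_large(2 * 1024 * 1024)            # 2 MB trips the 1 MB default
assert not file_too_large(200000, limit=200000)   # exactly at the limit passes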
|
eebe2343ebcd10adfe5b5dc4b413c477e759bed8
|
algorithms/recursion/towersofhanoi.py
|
algorithms/recursion/towersofhanoi.py
|
def hanoi(disks, source, helper, target):
if disks >= 1:
# Move n-1 disks from source to helper
hanoi(disks - 1, source, target, helper)
# Move 1 disk from source to target
print source, "->", target
# Move previously moved disks from helper to target
hanoi(disks - 1, helper, source, target)
num_disks = 3
source, helper, target = 1, 2, 3
hanoi(num_disks, source, helper, target)
|
Add towers of hanoi implementation
|
Add towers of hanoi implementation
Solves the tower of hanoi game for 3 disks.
|
Python
|
mit
|
chinhtle/python_fun
|
Add towers of hanoi implementation
Solves the tower of hanoi game for 3 disks.
|
def hanoi(disks, source, helper, target):
if disks >= 1:
# Move n-1 disks from source to helper
hanoi(disks - 1, source, target, helper)
# Move 1 disk from source to target
print source, "->", target
# Move previously moved disks from helper to target
hanoi(disks - 1, helper, source, target)
num_disks = 3
source, helper, target = 1, 2, 3
hanoi(num_disks, source, helper, target)
|
<commit_before><commit_msg>Add towers of hanoi implementation
Solves the tower of hanoi game for 3 disks.<commit_after>
|
def hanoi(disks, source, helper, target):
if disks >= 1:
# Move n-1 disks from source to helper
hanoi(disks - 1, source, target, helper)
# Move 1 disk from source to target
print source, "->", target
# Move previously moved disks from helper to target
hanoi(disks - 1, helper, source, target)
num_disks = 3
source, helper, target = 1, 2, 3
hanoi(num_disks, source, helper, target)
|
Add towers of hanoi implementation
Solves the tower of hanoi game for 3 disks.def hanoi(disks, source, helper, target):
if disks >= 1:
# Move n-1 disks from source to helper
hanoi(disks - 1, source, target, helper)
# Move 1 disk from source to target
print source, "->", target
# Move previously moved disks from helper to target
hanoi(disks - 1, helper, source, target)
num_disks = 3
source, helper, target = 1, 2, 3
hanoi(num_disks, source, helper, target)
|
<commit_before><commit_msg>Add towers of hanoi implementation
Solves the tower of hanoi game for 3 disks.<commit_after>def hanoi(disks, source, helper, target):
if disks >= 1:
# Move n-1 disks from source to helper
hanoi(disks - 1, source, target, helper)
# Move 1 disk from source to target
print source, "->", target
# Move previously moved disks from helper to target
hanoi(disks - 1, helper, source, target)
num_disks = 3
source, helper, target = 1, 2, 3
hanoi(num_disks, source, helper, target)
|
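A quick sanity check for the recursion, as a minimal sketch: recording moves instead of printing them lets you assert the classic 2**n - 1 move count:

def hanoi_moves(disks, source, helper, target, moves=None):
    # same recursion as above, but collect (source, target) pairs
    if moves is None:
        moves = []
    if disks >= 1:
        hanoi_moves(disks - 1, source, target, helper, moves)
        moves.append((source, target))
        hanoi_moves(disks - 1, helper, source, target, moves)
    return moves

assert len(hanoi_moves(3, 1, 2, 3)) == 2 ** 3 - 1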
|
554f7eb48fea7af7d857e145ce1d36a25c73e6e3
|
app/modules/parse_input.py
|
app/modules/parse_input.py
|
import re
from app.module.shogi import Koma
class ParseInput:
    # class-level attribute ("self." is not valid in a class body)
    koma_names = [
"歩",
"と",
"香",
"香車",
"桂",
"桂馬",
"銀",
"銀将",
"金",
"金将",
"角",
"角行",
"馬",
"飛",
"飛車",
"龍",
"王",
"玉",
"王将",
"玉将",
]
    str2info = {
        "一": 0,
        "1": 0,
    }
    @staticmethod
    def parse(input_str):
        # build the piece-name alternation and return the matched groups
        koma_names_string_regex = "|".join(ParseInput.koma_names)
        pattern = "([一二三四五六七八九123456789123456789]{2})(" + koma_names_string_regex + ")([上右下左]{1,2})?(成)?"
        match = re.match(pattern, input_str)
        return match.groups() if match else None
|
Add script for parse. Not completed, but commit for move branch.
|
Add script for parse. Not completed, but commit for move branch.
|
Python
|
mit
|
setokinto/slack-shogi
|
Add script for parse. Not completed, but commit for move branch.
|
import re
from app.module.shogi import Koma
class ParseInput:
    # class-level attribute ("self." is not valid in a class body)
    koma_names = [
"歩",
"と",
"香",
"香車",
"桂",
"桂馬",
"銀",
"銀将",
"金",
"金将",
"角",
"角行",
"馬",
"飛",
"飛車",
"龍",
"王",
"玉",
"王将",
"玉将",
]
    str2info = {
        "一": 0,
        "1": 0,
    }
    @staticmethod
    def parse(input_str):
        # build the piece-name alternation and return the matched groups
        koma_names_string_regex = "|".join(ParseInput.koma_names)
        pattern = "([一二三四五六七八九123456789123456789]{2})(" + koma_names_string_regex + ")([上右下左]{1,2})?(成)?"
        match = re.match(pattern, input_str)
        return match.groups() if match else None
|
<commit_before><commit_msg>Add script for parse. Not completed, but commit for move branch.<commit_after>
|
import re
from app.module.shogi import Koma
class ParseInput:
    # class-level attribute ("self." is not valid in a class body)
    koma_names = [
"歩",
"と",
"香",
"香車",
"桂",
"桂馬",
"銀",
"銀将",
"金",
"金将",
"角",
"角行",
"馬",
"飛",
"飛車",
"龍",
"王",
"玉",
"王将",
"玉将",
]
    str2info = {
        "一": 0,
        "1": 0,
    }
    @staticmethod
    def parse(input_str):
        # build the piece-name alternation and return the matched groups
        koma_names_string_regex = "|".join(ParseInput.koma_names)
        pattern = "([一二三四五六七八九123456789123456789]{2})(" + koma_names_string_regex + ")([上右下左]{1,2})?(成)?"
        match = re.match(pattern, input_str)
        return match.groups() if match else None
|
Add script for parse. Not completed, but commit for move branch.
import re
from app.module.shogi import Koma
class ParseInput:
    # class-level attribute ("self." is not valid in a class body)
    koma_names = [
"歩",
"と",
"香",
"香車",
"桂",
"桂馬",
"銀",
"銀将",
"金",
"金将",
"角",
"角行",
"馬",
"飛",
"飛車",
"龍",
"王",
"玉",
"王将",
"玉将",
]
    str2info = {
        "一": 0,
        "1": 0,
    }
    @staticmethod
    def parse(input_str):
        # build the piece-name alternation and return the matched groups
        koma_names_string_regex = "|".join(ParseInput.koma_names)
        pattern = "([一二三四五六七八九123456789123456789]{2})(" + koma_names_string_regex + ")([上右下左]{1,2})?(成)?"
        match = re.match(pattern, input_str)
        return match.groups() if match else None
|
<commit_before><commit_msg>Add script for parse. Not completed, but commit for move branch.<commit_after>
import re
from app.module.shogi import Koma
class ParseInput:
    # class-level attribute ("self." is not valid in a class body)
    koma_names = [
"歩",
"と",
"香",
"香車",
"桂",
"桂馬",
"銀",
"銀将",
"金",
"金将",
"角",
"角行",
"馬",
"飛",
"飛車",
"龍",
"王",
"玉",
"王将",
"玉将",
]
    str2info = {
        "一": 0,
        "1": 0,
    }
    @staticmethod
    def parse(input_str):
        # build the piece-name alternation and return the matched groups
        koma_names_string_regex = "|".join(ParseInput.koma_names)
        pattern = "([一二三四五六七八九123456789123456789]{2})(" + koma_names_string_regex + ")([上右下左]{1,2})?(成)?"
        match = re.match(pattern, input_str)
        return match.groups() if match else None
|
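Assuming the sketched parse() body above, a hypothetical call might look like this; note the alternation is built in list order, so longer names such as 香車 should really be sorted ahead of their prefixes before joining:

groups = ParseInput.parse("76歩")
# expected: ("76", "歩", None, None) -- square, piece, direction, promotion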
|
5a0b08e8eaa7cf0be947cda5f7f7e639bb38a689
|
components/session.py
|
components/session.py
|
"""A class for SQLGitHub sessions.
Sample Usage:
g = Github(token)
s = SgSession(g, ["name", "description"], "abseil.repos")
print(s.Execute())
"""
import table as tb
import table_fetcher
import expression_evaluator
class SgSession:
"""A class for SQLGitHub sessions."""
def __init__(self, github, field_exprs, source, condition=None):
self._fetcher = table_fetcher.SgTableFetcher(github)
self._field_exprs = field_exprs
self._source = source
self._condition = condition
def Execute(self):
# source is either a label (eg. "google.issues") or a SgSession
source_table = self._source.Execute() if isinstance(self._source, SgSession) else self._fetcher.Fetch(self._source)
if self._condition:
filtered_table = tb.SgTable()
filtered_table.SetFields(source_table.GetFields())
for row in source_table:
if self._condition.Evaluate(source_table.GetFields(), row):
filtered_table.Append(row)
else:
filtered_table = source_table
return expression_evaluator.SgExpressionEvaluator.EvaluateExpressionsInTable(filtered_table, self._field_exprs)
|
Add SgSession - a meta class for a query instance
|
Add SgSession - a meta class for a query instance
|
Python
|
mit
|
lnishan/SQLGitHub
|
Add SgSession - a meta class for a query instance
|
"""A class for SQLGitHub sessions.
Sample Usage:
g = Github(token)
s = SgSession(g, ["name", "description"], "abseil.repos")
print(s.Execute())
"""
import table as tb
import table_fetcher
import expression_evaluator
class SgSession:
"""A class for SQLGitHub sessions."""
def __init__(self, github, field_exprs, source, condition=None):
self._fetcher = table_fetcher.SgTableFetcher(github)
self._field_exprs = field_exprs
self._source = source
self._condition = condition
def Execute(self):
# source is either a label (eg. "google.issues") or a SgSession
source_table = self._source.Execute() if isinstance(self._source, SgSession) else self._fetcher.Fetch(self._source)
if self._condition:
filtered_table = tb.SgTable()
filtered_table.SetFields(source_table.GetFields())
for row in source_table:
if self._condition.Evaluate(source_table.GetFields(), row):
filtered_table.Append(row)
else:
filtered_table = source_table
return expression_evaluator.SgExpressionEvaluator.EvaluateExpressionsInTable(filtered_table, self._field_exprs)
|
<commit_before><commit_msg>Add SgSession - a meta class for a query instance<commit_after>
|
"""A class for SQLGitHub sessions.
Sample Usage:
g = Github(token)
s = SgSession(g, ["name", "description"], "abseil.repos")
print(s.Execute())
"""
import table as tb
import table_fetcher
import expression_evaluator
class SgSession:
"""A class for SQLGitHub sessions."""
def __init__(self, github, field_exprs, source, condition=None):
self._fetcher = table_fetcher.SgTableFetcher(github)
self._field_exprs = field_exprs
self._source = source
self._condition = condition
def Execute(self):
# source is either a label (eg. "google.issues") or a SgSession
source_table = self._source.Execute() if isinstance(self._source, SgSession) else self._fetcher.Fetch(self._source)
if self._condition:
filtered_table = tb.SgTable()
filtered_table.SetFields(source_table.GetFields())
for row in source_table:
if self._condition.Evaluate(source_table.GetFields(), row):
filtered_table.Append(row)
else:
filtered_table = source_table
return expression_evaluator.SgExpressionEvaluator.EvaluateExpressionsInTable(filtered_table, self._field_exprs)
|
Add SgSession - a meta class for a query instance"""A class for SQLGitHub sessions.
Sample Usage:
g = Github(token)
s = SgSession(g, ["name", "description"], "abseil.repos")
print(s.Execute())
"""
import table as tb
import table_fetcher
import expression_evaluator
class SgSession:
"""A class for SQLGitHub sessions."""
def __init__(self, github, field_exprs, source, condition=None):
self._fetcher = table_fetcher.SgTableFetcher(github)
self._field_exprs = field_exprs
self._source = source
self._condition = condition
def Execute(self):
# source is either a label (eg. "google.issues") or a SgSession
source_table = self._source.Execute() if isinstance(self._source, SgSession) else self._fetcher.Fetch(self._source)
if self._condition:
filtered_table = tb.SgTable()
filtered_table.SetFields(source_table.GetFields())
for row in source_table:
if self._condition.Evaluate(source_table.GetFields(), row):
filtered_table.Append(row)
else:
filtered_table = source_table
return expression_evaluator.SgExpressionEvaluator.EvaluateExpressionsInTable(filtered_table, self._field_exprs)
|
<commit_before><commit_msg>Add SgSession - a meta class for a query instance<commit_after>"""A class for SQLGitHub sessions.
Sample Usage:
g = Github(token)
s = SgSession(g, ["name", "description"], "abseil.repos")
print(s.Execute())
"""
import table as tb
import table_fetcher
import expression_evaluator
class SgSession:
"""A class for SQLGitHub sessions."""
def __init__(self, github, field_exprs, source, condition=None):
self._fetcher = table_fetcher.SgTableFetcher(github)
self._field_exprs = field_exprs
self._source = source
self._condition = condition
def Execute(self):
# source is either a label (eg. "google.issues") or a SgSession
source_table = self._source.Execute() if isinstance(self._source, SgSession) else self._fetcher.Fetch(self._source)
if self._condition:
filtered_table = tb.SgTable()
filtered_table.SetFields(source_table.GetFields())
for row in source_table:
if self._condition.Evaluate(source_table.GetFields(), row):
filtered_table.Append(row)
else:
filtered_table = source_table
return expression_evaluator.SgExpressionEvaluator.EvaluateExpressionsInTable(filtered_table, self._field_exprs)
|
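Because Execute() accepts another SgSession as its source, sessions compose like nested SELECTs; a hypothetical sketch (some_condition stands in for any object exposing an Evaluate(fields, row) method):

inner = SgSession(g, ["name", "description"], "abseil.repos")
outer = SgSession(g, ["name"], inner, condition=some_condition)
print(outer.Execute())  # outer fields are evaluated against the inner result table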
|
c42a9633d9b4bbd0d2dec33b1e343e76d167bf6a
|
fix_name.py
|
fix_name.py
|
import os
import shutil
import sys
def fullpath_names(root_dir, file_list):
"""Create a list of fullpath file-name
"""
fullpath_list = []
for i in file_list:
fullpath_list.append(os.path.join(root_dir, i))
return fullpath_list
def get_files_and_subdirs(root_dir):
"""Return a tuple with all dirs and files inside root_dir
"""
f, s = [], []
for (dirpath, dirnames, filenames) in os.walk(root_dir):
s += fullpath_names(dirpath, dirnames)
f += fullpath_names(dirpath, filenames)
return s + f
def escape_space_and_uppercase(file_list):
"""
Rename files that contains blank spaces or upper letters by dots and
lower letters
"""
for i, filename in enumerate(file_list):
dirname, basename = os.path.split(filename)
if ' ' in basename or not basename.islower():
new_name = basename.lower().replace(' ', '.')
renamed_file = os.path.join(dirname, new_name)
shutil.move(filename, renamed_file)
file_list[i] = renamed_file
print file_list[i]
if __name__ == '__main__':
if len(sys.argv) != 2:
        print 'usage: python fix_name.py FOLDER'
        sys.exit(1)  # bail out before sys.argv[1] raises an IndexError
folder = sys.argv[1]
print 'This program is going to change name of many files.'
confirm = raw_input('Are you sure to continue? (y/n) ')
if confirm == 'y':
dirs_and_files = get_files_and_subdirs(folder)
escape_space_and_uppercase(dirs_and_files[::-1])
print 'Done successfully'
|
Add program to rename files automatically
|
Add program to rename files automatically
|
Python
|
mit
|
escorciav/linux-utils,escorciav/linux-utils
|
Add program to rename files automatically
|
import os
import shutil
import sys
def fullpath_names(root_dir, file_list):
"""Create a list of fullpath file-name
"""
fullpath_list = []
for i in file_list:
fullpath_list.append(os.path.join(root_dir, i))
return fullpath_list
def get_files_and_subdirs(root_dir):
"""Return a tuple with all dirs and files inside root_dir
"""
f, s = [], []
for (dirpath, dirnames, filenames) in os.walk(root_dir):
s += fullpath_names(dirpath, dirnames)
f += fullpath_names(dirpath, filenames)
return s + f
def escape_space_and_uppercase(file_list):
"""
Rename files that contains blank spaces or upper letters by dots and
lower letters
"""
for i, filename in enumerate(file_list):
dirname, basename = os.path.split(filename)
if ' ' in basename or not basename.islower():
new_name = basename.lower().replace(' ', '.')
renamed_file = os.path.join(dirname, new_name)
shutil.move(filename, renamed_file)
file_list[i] = renamed_file
print file_list[i]
if __name__ == '__main__':
if len(sys.argv) != 2:
        print 'usage: python fix_name.py FOLDER'
        sys.exit(1)  # bail out before sys.argv[1] raises an IndexError
folder = sys.argv[1]
print 'This program is going to change name of many files.'
confirm = raw_input('Are you sure to continue? (y/n) ')
if confirm == 'y':
dirs_and_files = get_files_and_subdirs(folder)
escape_space_and_uppercase(dirs_and_files[::-1])
print 'Done successfully'
|
<commit_before><commit_msg>Add program to rename files automatically<commit_after>
|
import os
import shutil
import sys
def fullpath_names(root_dir, file_list):
"""Create a list of fullpath file-name
"""
fullpath_list = []
for i in file_list:
fullpath_list.append(os.path.join(root_dir, i))
return fullpath_list
def get_files_and_subdirs(root_dir):
"""Return a tuple with all dirs and files inside root_dir
"""
f, s = [], []
for (dirpath, dirnames, filenames) in os.walk(root_dir):
s += fullpath_names(dirpath, dirnames)
f += fullpath_names(dirpath, filenames)
return s + f
def escape_space_and_uppercase(file_list):
"""
Rename files that contains blank spaces or upper letters by dots and
lower letters
"""
for i, filename in enumerate(file_list):
dirname, basename = os.path.split(filename)
if ' ' in basename or not basename.islower():
new_name = basename.lower().replace(' ', '.')
renamed_file = os.path.join(dirname, new_name)
shutil.move(filename, renamed_file)
file_list[i] = renamed_file
print file_list[i]
if __name__ == '__main__':
if len(sys.argv) != 2:
        print 'usage: python fix_name.py FOLDER'
        sys.exit(1)  # bail out before sys.argv[1] raises an IndexError
folder = sys.argv[1]
print 'This program is going to change name of many files.'
confirm = raw_input('Are you sure to continue? (y/n) ')
if confirm == 'y':
dirs_and_files = get_files_and_subdirs(folder)
escape_space_and_uppercase(dirs_and_files[::-1])
print 'Done successfully'
|
Add program to rename files automaticallyimport os
import shutil
import sys
def fullpath_names(root_dir, file_list):
"""Create a list of fullpath file-name
"""
fullpath_list = []
for i in file_list:
fullpath_list.append(os.path.join(root_dir, i))
return fullpath_list
def get_files_and_subdirs(root_dir):
"""Return a tuple with all dirs and files inside root_dir
"""
f, s = [], []
for (dirpath, dirnames, filenames) in os.walk(root_dir):
s += fullpath_names(dirpath, dirnames)
f += fullpath_names(dirpath, filenames)
return s + f
def escape_space_and_uppercase(file_list):
"""
Rename files that contains blank spaces or upper letters by dots and
lower letters
"""
for i, filename in enumerate(file_list):
dirname, basename = os.path.split(filename)
if ' ' in basename or not basename.islower():
new_name = basename.lower().replace(' ', '.')
renamed_file = os.path.join(dirname, new_name)
shutil.move(filename, renamed_file)
file_list[i] = renamed_file
print file_list[i]
if __name__ == '__main__':
if len(sys.argv) != 2:
        print 'usage: python fix_name.py FOLDER'
        sys.exit(1)  # bail out before sys.argv[1] raises an IndexError
folder = sys.argv[1]
print 'This program is going to change name of many files.'
confirm = raw_input('Are you sure to continue? (y/n) ')
if confirm == 'y':
dirs_and_files = get_files_and_subdirs(folder)
escape_space_and_uppercase(dirs_and_files[::-1])
print 'Done successfully'
|
<commit_before><commit_msg>Add program to rename files automatically<commit_after>import os
import shutil
import sys
def fullpath_names(root_dir, file_list):
"""Create a list of fullpath file-name
"""
fullpath_list = []
for i in file_list:
fullpath_list.append(os.path.join(root_dir, i))
return fullpath_list
def get_files_and_subdirs(root_dir):
"""Return a tuple with all dirs and files inside root_dir
"""
f, s = [], []
for (dirpath, dirnames, filenames) in os.walk(root_dir):
s += fullpath_names(dirpath, dirnames)
f += fullpath_names(dirpath, filenames)
return s + f
def escape_space_and_uppercase(file_list):
"""
Rename files that contains blank spaces or upper letters by dots and
lower letters
"""
for i, filename in enumerate(file_list):
dirname, basename = os.path.split(filename)
if ' ' in basename or not basename.islower():
new_name = basename.lower().replace(' ', '.')
renamed_file = os.path.join(dirname, new_name)
shutil.move(filename, renamed_file)
file_list[i] = renamed_file
print file_list[i]
if __name__ == '__main__':
if len(sys.argv) != 2:
        print 'usage: python fix_name.py FOLDER'
        sys.exit(1)  # bail out before sys.argv[1] raises an IndexError
folder = sys.argv[1]
print 'This program is going to change name of many files.'
confirm = raw_input('Are you sure to continue? (y/n) ')
if confirm == 'y':
dirs_and_files = get_files_and_subdirs(folder)
escape_space_and_uppercase(dirs_and_files[::-1])
print 'Done successfully'
|
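A dry-run helper is a natural companion so renames can be reviewed before committing to them; a minimal sketch reusing the same walk-and-normalize logic:

import os

def preview_renames(root_dir):
    """Print old -> new names without touching anything."""
    for dirpath, dirnames, filenames in os.walk(root_dir):
        for name in dirnames + filenames:
            if ' ' in name or not name.islower():
                new_name = name.lower().replace(' ', '.')
                print os.path.join(dirpath, name), '->', os.path.join(dirpath, new_name)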
|
921d4544c2f2a78aeb36addcfa9f01720de2acb2
|
plugins/limit.py
|
plugins/limit.py
|
from parser import parse_prefix
class Limit_Plugin:
""" Plugin to control which users can use which commands.
"""
def __init__(self, mib, params=None):
self.mib = mib
self.mib.register_privmsg_cmd('allow', self.allow)
self.mib.register_privmsg_cmd('deny', self.deny)
self.load_lists()
def clean(self):
self.save_lists()
def allow(self, msg):
parsed = self.parse(msg)
if parsed:
cmd, mask = parsed
print 'Adding %s for command %s' % (mask, cmd)
self.mib.add_cmd_permission(cmd, mask)
def deny(self, msg):
parsed = self.parse(msg)
if parsed:
cmd, mask = parsed
print 'Removing %s from command %s' % (mask, cmd)
self.mib.rm_cmd_permission(cmd, mask)
def parse(self, msg):
prefix = parse_prefix(msg.prefix)
postfix = msg.postfix.split()
if len(postfix) != 2:
error_msg = 'Usage: mask command'
self.mib.socket.send('PRIVMSG %s :%s' % (prefix.nick, error_msg))
return None
mask = postfix[0]
cmd = postfix[1]
return (cmd, mask)
def load_lists(self):
try:
f = open('limit.cfg')
except IOError:
return
try:
for line in f:
line = line.split()
if len(line) != 2:
continue # config file syntax error
cmd = line[0]
mask = line[1]
self.mib.add_cmd_permission(cmd, mask, regexpify=False)
finally:
f.close()
def save_lists(self):
try:
f = open('limit.cfg', 'w')
except IOError:
return
try:
for cmd in self.mib.command_masks:
for regexp in self.mib.command_masks[cmd]:
line = '%s %s\n' % (cmd, regexp.pattern)
f.write(line)
finally:
f.close()
def init(mib, params=None):
return Limit_Plugin(mib, params)
|
Add plugin for handling command permissions
|
Add plugin for handling command permissions
|
Python
|
mit
|
aalien/mib
|
Add plugin for handling command permissions
|
from parser import parse_prefix
class Limit_Plugin:
""" Plugin to control which users can use which commands.
"""
def __init__(self, mib, params=None):
self.mib = mib
self.mib.register_privmsg_cmd('allow', self.allow)
self.mib.register_privmsg_cmd('deny', self.deny)
self.load_lists()
def clean(self):
self.save_lists()
def allow(self, msg):
parsed = self.parse(msg)
if parsed:
cmd, mask = parsed
print 'Adding %s for command %s' % (mask, cmd)
self.mib.add_cmd_permission(cmd, mask)
def deny(self, msg):
parsed = self.parse(msg)
if parsed:
cmd, mask = parsed
print 'Removing %s from command %s' % (mask, cmd)
self.mib.rm_cmd_permission(cmd, mask)
def parse(self, msg):
prefix = parse_prefix(msg.prefix)
postfix = msg.postfix.split()
if len(postfix) != 2:
error_msg = 'Usage: mask command'
self.mib.socket.send('PRIVMSG %s :%s' % (prefix.nick, error_msg))
return None
mask = postfix[0]
cmd = postfix[1]
return (cmd, mask)
def load_lists(self):
try:
f = open('limit.cfg')
except IOError:
return
try:
for line in f:
line = line.split()
if len(line) != 2:
continue # config file syntax error
cmd = line[0]
mask = line[1]
self.mib.add_cmd_permission(cmd, mask, regexpify=False)
finally:
f.close()
def save_lists(self):
try:
f = open('limit.cfg', 'w')
except IOError:
return
try:
for cmd in self.mib.command_masks:
for regexp in self.mib.command_masks[cmd]:
line = '%s %s\n' % (cmd, regexp.pattern)
f.write(line)
finally:
f.close()
def init(mib, params=None):
return Limit_Plugin(mib, params)
|
<commit_before><commit_msg>Add plugin for handling command permissions<commit_after>
|
from parser import parse_prefix
class Limit_Plugin:
""" Plugin to control which users can use which commands.
"""
def __init__(self, mib, params=None):
self.mib = mib
self.mib.register_privmsg_cmd('allow', self.allow)
self.mib.register_privmsg_cmd('deny', self.deny)
self.load_lists()
def clean(self):
self.save_lists()
def allow(self, msg):
parsed = self.parse(msg)
if parsed:
cmd, mask = parsed
print 'Adding %s for command %s' % (mask, cmd)
self.mib.add_cmd_permission(cmd, mask)
def deny(self, msg):
parsed = self.parse(msg)
if parsed:
cmd, mask = parsed
print 'Removing %s from command %s' % (mask, cmd)
self.mib.rm_cmd_permission(cmd, mask)
def parse(self, msg):
prefix = parse_prefix(msg.prefix)
postfix = msg.postfix.split()
if len(postfix) != 2:
error_msg = 'Usage: mask command'
self.mib.socket.send('PRIVMSG %s :%s' % (prefix.nick, error_msg))
return None
mask = postfix[0]
cmd = postfix[1]
return (cmd, mask)
def load_lists(self):
try:
f = open('limit.cfg')
except IOError:
return
try:
for line in f:
line = line.split()
if len(line) != 2:
continue # config file syntax error
cmd = line[0]
mask = line[1]
self.mib.add_cmd_permission(cmd, mask, regexpify=False)
finally:
f.close()
def save_lists(self):
try:
f = open('limit.cfg', 'w')
except IOError:
return
try:
for cmd in self.mib.command_masks:
for regexp in self.mib.command_masks[cmd]:
line = '%s %s\n' % (cmd, regexp.pattern)
f.write(line)
finally:
f.close()
def init(mib, params=None):
return Limit_Plugin(mib, params)
|
Add plugin for handling command permissionsfrom parser import parse_prefix
class Limit_Plugin:
""" Plugin to control which users can use which commands.
"""
def __init__(self, mib, params=None):
self.mib = mib
self.mib.register_privmsg_cmd('allow', self.allow)
self.mib.register_privmsg_cmd('deny', self.deny)
self.load_lists()
def clean(self):
self.save_lists()
def allow(self, msg):
parsed = self.parse(msg)
if parsed:
cmd, mask = parsed
print 'Adding %s for command %s' % (mask, cmd)
self.mib.add_cmd_permission(cmd, mask)
def deny(self, msg):
parsed = self.parse(msg)
if parsed:
cmd, mask = parsed
print 'Removing %s from command %s' % (mask, cmd)
self.mib.rm_cmd_permission(cmd, mask)
def parse(self, msg):
prefix = parse_prefix(msg.prefix)
postfix = msg.postfix.split()
if len(postfix) != 2:
error_msg = 'Usage: mask command'
self.mib.socket.send('PRIVMSG %s :%s' % (prefix.nick, error_msg))
return None
mask = postfix[0]
cmd = postfix[1]
return (cmd, mask)
def load_lists(self):
try:
f = open('limit.cfg')
except IOError:
return
try:
for line in f:
line = line.split()
if len(line) != 2:
continue # config file syntax error
cmd = line[0]
mask = line[1]
self.mib.add_cmd_permission(cmd, mask, regexpify=False)
finally:
f.close()
def save_lists(self):
try:
f = open('limit.cfg', 'w')
except IOError:
return
try:
for cmd in self.mib.command_masks:
for regexp in self.mib.command_masks[cmd]:
line = '%s %s\n' % (cmd, regexp.pattern)
f.write(line)
finally:
f.close()
def init(mib, params=None):
return Limit_Plugin(mib, params)
|
<commit_before><commit_msg>Add plugin for handling command permissions<commit_after>from parser import parse_prefix
class Limit_Plugin:
""" Plugin to control which users can use which commands.
"""
def __init__(self, mib, params=None):
self.mib = mib
self.mib.register_privmsg_cmd('allow', self.allow)
self.mib.register_privmsg_cmd('deny', self.deny)
self.load_lists()
def clean(self):
self.save_lists()
def allow(self, msg):
parsed = self.parse(msg)
if parsed:
cmd, mask = parsed
print 'Adding %s for command %s' % (mask, cmd)
self.mib.add_cmd_permission(cmd, mask)
def deny(self, msg):
parsed = self.parse(msg)
if parsed:
cmd, mask = parsed
print 'Removing %s from command %s' % (mask, cmd)
self.mib.rm_cmd_permission(cmd, mask)
def parse(self, msg):
prefix = parse_prefix(msg.prefix)
postfix = msg.postfix.split()
if len(postfix) != 2:
error_msg = 'Usage: mask command'
self.mib.socket.send('PRIVMSG %s :%s' % (prefix.nick, error_msg))
return None
mask = postfix[0]
cmd = postfix[1]
return (cmd, mask)
def load_lists(self):
try:
f = open('limit.cfg')
except IOError:
return
try:
for line in f:
line = line.split()
if len(line) != 2:
continue # config file syntax error
cmd = line[0]
mask = line[1]
self.mib.add_cmd_permission(cmd, mask, regexpify=False)
finally:
f.close()
def save_lists(self):
try:
f = open('limit.cfg', 'w')
except IOError:
return
try:
for cmd in self.mib.command_masks:
for regexp in self.mib.command_masks[cmd]:
line = '%s %s\n' % (cmd, regexp.pattern)
f.write(line)
finally:
f.close()
def init(mib, params=None):
return Limit_Plugin(mib, params)
|
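The mib object's add_cmd_permission(..., regexpify=False) implies masks are stored as compiled regexps; a plausible sketch of converting an IRC-style mask into one (an assumption, since mib's implementation isn't shown here):

import re

def mask_to_regexp(mask):
    # IRC-style mask: '*' matches any run, '?' matches a single character
    pattern = re.escape(mask).replace(r'\*', '.*').replace(r'\?', '.')
    return re.compile('^' + pattern + '$')

assert mask_to_regexp('nick!*@*.example.com').match('nick!user@host.example.com')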
|
8c6559a442b2ca4a934aecd755b4643d2e234c84
|
bin/pympit_multiprocess.py
|
bin/pympit_multiprocess.py
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import multiprocessing
#- Default number of processes to use for multiprocessing
if 'SLURM_CPUS_PER_TASK' in os.environ.keys():
mproc = int(os.environ['SLURM_CPUS_PER_TASK'])
else:
mproc = max(1, multiprocessing.cpu_count() // 2)
def compute(seed) :
std = None
nsamp = 100
try :
np.random.seed(seed)
data = np.random.random(size=nsamp)
std = np.std(data)
except :
std = 0
return std
def _func(arg):
    # job entries are plain integer seeds, so pass positionally (not **kwargs)
    return compute(arg)
comm = MPI.COMM_WORLD
rank = comm.rank
nproc = comm.size
ngroup = int(nproc / 4)
group = int(rank / ngroup)
group_rank = rank % ngroup
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)
start = MPI.Wtime()
if group_rank == 0:
print("Group {} of {} has {} processes".format(group+1, ngroup, comm_group.size))
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
job_seeds = list(range(rank*100, (rank+1)*100, 1))
pool = multiprocessing.Pool(mproc)
local_std = pool.map(_func, job_seeds)
pool.close()
pool.join()
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
std = comm.gather(local_std, root=0)
if rank == 0:
np.savetxt('pympit_mp_out.txt', std)
stop = MPI.Wtime()
elapsed = stop - start
comm.barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
        # the original looped over an undefined local_out; summarize local results
        print("    local mean std: {:.4f}".format(np.mean(local_std)))
comm.barrier()
|
Add new test script that calls multiprocessing.
|
Add new test script that calls multiprocessing.
|
Python
|
bsd-2-clause
|
tskisner/pympit,tskisner/pympit
|
Add new test script that calls multiprocessing.
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import multiprocessing
#- Default number of processes to use for multiprocessing
if 'SLURM_CPUS_PER_TASK' in os.environ.keys():
mproc = int(os.environ['SLURM_CPUS_PER_TASK'])
else:
mproc = max(1, multiprocessing.cpu_count() // 2)
def compute(seed) :
std = None
nsamp = 100
try :
np.random.seed(seed)
data = np.random.random(size=nsamp)
std = np.std(data)
except :
std = 0
return std
def _func(arg):
    # job entries are plain integer seeds, so pass positionally (not **kwargs)
    return compute(arg)
comm = MPI.COMM_WORLD
rank = comm.rank
nproc = comm.size
ngroup = int(nproc / 4)
group = int(rank / ngroup)
group_rank = rank % ngroup
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)
start = MPI.Wtime()
if group_rank == 0:
print("Group {} of {} has {} processes".format(group+1, ngroup, comm_group.size))
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
job_seeds = list(range(rank*100, (rank+1)*100, 1))
pool = multiprocessing.Pool(mproc)
local_std = pool.map(_func, job_seeds)
pool.close()
pool.join()
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
std = comm.gather(local_std, root=0)
if rank == 0:
np.savetxt('pympit_mp_out.txt', std)
stop = MPI.Wtime()
elapsed = stop - start
comm.barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
        # the original looped over an undefined local_out; summarize local results
        print("    local mean std: {:.4f}".format(np.mean(local_std)))
comm.barrier()
|
<commit_before><commit_msg>Add new test script that calls multiprocessing.<commit_after>
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import multiprocessing
#- Default number of processes to use for multiprocessing
if 'SLURM_CPUS_PER_TASK' in os.environ.keys():
mproc = int(os.environ['SLURM_CPUS_PER_TASK'])
else:
mproc = max(1, multiprocessing.cpu_count() // 2)
def compute(seed) :
std = None
nsamp = 100
try :
np.random.seed(seed)
data = np.random.random(size=nsamp)
std = np.std(data)
except :
std = 0
return std
def _func(arg):
    # job entries are plain integer seeds, so pass positionally (not **kwargs)
    return compute(arg)
comm = MPI.COMM_WORLD
rank = comm.rank
nproc = comm.size
ngroup = int(nproc / 4)
group = int(rank / ngroup)
group_rank = rank % ngroup
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)
start = MPI.Wtime()
if group_rank == 0:
print("Group {} of {} has {} processes".format(group+1, ngroup, comm_group.size))
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
job_seeds = list(range(rank*100, (rank+1)*100, 1))
pool = multiprocessing.Pool(mproc)
local_std = pool.map(_func, job_seeds)
pool.close()
pool.join()
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
std = comm.gather(local_std, root=0)
if rank == 0:
np.savetxt('pympit_mp_out.txt', std)
stop = MPI.Wtime()
elapsed = stop - start
comm.barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
        # the original looped over an undefined local_out; summarize local results
        print("    local mean std: {:.4f}".format(np.mean(local_std)))
comm.barrier()
|
Add new test script that calls multiprocessing.#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import multiprocessing
#- Default number of processes to use for multiprocessing
if 'SLURM_CPUS_PER_TASK' in os.environ.keys():
mproc = int(os.environ['SLURM_CPUS_PER_TASK'])
else:
mproc = max(1, multiprocessing.cpu_count() // 2)
def compute(seed) :
std = None
nsamp = 100
try :
np.random.seed(seed)
data = np.random.random(size=nsamp)
std = np.std(data)
except :
std = 0
return std
def _func(arg):
    # job entries are plain integer seeds, so pass positionally (not **kwargs)
    return compute(arg)
comm = MPI.COMM_WORLD
rank = comm.rank
nproc = comm.size
ngroup = int(nproc / 4)
group = int(rank / ngroup)
group_rank = rank % ngroup
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)
start = MPI.Wtime()
if group_rank == 0:
print("Group {} of {} has {} processes".format(group+1, ngroup, comm_group.size))
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
job_seeds = list(range(rank*100, (rank+1)*100, 1))
pool = multiprocessing.Pool(mproc)
local_std = pool.map(_func, job_seeds)
pool.close()
pool.join()
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
std = comm.gather(local_std, root=0)
if rank == 0:
np.savetxt('pympit_mp_out.txt', std)
stop = MPI.Wtime()
elapsed = stop - start
comm.barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
        # the original looped over an undefined local_out; summarize local results
        print("    local mean std: {:.4f}".format(np.mean(local_std)))
comm.barrier()
|
<commit_before><commit_msg>Add new test script that calls multiprocessing.<commit_after>#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, unicode_literals, with_statement
from mpi4py import MPI
import sys
import os
import numpy as np
import multiprocessing
#- Default number of processes to use for multiprocessing
if 'SLURM_CPUS_PER_TASK' in os.environ.keys():
mproc = int(os.environ['SLURM_CPUS_PER_TASK'])
else:
mproc = max(1, multiprocessing.cpu_count() // 2)
def compute(seed) :
std = None
nsamp = 100
try :
np.random.seed(seed)
data = np.random.random(size=nsamp)
std = np.std(data)
except :
std = 0
return std
def _func(arg):
    # job entries are plain integer seeds, so pass positionally (not **kwargs)
    return compute(arg)
comm = MPI.COMM_WORLD
rank = comm.rank
nproc = comm.size
ngroup = int(nproc / 4)
group = int(rank / ngroup)
group_rank = rank % ngroup
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)
start = MPI.Wtime()
if group_rank == 0:
print("Group {} of {} has {} processes".format(group+1, ngroup, comm_group.size))
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
job_seeds = list(range(rank*100, (rank+1)*100, 1))
pool = multiprocessing.Pool(mproc)
local_std = pool.map(_func, job_seeds)
pool.close()
pool.join()
comm_group.barrier()
comm_rank.barrier()
comm.barrier()
std = comm.gather(local_std, root=0)
if rank == 0:
np.savetxt('pympit_mp_out.txt', std)
stop = MPI.Wtime()
elapsed = stop - start
comm.barrier()
for p in range(comm.size):
if p == comm.rank:
print("proc {:02d} {:.3f}s:".format(p, elapsed))
        # the original looped over an undefined local_out; summarize local results
        print("    local mean std: {:.4f}".format(np.mean(local_std)))
comm.barrier()
|
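If keyword unpacking was the original intent of compute(**arg), the job list would carry dicts instead of bare ints; a sketch of that alternative reading:

# alternative: ship kwargs dicts so _func can unpack them with **
job_seeds = [{"seed": s} for s in range(rank * 100, (rank + 1) * 100)]
local_std = pool.map(_func, job_seeds)  # with _func returning compute(**arg)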
|
f84ba2d213636482951553cc453b33a4bac8541f
|
pytest_doctest_custom.py
|
pytest_doctest_custom.py
|
"""Py.test doctest custom plugin"""
# By Danilo J. S. Bellini
import sys, functools
def printer(value):
"""Prints the object representation using the given custom formatter."""
if value is not None:
print(printer.repr(value)) # This attribute has to be set elsewhere
def temp_replace(obj, attr_name, value):
"""
Returns a decorator that replaces obj.attr = value before calling the
wrapped function and restores obj.attr afterwards.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
backup = getattr(obj, attr_name)
setattr(obj, attr_name, value)
result = func(*args, **kwargs)
setattr(obj, attr_name, backup)
return result
return wrapper
return decorator
def pytest_configure(config):
"""
Hook for changing ``doctest.DocTestRunner.run`` method so that the
``sys.__displayhook__`` calls the given printer function while a doctest
is running.
"""
import doctest
enable_printer = temp_replace(sys, "__displayhook__", printer)
doctest.DocTestRunner.run = enable_printer(doctest.DocTestRunner.run)
# As the public method doctest.DocTestRunner.run replaces sys.displayhook
# by sys.__displayhook__, we could also have changed "displayhook" on the
# _DocTestRunner__run protected method
|
Create the plugin based on PyScanPrev conftest.py
|
Create the plugin based on PyScanPrev conftest.py
|
Python
|
mit
|
danilobellini/pytest-doctest-custom
|
Create the plugin based on PyScanPrev conftest.py
|
"""Py.test doctest custom plugin"""
# By Danilo J. S. Bellini
import sys, functools
def printer(value):
"""Prints the object representation using the given custom formatter."""
if value is not None:
print(printer.repr(value)) # This attribute has to be set elsewhere
def temp_replace(obj, attr_name, value):
"""
Returns a decorator that replaces obj.attr = value before calling the
wrapped function and restores obj.attr afterwards.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
backup = getattr(obj, attr_name)
setattr(obj, attr_name, value)
result = func(*args, **kwargs)
setattr(obj, attr_name, backup)
return result
return wrapper
return decorator
def pytest_configure(config):
"""
Hook for changing ``doctest.DocTestRunner.run`` method so that the
``sys.__displayhook__`` calls the given printer function while a doctest
is running.
"""
import doctest
enable_printer = temp_replace(sys, "__displayhook__", printer)
doctest.DocTestRunner.run = enable_printer(doctest.DocTestRunner.run)
# As the public method doctest.DocTestRunner.run replaces sys.displayhook
# by sys.__displayhook__, we could also have changed "displayhook" on the
# _DocTestRunner__run protected method
|
<commit_before><commit_msg>Create the plugin based on PyScanPrev conftest.py<commit_after>
|
"""Py.test doctest custom plugin"""
# By Danilo J. S. Bellini
import sys, functools
def printer(value):
"""Prints the object representation using the given custom formatter."""
if value is not None:
print(printer.repr(value)) # This attribute has to be set elsewhere
def temp_replace(obj, attr_name, value):
"""
Returns a decorator that replaces obj.attr = value before calling the
wrapped function and restores obj.attr afterwards.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
backup = getattr(obj, attr_name)
setattr(obj, attr_name, value)
result = func(*args, **kwargs)
setattr(obj, attr_name, backup)
return result
return wrapper
return decorator
def pytest_configure(config):
"""
Hook for changing ``doctest.DocTestRunner.run`` method so that the
``sys.__displayhook__`` calls the given printer function while a doctest
is running.
"""
import doctest
enable_printer = temp_replace(sys, "__displayhook__", printer)
doctest.DocTestRunner.run = enable_printer(doctest.DocTestRunner.run)
# As the public method doctest.DocTestRunner.run replaces sys.displayhook
# by sys.__displayhook__, we could also have changed "displayhook" on the
# _DocTestRunner__run protected method
|
Create the plugin based on PyScanPrev conftest.py"""Py.test doctest custom plugin"""
# By Danilo J. S. Bellini
import sys, functools
def printer(value):
"""Prints the object representation using the given custom formatter."""
if value is not None:
print(printer.repr(value)) # This attribute has to be set elsewhere
def temp_replace(obj, attr_name, value):
"""
Returns a decorator that replaces obj.attr = value before calling the
wrapped function and restores obj.attr afterwards.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
backup = getattr(obj, attr_name)
setattr(obj, attr_name, value)
result = func(*args, **kwargs)
setattr(obj, attr_name, backup)
return result
return wrapper
return decorator
def pytest_configure(config):
"""
Hook for changing ``doctest.DocTestRunner.run`` method so that the
``sys.__displayhook__`` calls the given printer function while a doctest
is running.
"""
import doctest
enable_printer = temp_replace(sys, "__displayhook__", printer)
doctest.DocTestRunner.run = enable_printer(doctest.DocTestRunner.run)
# As the public method doctest.DocTestRunner.run replaces sys.displayhook
# by sys.__displayhook__, we could also have changed "displayhook" on the
# _DocTestRunner__run protected method
|
<commit_before><commit_msg>Create the plugin based on PyScanPrev conftest.py<commit_after>"""Py.test doctest custom plugin"""
# By Danilo J. S. Bellini
import sys, functools
def printer(value):
"""Prints the object representation using the given custom formatter."""
if value is not None:
print(printer.repr(value)) # This attribute has to be set elsewhere
def temp_replace(obj, attr_name, value):
"""
Returns a decorator that replaces obj.attr = value before calling the
wrapped function and restores obj.attr afterwards.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
backup = getattr(obj, attr_name)
setattr(obj, attr_name, value)
result = func(*args, **kwargs)
setattr(obj, attr_name, backup)
return result
return wrapper
return decorator
def pytest_configure(config):
"""
Hook for changing ``doctest.DocTestRunner.run`` method so that the
``sys.__displayhook__`` calls the given printer function while a doctest
is running.
"""
import doctest
enable_printer = temp_replace(sys, "__displayhook__", printer)
doctest.DocTestRunner.run = enable_printer(doctest.DocTestRunner.run)
# As the public method doctest.DocTestRunner.run replaces sys.displayhook
# by sys.__displayhook__, we could also have changed "displayhook" on the
# _DocTestRunner__run protected method
|
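The plugin leaves printer.repr to be assigned elsewhere; one plausible wiring, e.g. from a conftest.py, points it at pprint (an assumption -- the project may expose a dedicated option for this):

import pprint
from pytest_doctest_custom import printer

printer.repr = pprint.pformat  # doctests now display result values via pprint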
|
de2496c1cd6ba7c9e59110f6e7efa0d1e01304ba
|
eval_hand.py
|
eval_hand.py
|
from collections import defaultdict
#ranks. Add (rank of cards involved / 13) to these values
#for straights and flushes, (highest card / 13) is added
###########
hand_str_dict = {
    'high card': 0,
    'pair': 1,
    'two_pair': 2,
    'three_kind': 3,
    'straight': 4,
    'flush': 5,
    'full_house': 6,
    'four_kind': 7,
    'straight_flush': 8,
}
def eval_hand(hole, community):
result = 0.0
hand_ranks = defaultdict(int)
hand_suits = defaultdict(int)
for card in hole:
rank = card["rank"]
hand_ranks[rank] += 1
suit = card["suit"]
hand_suits[suit] += 1
for card in community:
rank = card["rank"]
hand_ranks[rank] += 1
suit = card["suit"]
hand_suits[suit] += 1
    # incomplete in the original; sketch of pair/trips/quads scoring from rank counts
    for rank, count in hand_ranks.items():
        if count == 2:
            result = max(result, hand_str_dict['pair'] + rank / 13.0)
        elif count == 3:
            result = max(result, hand_str_dict['three_kind'] + rank / 13.0)
        elif count == 4:
            result = max(result, hand_str_dict['four_kind'] + rank / 13.0)
    return result
if __name__ == "__main__":
result = eval_hand([{"rank":11, "suit":'S'},{"rank":2, "suit":'D'}], [{"rank":2, "suit":'D'}])
print result
|
Add code for evaluating a hand. Incomplete.
|
Add code for evaluating a hand. Incomplete.
|
Python
|
mit
|
jason-rossmaier/poker-player-pokerface
|
Add code for evaluating a hand. Incomplete.
|
from collections import defaultdict
#ranks. Add (rank of cards involved / 13) to these values
#for straights and flushes, (highest card / 13) is added
###########
hand_str_dict = {
    'high card': 0,
    'pair': 1,
    'two_pair': 2,
    'three_kind': 3,
    'straight': 4,
    'flush': 5,
    'full_house': 6,
    'four_kind': 7,
    'straight_flush': 8,
}
def eval_hand(hole, community):
result = 0.0
hand_ranks = defaultdict(int)
hand_suits = defaultdict(int)
for card in hole:
rank = card["rank"]
hand_ranks[rank] += 1
suit = card["suit"]
hand_suits[suit] += 1
for card in community:
rank = card["rank"]
hand_ranks[rank] += 1
suit = card["suit"]
hand_suits[suit] += 1
    # incomplete in the original; sketch of pair/trips/quads scoring from rank counts
    for rank, count in hand_ranks.items():
        if count == 2:
            result = max(result, hand_str_dict['pair'] + rank / 13.0)
        elif count == 3:
            result = max(result, hand_str_dict['three_kind'] + rank / 13.0)
        elif count == 4:
            result = max(result, hand_str_dict['four_kind'] + rank / 13.0)
    return result
if __name__ == "__main__":
result = eval_hand([{"rank":11, "suit":'S'},{"rank":2, "suit":'D'}], [{"rank":2, "suit":'D'}])
print result
|
<commit_before><commit_msg>Add code for evaluating a hand. Incomplete.<commit_after>
|
from collections import defaultdict
#ranks. Add (rank of cards involved / 13) to these values
#for straights and flushes, (highest card / 13) is added
###########
hand_str_dict = {
    'high card': 0,
    'pair': 1,
    'two_pair': 2,
    'three_kind': 3,
    'straight': 4,
    'flush': 5,
    'full_house': 6,
    'four_kind': 7,
    'straight_flush': 8,
}
def eval_hand(hole, community):
result = 0.0
hand_ranks = defaultdict(int)
hand_suits = defaultdict(int)
for card in hole:
rank = card["rank"]
hand_ranks[rank] += 1
suit = card["suit"]
hand_suits[suit] += 1
for card in community:
rank = card["rank"]
hand_ranks[rank] += 1
suit = card["suit"]
hand_suits[suit] += 1
    # incomplete in the original; sketch of pair/trips/quads scoring from rank counts
    for rank, count in hand_ranks.items():
        if count == 2:
            result = max(result, hand_str_dict['pair'] + rank / 13.0)
        elif count == 3:
            result = max(result, hand_str_dict['three_kind'] + rank / 13.0)
        elif count == 4:
            result = max(result, hand_str_dict['four_kind'] + rank / 13.0)
    return result
if __name__ == "__main__":
result = eval_hand([{"rank":11, "suit":'S'},{"rank":2, "suit":'D'}], [{"rank":2, "suit":'D'}])
print result
|
Add code for evaluating a hand. Incomplete.from collections import defaultdict
#ranks. Add (rank of cards involved / 13) to these values
#for straights and flushes, (highest card / 13) is added
###########
hand_str_dict = {
    'high card': 0,
    'pair': 1,
    'two_pair': 2,
    'three_kind': 3,
    'straight': 4,
    'flush': 5,
    'full_house': 6,
    'four_kind': 7,
    'straight_flush': 8,
}
def eval_hand(hole, community):
result = 0.0
hand_ranks = defaultdict(int)
hand_suits = defaultdict(int)
for card in hole:
rank = card["rank"]
hand_ranks[rank] += 1
suit = card["suit"]
hand_suits[suit] += 1
for card in community:
rank = card["rank"]
hand_ranks[rank] += 1
suit = card["suit"]
hand_suits[suit] += 1
    # incomplete in the original; sketch of pair/trips/quads scoring from rank counts
    for rank, count in hand_ranks.items():
        if count == 2:
            result = max(result, hand_str_dict['pair'] + rank / 13.0)
        elif count == 3:
            result = max(result, hand_str_dict['three_kind'] + rank / 13.0)
        elif count == 4:
            result = max(result, hand_str_dict['four_kind'] + rank / 13.0)
    return result
if __name__ == "__main__":
result = eval_hand([{"rank":11, "suit":'S'},{"rank":2, "suit":'D'}], [{"rank":2, "suit":'D'}])
print result
|
<commit_before><commit_msg>Add code for evaluating a hand. Incomplete.<commit_after>from collections import defaultdict
#ranks. Add (rank of cards involved / 13) to these values
#for straights and flushes, (highest card / 13) is added
###########
hand_str_dict = {
    'high card': 0,
    'pair': 1,
    'two_pair': 2,
    'three_kind': 3,
    'straight': 4,
    'flush': 5,
    'full_house': 6,
    'four_kind': 7,
    'straight_flush': 8,
}
def eval_hand(hole, community):
result = 0.0
hand_ranks = defaultdict(int)
hand_suits = defaultdict(int)
for card in hole:
rank = card["rank"]
hand_ranks[rank] += 1
suit = card["suit"]
hand_suits[suit] += 1
for card in community:
rank = card["rank"]
hand_ranks[rank] += 1
suit = card["suit"]
hand_suits[suit] += 1
    # incomplete in the original; sketch of pair/trips/quads scoring from rank counts
    for rank, count in hand_ranks.items():
        if count == 2:
            result = max(result, hand_str_dict['pair'] + rank / 13.0)
        elif count == 3:
            result = max(result, hand_str_dict['three_kind'] + rank / 13.0)
        elif count == 4:
            result = max(result, hand_str_dict['four_kind'] + rank / 13.0)
    return result
if __name__ == "__main__":
result = eval_hand([{"rank":11, "suit":'S'},{"rank":2, "suit":'D'}], [{"rank":2, "suit":'D'}])
print result
|
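Continuing in the spirit of the comments above, a hypothetical flush check would add (highest card / 13) on top of the flush rank; note this sketch ignores which suit the high card belongs to:

def flush_value(hand_ranks, hand_suits):
    # simplification: uses the overall highest rank, not the flush suit's own
    if hand_suits and max(hand_suits.values()) >= 5:
        return hand_str_dict['flush'] + max(hand_ranks) / 13.0
    return 0.0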
|
a3927ac00117c1a9dd6896bc013a7c087cb1edcf
|
test/test_interaction_model.py
|
test/test_interaction_model.py
|
from _ebcf_alexa import interaction_model as im
from _ebcf_alexa.incoming_types import Intent
import pytest
@pytest.fixture
def intent_with_bad_slots():
return Intent({
'name': 'DefaultQuery',
'slots': {
'RelativeTo': {
'name': 'RelativeTo',
'value': 'today\'s'
},
'RequestType': {
'name': 'RequestType',
'value': 'strike'
}
}
})
PROMPT_FOR_SLOT_MSG = 'Did you want the workout, strength, or conditioning?'
class TestQueryIntent(object):
def test_bad_slots(self, intent_with_bad_slots):
response = im.query_intent(intent_with_bad_slots)
assert PROMPT_FOR_SLOT_MSG in response.output_speech.ssml
assert not response.should_end
assert 'intents' in response.attributes
assert 'DefaultQuery' in response.attributes['intents']
assert 'RelativeTo' in response.attributes['intents']['DefaultQuery']['slots']
assert response.attributes['intents']['DefaultQuery']['slots']['RelativeTo']['value'] == 'today'
assert PROMPT_FOR_SLOT_MSG in response.reprompt.ssml
|
Add failing unit test for query_intent to test RequestType reprompt
|
Add failing unit test for query_intent to test RequestType reprompt
|
Python
|
mit
|
dmotles/ebcf-alexa
|
Add failing unit test for query_intent to test RequestType reprompt
|
from _ebcf_alexa import interaction_model as im
from _ebcf_alexa.incoming_types import Intent
import pytest
@pytest.fixture
def intent_with_bad_slots():
return Intent({
'name': 'DefaultQuery',
'slots': {
'RelativeTo': {
'name': 'RelativeTo',
'value': 'today\'s'
},
'RequestType': {
'name': 'RequestType',
'value': 'strike'
}
}
})
PROMPT_FOR_SLOT_MSG = 'Did you want the workout, strength, or conditioning?'
class TestQueryIntent(object):
def test_bad_slots(self, intent_with_bad_slots):
response = im.query_intent(intent_with_bad_slots)
assert PROMPT_FOR_SLOT_MSG in response.output_speech.ssml
assert not response.should_end
assert 'intents' in response.attributes
assert 'DefaultQuery' in response.attributes['intents']
assert 'RelativeTo' in response.attributes['intents']['DefaultQuery']['slots']
assert response.attributes['intents']['DefaultQuery']['slots']['RelativeTo']['value'] == 'today'
assert PROMPT_FOR_SLOT_MSG in response.reprompt.ssml
|
<commit_before><commit_msg>Add failing unit test for query_intent to test RequestType reprompt<commit_after>
|
from _ebcf_alexa import interaction_model as im
from _ebcf_alexa.incoming_types import Intent
import pytest
@pytest.fixture
def intent_with_bad_slots():
return Intent({
'name': 'DefaultQuery',
'slots': {
'RelativeTo': {
'name': 'RelativeTo',
'value': 'today\'s'
},
'RequestType': {
'name': 'RequestType',
'value': 'strike'
}
}
})
PROMPT_FOR_SLOT_MSG = 'Did you want the workout, strength, or conditioning?'
class TestQueryIntent(object):
def test_bad_slots(self, intent_with_bad_slots):
response = im.query_intent(intent_with_bad_slots)
assert PROMPT_FOR_SLOT_MSG in response.output_speech.ssml
assert not response.should_end
assert 'intents' in response.attributes
assert 'DefaultQuery' in response.attributes['intents']
assert 'RelativeTo' in response.attributes['intents']['DefaultQuery']['slots']
assert response.attributes['intents']['DefaultQuery']['slots']['RelativeTo']['value'] == 'today'
assert PROMPT_FOR_SLOT_MSG in response.reprompt.ssml
|
Add failing unit test for query_intent to test RequestType repromptfrom _ebcf_alexa import interaction_model as im
from _ebcf_alexa.incoming_types import Intent
import pytest
@pytest.fixture
def intent_with_bad_slots():
return Intent({
'name': 'DefaultQuery',
'slots': {
'RelativeTo': {
'name': 'RelativeTo',
'value': 'today\'s'
},
'RequestType': {
'name': 'RequestType',
'value': 'strike'
}
}
})
PROMPT_FOR_SLOT_MSG = 'Did you want the workout, strength, or conditioning?'
class TestQueryIntent(object):
def test_bad_slots(self, intent_with_bad_slots):
response = im.query_intent(intent_with_bad_slots)
assert PROMPT_FOR_SLOT_MSG in response.output_speech.ssml
assert not response.should_end
assert 'intents' in response.attributes
assert 'DefaultQuery' in response.attributes['intents']
assert 'RelativeTo' in response.attributes['intents']['DefaultQuery']['slots']
assert response.attributes['intents']['DefaultQuery']['slots']['RelativeTo']['value'] == 'today'
assert PROMPT_FOR_SLOT_MSG in response.reprompt.ssml
|
<commit_before><commit_msg>Add failing unit test for query_intent to test RequestType reprompt<commit_after>from _ebcf_alexa import interaction_model as im
from _ebcf_alexa.incoming_types import Intent
import pytest
@pytest.fixture
def intent_with_bad_slots():
return Intent({
'name': 'DefaultQuery',
'slots': {
'RelativeTo': {
'name': 'RelativeTo',
'value': 'today\'s'
},
'RequestType': {
'name': 'RequestType',
'value': 'strike'
}
}
})
PROMPT_FOR_SLOT_MSG = 'Did you want the workout, strength, or conditioning?'
class TestQueryIntent(object):
def test_bad_slots(self, intent_with_bad_slots):
response = im.query_intent(intent_with_bad_slots)
assert PROMPT_FOR_SLOT_MSG in response.output_speech.ssml
assert not response.should_end
assert 'intents' in response.attributes
assert 'DefaultQuery' in response.attributes['intents']
assert 'RelativeTo' in response.attributes['intents']['DefaultQuery']['slots']
assert response.attributes['intents']['DefaultQuery']['slots']['RelativeTo']['value'] == 'today'
assert PROMPT_FOR_SLOT_MSG in response.reprompt.ssml
|
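A hedged companion sketch for the behaviour the commit title names: the test above only pins down the normalised RelativeTo slot, so something like the following (reusing the fixture and constant above, and assuming the invalid 'strike' RequestType triggers the same slot prompt) would assert the RequestType reprompt explicitly.

def test_bad_request_type_reprompts(intent_with_bad_slots):
    # 'strike' is not a valid RequestType, so the skill should ask again
    response = im.query_intent(intent_with_bad_slots)
    assert PROMPT_FOR_SLOT_MSG in response.reprompt.ssml
    assert not response.should_end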
|
bd37f8c88e1f999160fcec2c4ce4370a8d189d71
|
tests/test_request.py
|
tests/test_request.py
|
import datetime
from borica import Request
class FakeSignature:
def sign(self, content):
return 'G' * 128
def test_general_request_base64_formatting():
request = Request(
transaction_type=10,
transaction_amount='99.99',
transaction_timestamp=datetime.datetime.fromtimestamp(0),
terminal_id='12345678',
order_id='12345678',
order_summary='Money for fun!',
signature=FakeSignature()
)
expected_request = (
"MTAxOTcwMDEwMTAyMDAwMDAwMDAwMDAwOTk5OTEyMzQ1Njc4MTIzNDU2NzggICAgICAg"
"TW9uZXkgZm9yIGZ1biEgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
"ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
"ICAgICAgICAgICAgICAgICAgICAgICBFTjEuMEdHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dH")
assert expected_request == str(request)
|
Add general base64 formatting test
|
Add general base64 formatting test
|
Python
|
mit
|
IOEra/borica
|
Add general base64 formatting test
|
import datetime
from borica import Request
class FakeSignature:
def sign(self, content):
return 'G' * 128
def test_general_request_base64_formatting():
request = Request(
transaction_type=10,
transaction_amount='99.99',
transaction_timestamp=datetime.datetime.fromtimestamp(0),
terminal_id='12345678',
order_id='12345678',
order_summary='Money for fun!',
signature=FakeSignature()
)
expected_request = (
"MTAxOTcwMDEwMTAyMDAwMDAwMDAwMDAwOTk5OTEyMzQ1Njc4MTIzNDU2NzggICAgICAg"
"TW9uZXkgZm9yIGZ1biEgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
"ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
"ICAgICAgICAgICAgICAgICAgICAgICBFTjEuMEdHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dH")
assert expected_request == str(request)
|
<commit_before><commit_msg>Add general base64 formatting test<commit_after>
|
import datetime
from borica import Request
class FakeSignature:
def sign(self, content):
return 'G' * 128
def test_general_request_base64_formatting():
request = Request(
transaction_type=10,
transaction_amount='99.99',
transaction_timestamp=datetime.datetime.fromtimestamp(0),
terminal_id='12345678',
order_id='12345678',
order_summary='Money for fun!',
signature=FakeSignature()
)
expected_request = (
"MTAxOTcwMDEwMTAyMDAwMDAwMDAwMDAwOTk5OTEyMzQ1Njc4MTIzNDU2NzggICAgICAg"
"TW9uZXkgZm9yIGZ1biEgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
"ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
"ICAgICAgICAgICAgICAgICAgICAgICBFTjEuMEdHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dH")
assert expected_request == str(request)
|
Add general base64 formatting testimport datetime
from borica import Request
class FakeSignature:
def sign(self, content):
return 'G' * 128
def test_general_request_base64_formatting():
request = Request(
transaction_type=10,
transaction_amount='99.99',
transaction_timestamp=datetime.datetime.fromtimestamp(0),
terminal_id='12345678',
order_id='12345678',
order_summary='Money for fun!',
signature=FakeSignature()
)
expected_request = (
"MTAxOTcwMDEwMTAyMDAwMDAwMDAwMDAwOTk5OTEyMzQ1Njc4MTIzNDU2NzggICAgICAg"
"TW9uZXkgZm9yIGZ1biEgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
"ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
"ICAgICAgICAgICAgICAgICAgICAgICBFTjEuMEdHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dH")
assert expected_request == str(request)
|
<commit_before><commit_msg>Add general base64 formatting test<commit_after>import datetime
from borica import Request
class FakeSignature:
def sign(self, content):
return 'G' * 128
def test_general_request_base64_formatting():
request = Request(
transaction_type=10,
transaction_amount='99.99',
transaction_timestamp=datetime.datetime.fromtimestamp(0),
terminal_id='12345678',
order_id='12345678',
order_summary='Money for fun!',
signature=FakeSignature()
)
expected_request = (
"MTAxOTcwMDEwMTAyMDAwMDAwMDAwMDAwOTk5OTEyMzQ1Njc4MTIzNDU2NzggICAgICAg"
"TW9uZXkgZm9yIGZ1biEgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
"ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
"ICAgICAgICAgICAgICAgICAgICAgICBFTjEuMEdHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
"R0dH")
assert expected_request == str(request)
|
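The expected value is plain base64 over a fixed-width BORICA message, so decoding it makes the framing inspectable. A hedged sketch (the field readings in the comments are inferred from the test inputs, not from the BORICA spec, and it assumes the signature is the trailing field):

import base64

expected_request = (
    "MTAxOTcwMDEwMTAyMDAwMDAwMDAwMDAwOTk5OTEyMzQ1Njc4MTIzNDU2NzggICAgICAg"
    "TW9uZXkgZm9yIGZ1biEgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
    "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg"
    "ICAgICAgICAgICAgICAgICAgICAgICBFTjEuMEdHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
    "R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
    "R0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dHR0dH"
    "R0dH")
decoded = base64.b64decode(expected_request).decode('ascii')
assert decoded.startswith('10')       # the transaction_type from the test
assert '12345678' in decoded          # terminal/order id
assert decoded.endswith('G' * 128)    # FakeSignature's 128-char signature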
|
21ddfc1802495f965524b163258c36231d8ee898
|
bluebottle/orders/migrations/0005_auto_20171003_1112.py
|
bluebottle/orders/migrations/0005_auto_20171003_1112.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-10-03 09:12
from __future__ import unicode_literals
import datetime
from django.db import migrations
from bluebottle.utils.utils import FSMTransition, StatusDefinition
def mark_as_failed(apps, schema_editor):
Order = apps.get_model('orders', 'Order')
orders = Order.objects.filter(
status=StatusDefinition.CREATED,
created__lte=datetime.date.today() - datetime.timedelta(days=5)
)
orders.update(status=StatusDefinition.FAILED)
class Migration(migrations.Migration):
dependencies = [
('orders', '0004_add_group_permissions'),
]
operations = [
migrations.RunPython(mark_as_failed)
]
|
Mark orders older than 5 days that are still 'new' as 'failed'. This fixes rewards that are shown as "claimed", because old orders were not failed automatically.
|
Mark orders older than 5 days that are still 'new' as 'failed'. This
fixes rewards that are shown as "claimed", because old orders were not
failed automatically.
BB-10849 #resolve
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Mark orders older than 5 days that are still 'new' as 'failed'. This
fixes rewards that are shown as "claimed", because old orders were not
failed automatically.
BB-10849 #resolve
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-10-03 09:12
from __future__ import unicode_literals
import datetime
from django.db import migrations
from bluebottle.utils.utils import FSMTransition, StatusDefinition
def mark_as_failed(apps, schema_editor):
Order = apps.get_model('orders', 'Order')
orders = Order.objects.filter(
status=StatusDefinition.CREATED,
created__lte=datetime.date.today() - datetime.timedelta(days=5)
)
orders.update(status=StatusDefinition.FAILED)
class Migration(migrations.Migration):
dependencies = [
('orders', '0004_add_group_permissions'),
]
operations = [
migrations.RunPython(mark_as_failed)
]
|
<commit_before><commit_msg>Mark orders older than 5 days that are still 'new' as 'failed'. This
fixes rewards that are shown as "claimed", because old orders were not
failed automatically.
BB-10849 #resolve<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-10-03 09:12
from __future__ import unicode_literals
import datetime
from django.db import migrations
from bluebottle.utils.utils import FSMTransition, StatusDefinition
def mark_as_failed(apps, schema_editor):
Order = apps.get_model('orders', 'Order')
orders = Order.objects.filter(
status=StatusDefinition.CREATED,
created__lte=datetime.date.today() - datetime.timedelta(days=5)
)
orders.update(status=StatusDefinition.FAILED)
class Migration(migrations.Migration):
dependencies = [
('orders', '0004_add_group_permissions'),
]
operations = [
migrations.RunPython(mark_as_failed)
]
|
Mark orders older than 5 days that are still 'new' as 'failed'. This
fixes rewards that are shown as "claimed", because old orders were not
failed automatically.
BB-10849 #resolve# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-10-03 09:12
from __future__ import unicode_literals
import datetime
from django.db import migrations
from bluebottle.utils.utils import FSMTransition, StatusDefinition
def mark_as_failed(apps, schema_editor):
Order = apps.get_model('orders', 'Order')
orders = Order.objects.filter(
status=StatusDefinition.CREATED,
created__lte=datetime.date.today() - datetime.timedelta(days=5)
)
orders.update(status=StatusDefinition.FAILED)
class Migration(migrations.Migration):
dependencies = [
('orders', '0004_add_group_permissions'),
]
operations = [
migrations.RunPython(mark_as_failed)
]
|
<commit_before><commit_msg>Mark orders older than 5 days that are still 'new' as 'failed'. This
fixes rewards that are shown as "claimed", because old orders were not
failed automatically.
BB-10849 #resolve<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-10-03 09:12
from __future__ import unicode_literals
import datetime
from django.db import migrations
from bluebottle.utils.utils import FSMTransition, StatusDefinition
def mark_as_failed(apps, schema_editor):
Order = apps.get_model('orders', 'Order')
orders = Order.objects.filter(
status=StatusDefinition.CREATED,
created__lte=datetime.date.today() - datetime.timedelta(days=5)
)
orders.update(status=StatusDefinition.FAILED)
class Migration(migrations.Migration):
dependencies = [
('orders', '0004_add_group_permissions'),
]
operations = [
migrations.RunPython(mark_as_failed)
]
|
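RunPython with only a forward function leaves this migration irreversible, so `migrate` cannot roll back past it. A hedged sketch of the usual pattern (a no-op reverse is an assumption about intent; the original CREATED statuses genuinely cannot be restored):

from django.db import migrations

# mark_as_failed is the function defined in the migration above
operations = [
    migrations.RunPython(mark_as_failed, migrations.RunPython.noop),
]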
|
6ac87bf5f6b86f507caa7764ed949d5b27d18517
|
tools/jinja_pylint.py
|
tools/jinja_pylint.py
|
#!/usr/bin/env python
'''
Run pylint on a Jinja 2 template.
'''
import jinja2, os, subprocess, sys, tempfile
# Clagged pieces from the runner.
START_BLOCK = '/*-'
END_BLOCK = '-*/'
START_VARIABLE = '/*?'
END_VARIABLE = '?*/'
START_COMMENT = '/*#'
END_COMMENT = '#*/'
def main(argv, out, err):
if len(argv) < 2 or argv[1] in ['--help', '-?']:
print >>err, 'usage: %s file pylint_args...' % argv[0]
return -1
if not os.path.exists(argv[1]):
print >>err, '%s not found' % argv[1]
return -1
root, template = os.path.split(os.path.abspath(argv[1]))
# Construct a Jinja environment that matches that of the runner.
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(root),
extensions=["jinja2.ext.do", "jinja2.ext.loopcontrols"],
block_start_string=START_BLOCK,
block_end_string=END_BLOCK,
variable_start_string=START_VARIABLE,
variable_end_string=END_VARIABLE,
comment_start_string=START_COMMENT,
comment_end_string=END_COMMENT)
# Compile the template requested to a temporary directory.
tmp = tempfile.mkdtemp()
print >>out, 'compiling to %s...' % tmp
env.compile_templates(tmp, filter_func=lambda x: x == template, zip=None,
ignore_errors=False)
# Find it and run pylint on it.
py = os.path.join(tmp, os.listdir(tmp)[0])
print >>out, 'running pylint on %s...' % py
return subprocess.call(['pylint', py] + argv[2:])
if __name__ == '__main__':
sys.exit(main(sys.argv, sys.stdout, sys.stderr))
|
Add a tool for running pylint on Jinja templates.
|
Add a tool for running pylint on Jinja templates.
Closes JIRA CAMKES-315
|
Python
|
bsd-2-clause
|
agacek/camkes-tool,agacek/camkes-tool,smaccm/camkes-tool,agacek/camkes-tool,smaccm/camkes-tool,smaccm/camkes-tool,smaccm/camkes-tool
|
Add a tool for running pylint on Jinja templates.
Closes JIRA CAMKES-315
|
#!/usr/bin/env python
'''
Run pylint on a Jinja 2 template.
'''
import jinja2, os, subprocess, sys, tempfile
# Clagged pieces from the runner.
START_BLOCK = '/*-'
END_BLOCK = '-*/'
START_VARIABLE = '/*?'
END_VARIABLE = '?*/'
START_COMMENT = '/*#'
END_COMMENT = '#*/'
def main(argv, out, err):
if len(argv) < 2 or argv[1] in ['--help', '-?']:
print >>err, 'usage: %s file pylint_args...' % argv[0]
return -1
if not os.path.exists(argv[1]):
print >>err, '%s not found' % argv[1]
return -1
root, template = os.path.split(os.path.abspath(argv[1]))
# Construct a Jinja environment that matches that of the runner.
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(root),
extensions=["jinja2.ext.do", "jinja2.ext.loopcontrols"],
block_start_string=START_BLOCK,
block_end_string=END_BLOCK,
variable_start_string=START_VARIABLE,
variable_end_string=END_VARIABLE,
comment_start_string=START_COMMENT,
comment_end_string=END_COMMENT)
# Compile the template requested to a temporary directory.
tmp = tempfile.mkdtemp()
print >>out, 'compiling to %s...' % tmp
env.compile_templates(tmp, filter_func=lambda x: x == template, zip=None,
ignore_errors=False)
# Find it and run pylint on it.
py = os.path.join(tmp, os.listdir(tmp)[0])
print >>out, 'running pylint on %s...' % py
return subprocess.call(['pylint', py] + argv[2:])
if __name__ == '__main__':
sys.exit(main(sys.argv, sys.stdout, sys.stderr))
|
<commit_before><commit_msg>Add a tool for running pylint on Jinja templates.
Closes JIRA CAMKES-315<commit_after>
|
#!/usr/bin/env python
'''
Run pylint on a Jinja 2 template.
'''
import jinja2, os, subprocess, sys, tempfile
# Clagged pieces from the runner.
START_BLOCK = '/*-'
END_BLOCK = '-*/'
START_VARIABLE = '/*?'
END_VARIABLE = '?*/'
START_COMMENT = '/*#'
END_COMMENT = '#*/'
def main(argv, out, err):
if len(argv) < 2 or argv[1] in ['--help', '-?']:
print >>err, 'usage: %s file pylint_args...' % argv[0]
return -1
if not os.path.exists(argv[1]):
print >>err, '%s not found' % argv[1]
return -1
root, template = os.path.split(os.path.abspath(argv[1]))
# Construct a Jinja environment that matches that of the runner.
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(root),
extensions=["jinja2.ext.do", "jinja2.ext.loopcontrols"],
block_start_string=START_BLOCK,
block_end_string=END_BLOCK,
variable_start_string=START_VARIABLE,
variable_end_string=END_VARIABLE,
comment_start_string=START_COMMENT,
comment_end_string=END_COMMENT)
# Compile the template requested to a temporary directory.
tmp = tempfile.mkdtemp()
print >>out, 'compiling to %s...' % tmp
env.compile_templates(tmp, filter_func=lambda x: x == template, zip=None,
ignore_errors=False)
# Find it and run pylint on it.
py = os.path.join(tmp, os.listdir(tmp)[0])
print >>out, 'running pylint on %s...' % py
return subprocess.call(['pylint', py] + argv[2:])
if __name__ == '__main__':
sys.exit(main(sys.argv, sys.stdout, sys.stderr))
|
Add a tool for running pylint on Jinja templates.
Closes JIRA CAMKES-315#!/usr/bin/env python
'''
Run pylint on a Jinja 2 template.
'''
import jinja2, os, subprocess, sys, tempfile
# Clagged pieces from the runner.
START_BLOCK = '/*-'
END_BLOCK = '-*/'
START_VARIABLE = '/*?'
END_VARIABLE = '?*/'
START_COMMENT = '/*#'
END_COMMENT = '#*/'
def main(argv, out, err):
if len(argv) < 2 or argv[1] in ['--help', '-?']:
print >>err, 'usage: %s file pylint_args...' % argv[0]
return -1
if not os.path.exists(argv[1]):
print >>err, '%s not found' % argv[1]
return -1
root, template = os.path.split(os.path.abspath(argv[1]))
# Construct a Jinja environment that matches that of the runner.
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(root),
extensions=["jinja2.ext.do", "jinja2.ext.loopcontrols"],
block_start_string=START_BLOCK,
block_end_string=END_BLOCK,
variable_start_string=START_VARIABLE,
variable_end_string=END_VARIABLE,
comment_start_string=START_COMMENT,
comment_end_string=END_COMMENT)
# Compile the template requested to a temporary directory.
tmp = tempfile.mkdtemp()
print >>out, 'compiling to %s...' % tmp
env.compile_templates(tmp, filter_func=lambda x: x == template, zip=None,
ignore_errors=False)
# Find it and run pylint on it.
py = os.path.join(tmp, os.listdir(tmp)[0])
print >>out, 'running pylint on %s...' % py
return subprocess.call(['pylint', py] + argv[2:])
if __name__ == '__main__':
sys.exit(main(sys.argv, sys.stdout, sys.stderr))
|
<commit_before><commit_msg>Add a tool for running pylint on Jinja templates.
Closes JIRA CAMKES-315<commit_after>#!/usr/bin/env python
'''
Run pylint on a Jinja 2 template.
'''
import jinja2, os, subprocess, sys, tempfile
# Clagged pieces from the runner.
START_BLOCK = '/*-'
END_BLOCK = '-*/'
START_VARIABLE = '/*?'
END_VARIABLE = '?*/'
START_COMMENT = '/*#'
END_COMMENT = '#*/'
def main(argv, out, err):
if len(argv) < 2 or argv[1] in ['--help', '-?']:
print >>err, 'usage: %s file pylint_args...' % argv[0]
return -1
if not os.path.exists(argv[1]):
print >>err, '%s not found' % argv[1]
return -1
root, template = os.path.split(os.path.abspath(argv[1]))
# Construct a Jinja environment that matches that of the runner.
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(root),
extensions=["jinja2.ext.do", "jinja2.ext.loopcontrols"],
block_start_string=START_BLOCK,
block_end_string=END_BLOCK,
variable_start_string=START_VARIABLE,
variable_end_string=END_VARIABLE,
comment_start_string=START_COMMENT,
comment_end_string=END_COMMENT)
# Compile the template requested to a temporary directory.
tmp = tempfile.mkdtemp()
print >>out, 'compiling to %s...' % tmp
env.compile_templates(tmp, filter_func=lambda x: x == template, zip=None,
ignore_errors=False)
# Find it and run pylint on it.
py = os.path.join(tmp, os.listdir(tmp)[0])
print >>out, 'running pylint on %s...' % py
return subprocess.call(['pylint', py] + argv[2:])
if __name__ == '__main__':
sys.exit(main(sys.argv, sys.stdout, sys.stderr))
|
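A hedged usage sketch of the tool; the template path and the extra pylint flag are illustrative, and the call mirrors the main(argv, out, err) signature above (run under Python 2, matching the script's print syntax):

import sys
from jinja_pylint import main  # assumes tools/ is on sys.path

# Compile the named template, forwarding any remaining args to pylint.
rc = main(['jinja_pylint.py', 'some.template.c', '--disable=C0111'],
          sys.stdout, sys.stderr)
sys.exit(rc)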
|
289e5856e15d06b15136a9003d7a3754c0a773f8
|
utils/import_srweb.py
|
utils/import_srweb.py
|
import argparse
import yaml
import sys
import re
parser = argparse.ArgumentParser()
parser.add_argument('source', help='Path to source file',
type=argparse.FileType('r'))
parser.add_argument('-o', '--output', type=argparse.FileType('w'),
default=sys.stdout, help='Output file')
parser.add_argument('-l', '--layout', help='Page layout',
default='main')
arguments = parser.parse_args()
PRE_START_REGEX = re.compile('^<pre><code class="override-lang ([a-z0-9_-]+)">(.*)')
def transliterate_markdown(src, dst):
in_code_block = False
for line in src:
if line.startswith('~~~'):
in_code_block = not in_code_block
if in_code_block:
dst.write('{% highlight python %}\n')
else:
dst.write('{% endhighlight %}\n')
elif line.startswith('</code></pre>'):
dst.write('{% endhighlight %}\n')
else:
ps = PRE_START_REGEX.match(line)
if ps is not None:
dst.write('{{% highlight {} %}}\n'.format(ps.group(1)))
dst.write(ps.group(2))
dst.write('\n')
else:
dst.write(line)
# Parse metadata
metadata = {}
MD_REGEX = re.compile('^//([A-Z_]+)\s*:\s*(.*)\s*$')
while True:
md_line = arguments.source.readline()
match = MD_REGEX.match(md_line)
if match is None:
break
if match.group(2):
metadata[match.group(1)] = match.group(2)
content_type = metadata.get('CONTENT_TYPE', 'markdown')
if content_type not in ('markdown', 'md'):
print("Tool can only convert Markdown source.", file=sys.stdout)
exit(1)
yaml_data = {'layout': arguments.layout}
yaml_data['title'] = metadata.get('TITLE', 'Website')
if 'DESCRIPTION' in metadata:
yaml_data['description'] = metadata['DESCRIPTION']
arguments.output.write('---\n')
yaml.dump(yaml_data, arguments.output, default_flow_style=False)
arguments.output.write('---\n')
transliterate_markdown(arguments.source, arguments.output)
|
Add a tool to import Markdown from srweb
|
Add a tool to import Markdown from srweb
|
Python
|
mit
|
prophile/srweb-jekyll,prophile/srweb-jekyll,PeterJCLaw/srweb-jekyll,prophile/srweb-jekyll,PeterJCLaw/srweb-jekyll,prophile/srweb-jekyll,PeterJCLaw/srweb-jekyll,prophile/srweb-jekyll,PeterJCLaw/srweb-jekyll,PeterJCLaw/srweb-jekyll,PeterJCLaw/srweb-jekyll,prophile/srweb-jekyll
|
Add a tool to import Markdown from srweb
|
import argparse
import yaml
import sys
import re
parser = argparse.ArgumentParser()
parser.add_argument('source', help='Path to source file',
type=argparse.FileType('r'))
parser.add_argument('-o', '--output', type=argparse.FileType('w'),
default=sys.stdout, help='Output file')
parser.add_argument('-l', '--layout', help='Page layout',
default='main')
arguments = parser.parse_args()
PRE_START_REGEX = re.compile('^<pre><code class="override-lang ([a-z0-9_-]+)">(.*)')
def transliterate_markdown(src, dst):
in_code_block = False
for line in src:
if line.startswith('~~~'):
in_code_block = not in_code_block
if in_code_block:
dst.write('{% highlight python %}\n')
else:
dst.write('{% endhighlight %}\n')
elif line.startswith('</code></pre>'):
dst.write('{% endhighlight %}\n')
else:
ps = PRE_START_REGEX.match(line)
if ps is not None:
dst.write('{{% highlight {} %}}\n'.format(ps.group(1)))
dst.write(ps.group(2))
dst.write('\n')
else:
dst.write(line)
# Parse metadata
metadata = {}
MD_REGEX = re.compile('^//([A-Z_]+)\s*:\s*(.*)\s*$')
while True:
md_line = arguments.source.readline()
match = MD_REGEX.match(md_line)
if match is None:
break
if match.group(2):
metadata[match.group(1)] = match.group(2)
content_type = metadata.get('CONTENT_TYPE', 'markdown')
if content_type not in ('markdown', 'md'):
print("Tool can only convert Markdown source.", file=sys.stdout)
exit(1)
yaml_data = {'layout': arguments.layout}
yaml_data['title'] = metadata.get('TITLE', 'Website')
if 'DESCRIPTION' in metadata:
yaml_data['description'] = metadata['DESCRIPTION']
arguments.output.write('---\n')
yaml.dump(yaml_data, arguments.output, default_flow_style=False)
arguments.output.write('---\n')
transliterate_markdown(arguments.source, arguments.output)
|
<commit_before><commit_msg>Add a tool to import Markdown from srweb<commit_after>
|
import argparse
import yaml
import sys
import re
parser = argparse.ArgumentParser()
parser.add_argument('source', help='Path to source file',
type=argparse.FileType('r'))
parser.add_argument('-o', '--output', type=argparse.FileType('w'),
default=sys.stdout, help='Output file')
parser.add_argument('-l', '--layout', help='Page layout',
default='main')
arguments = parser.parse_args()
PRE_START_REGEX = re.compile('^<pre><code class="override-lang ([a-z0-9_-]+)">(.*)')
def transliterate_markdown(src, dst):
in_code_block = False
for line in src:
if line.startswith('~~~'):
in_code_block = not in_code_block
if in_code_block:
dst.write('{% highlight python %}\n')
else:
dst.write('{% endhighlight %}\n')
elif line.startswith('</code></pre>'):
dst.write('{% endhighlight %}\n')
else:
ps = PRE_START_REGEX.match(line)
if ps is not None:
dst.write('{{% highlight {} %}}\n'.format(ps.group(1)))
dst.write(ps.group(2))
dst.write('\n')
else:
dst.write(line)
# Parse metadata
metadata = {}
MD_REGEX = re.compile('^//([A-Z_]+)\s*:\s*(.*)\s*$')
while True:
md_line = arguments.source.readline()
match = MD_REGEX.match(md_line)
if match is None:
break
if match.group(2):
metadata[match.group(1)] = match.group(2)
content_type = metadata.get('CONTENT_TYPE', 'markdown')
if content_type not in ('markdown', 'md'):
print("Tool can only convert Markdown source.", file=sys.stdout)
exit(1)
yaml_data = {'layout': arguments.layout}
yaml_data['title'] = metadata.get('TITLE', 'Website')
if 'DESCRIPTION' in metadata:
yaml_data['description'] = metadata['DESCRIPTION']
arguments.output.write('---\n')
yaml.dump(yaml_data, arguments.output, default_flow_style=False)
arguments.output.write('---\n')
transliterate_markdown(arguments.source, arguments.output)
|
Add a tool to import Markdown from srwebimport argparse
import yaml
import sys
import re
parser = argparse.ArgumentParser()
parser.add_argument('source', help='Path to source file',
type=argparse.FileType('r'))
parser.add_argument('-o', '--output', type=argparse.FileType('w'),
default=sys.stdout, help='Output file')
parser.add_argument('-l', '--layout', help='Page layout',
default='main')
arguments = parser.parse_args()
PRE_START_REGEX = re.compile('^<pre><code class="override-lang ([a-z0-9_-]+)">(.*)')
def transliterate_markdown(src, dst):
in_code_block = False
for line in src:
if line.startswith('~~~'):
in_code_block = not in_code_block
if in_code_block:
dst.write('{% highlight python %}\n')
else:
dst.write('{% endhighlight %}\n')
elif line.startswith('</code></pre>'):
dst.write('{% endhighlight %}\n')
else:
ps = PRE_START_REGEX.match(line)
if ps is not None:
dst.write('{{% highlight {} %}}\n'.format(ps.group(1)))
dst.write(ps.group(2))
dst.write('\n')
else:
dst.write(line)
# Parse metadata
metadata = {}
MD_REGEX = re.compile('^//([A-Z_]+)\s*:\s*(.*)\s*$')
while True:
md_line = arguments.source.readline()
match = MD_REGEX.match(md_line)
if match is None:
break
if match.group(2):
metadata[match.group(1)] = match.group(2)
content_type = metadata.get('CONTENT_TYPE', 'markdown')
if content_type not in ('markdown', 'md'):
print("Tool can only convert Markdown source.", file=sys.stdout)
exit(1)
yaml_data = {'layout': arguments.layout}
yaml_data['title'] = metadata.get('TITLE', 'Website')
if 'DESCRIPTION' in metadata:
yaml_data['description'] = metadata['DESCRIPTION']
arguments.output.write('---\n')
yaml.dump(yaml_data, arguments.output, default_flow_style=False)
arguments.output.write('---\n')
transliterate_markdown(arguments.source, arguments.output)
|
<commit_before><commit_msg>Add a tool to import Markdown from srweb<commit_after>import argparse
import yaml
import sys
import re
parser = argparse.ArgumentParser()
parser.add_argument('source', help='Path to source file',
type=argparse.FileType('r'))
parser.add_argument('-o', '--output', type=argparse.FileType('w'),
default=sys.stdout, help='Output file')
parser.add_argument('-l', '--layout', help='Page layout',
default='main')
arguments = parser.parse_args()
PRE_START_REGEX = re.compile('^<pre><code class="override-lang ([a-z0-9_-]+)">(.*)')
def transliterate_markdown(src, dst):
in_code_block = False
for line in src:
if line.startswith('~~~'):
in_code_block = not in_code_block
if in_code_block:
dst.write('{% highlight python %}\n')
else:
dst.write('{% endhighlight %}\n')
elif line.startswith('</code></pre>'):
dst.write('{% endhighlight %}\n')
else:
ps = PRE_START_REGEX.match(line)
if ps is not None:
dst.write('{{% highlight {} %}}\n'.format(ps.group(1)))
dst.write(ps.group(2))
dst.write('\n')
else:
dst.write(line)
# Parse metadata
metadata = {}
MD_REGEX = re.compile('^//([A-Z_]+)\s*:\s*(.*)\s*$')
while True:
md_line = arguments.source.readline()
match = MD_REGEX.match(md_line)
if match is None:
break
if match.group(2):
metadata[match.group(1)] = match.group(2)
content_type = metadata.get('CONTENT_TYPE', 'markdown')
if content_type not in ('markdown', 'md'):
print("Tool can only convert Markdown source.", file=sys.stdout)
exit(1)
yaml_data = {'layout': arguments.layout}
yaml_data['title'] = metadata.get('TITLE', 'Website')
if 'DESCRIPTION' in metadata:
yaml_data['description'] = metadata['DESCRIPTION']
arguments.output.write('---\n')
yaml.dump(yaml_data, arguments.output, default_flow_style=False)
arguments.output.write('---\n')
transliterate_markdown(arguments.source, arguments.output)
|
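A hedged sketch of the //KEY: value header block the metadata loop consumes, exercised through the same regex (written as a raw string here; the page values are made up):

import re

MD_REGEX = re.compile(r'^//([A-Z_]+)\s*:\s*(.*)\s*$')  # same pattern as the script

header = [
    '//TITLE: Example page\n',
    '//CONTENT_TYPE: markdown\n',
    'Body starts here, so the loop breaks.\n',
]
metadata = {}
for line in header:
    match = MD_REGEX.match(line)
    if match is None:
        break
    if match.group(2):
        metadata[match.group(1)] = match.group(2)
assert metadata == {'TITLE': 'Example page', 'CONTENT_TYPE': 'markdown'}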
|
0388e7323452f7b2a94ac9593ffce07e35012e09
|
tests/aws_disk_integration_test.py
|
tests/aws_disk_integration_test.py
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for AWS scratch disks."""
import os
import unittest
from perfkitbenchmarker import pkb
from perfkitbenchmarker import test_util
MOUNT_POINT = '/scratch'
@unittest.skipUnless('PERFKIT_INTEGRATION' in os.environ,
'PERFKIT_INTEGRATION not in environment')
class AwsScratchDiskIntegrationTest(unittest.TestCase):
def setUp(self):
pkb.SetUpPKB()
def testEBSStandard(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm4.large',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'standard',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testEBSGP(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm4.large',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'remote_ssd',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testLocalSSD(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm3.medium',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'local',
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
|
Add AWS disk integration tests
|
Add AWS disk integration tests
|
Python
|
apache-2.0
|
GoogleCloudPlatform/PerfKitBenchmarker,meteorfox/PerfKitBenchmarker,AdamIsrael/PerfKitBenchmarker,mateusz-blaszkowski/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,kivio/PerfKitBenchmarker,syed/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,kivio/PerfKitBenchmarker,AdamIsrael/PerfKitBenchmarker,syed/PerfKitBenchmarker,meteorfox/PerfKitBenchmarker,mateusz-blaszkowski/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker
|
Add AWS disk integration tests
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for AWS scratch disks."""
import os
import unittest
from perfkitbenchmarker import pkb
from perfkitbenchmarker import test_util
MOUNT_POINT = '/scratch'
@unittest.skipUnless('PERFKIT_INTEGRATION' in os.environ,
'PERFKIT_INTEGRATION not in environment')
class AwsScratchDiskIntegrationTest(unittest.TestCase):
def setUp(self):
pkb.SetUpPKB()
def testEBSStandard(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm4.large',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'standard',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testEBSGP(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm4.large',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'remote_ssd',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testLocalSSD(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm3.medium',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'local',
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
|
<commit_before><commit_msg>Add AWS disk integration tests<commit_after>
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for AWS scratch disks."""
import os
import unittest
from perfkitbenchmarker import pkb
from perfkitbenchmarker import test_util
MOUNT_POINT = '/scratch'
@unittest.skipUnless('PERFKIT_INTEGRATION' in os.environ,
'PERFKIT_INTEGRATION not in environment')
class AwsScratchDiskIntegrationTest(unittest.TestCase):
def setUp(self):
pkb.SetUpPKB()
def testEBSStandard(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm4.large',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'standard',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testEBSGP(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm4.large',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'remote_ssd',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testLocalSSD(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm3.medium',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'local',
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
|
Add AWS disk integration tests# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for AWS scratch disks."""
import os
import unittest
from perfkitbenchmarker import pkb
from perfkitbenchmarker import test_util
MOUNT_POINT = '/scratch'
@unittest.skipUnless('PERFKIT_INTEGRATION' in os.environ,
'PERFKIT_INTEGRATION not in environment')
class AwsScratchDiskIntegrationTest(unittest.TestCase):
def setUp(self):
pkb.SetUpPKB()
def testEBSStandard(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm4.large',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'standard',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testEBSGP(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm4.large',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'remote_ssd',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testLocalSSD(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm3.medium',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'local',
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
|
<commit_before><commit_msg>Add AWS disk integration tests<commit_after># Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for AWS scratch disks."""
import os
import unittest
from perfkitbenchmarker import pkb
from perfkitbenchmarker import test_util
MOUNT_POINT = '/scratch'
@unittest.skipUnless('PERFKIT_INTEGRATION' in os.environ,
'PERFKIT_INTEGRATION not in environment')
class AwsScratchDiskIntegrationTest(unittest.TestCase):
def setUp(self):
pkb.SetUpPKB()
def testEBSStandard(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm4.large',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'standard',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testEBSGP(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm4.large',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'remote_ssd',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testLocalSSD(self):
test_util.testDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'm3.medium',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'local',
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
|
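The whole class is skipped unless PERFKIT_INTEGRATION is set, so a hedged sketch of opting in from Python (the dotted module path is hypothetical):

import os
import unittest

os.environ['PERFKIT_INTEGRATION'] = '1'  # must be set before the test module is imported
unittest.main(module='tests.aws_disk_integration_test', exit=False)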
|
bce877c7264a5bfad9de9d64d1871c5a86239d59
|
examples/listradio.py
|
examples/listradio.py
|
import gobject
import gtk
from kiwi.ui.widgets.list import Column, List
class Object:
def __init__(self, name, value):
self.name, self.value = name, value
columns = [Column('name'),
Column('value', data_type=bool, radio=True, editable=True)]
win = gtk.Window()
win.set_size_request(300, 120)
win.connect('delete-event', gtk.main_quit)
list = List(columns)
win.add(list)
for name, value in [('First', False),
('Second', False),
('Third', True),
('Fourth', False),
('Fifth', False)]:
list.add_instance(Object(name, value))
win.show_all()
gtk.main()
|
Test for radio buttons in lists
|
Test for radio buttons in lists
|
Python
|
lgpl-2.1
|
Schevo/kiwi,Schevo/kiwi,Schevo/kiwi
|
Test for radio buttons in lists
|
import gobject
import gtk
from kiwi.ui.widgets.list import Column, List
class Object:
def __init__(self, name, value):
self.name, self.value = name, value
columns = [Column('name'),
Column('value', data_type=bool, radio=True, editable=True)]
win = gtk.Window()
win.set_size_request(300, 120)
win.connect('delete-event', gtk.main_quit)
list = List(columns)
win.add(list)
for name, value in [('First', False),
('Second', False),
('Third', True),
('Fourth', False),
('Fifth', False)]:
list.add_instance(Object(name, value))
win.show_all()
gtk.main()
|
<commit_before><commit_msg>Test for radio buttons in lists<commit_after>
|
import gobject
import gtk
from kiwi.ui.widgets.list import Column, List
class Object:
def __init__(self, name, value):
self.name, self.value = name, value
columns = [Column('name'),
Column('value', data_type=bool, radio=True, editable=True)]
win = gtk.Window()
win.set_size_request(300, 120)
win.connect('delete-event', gtk.main_quit)
list = List(columns)
win.add(list)
for name, value in [('First', False),
('Second', False),
('Third', True),
('Fourth', False),
('Fifth', False)]:
list.add_instance(Object(name, value))
win.show_all()
gtk.main()
|
Test for radio buttons in listsimport gobject
import gtk
from kiwi.ui.widgets.list import Column, List
class Object:
def __init__(self, name, value):
self.name, self.value = name, value
columns = [Column('name'),
Column('value', data_type=bool, radio=True, editable=True)]
win = gtk.Window()
win.set_size_request(300, 120)
win.connect('delete-event', gtk.main_quit)
list = List(columns)
win.add(list)
for name, value in [('First', False),
('Second', False),
('Third', True),
('Fourth', False),
('Fifth', False)]:
list.add_instance(Object(name, value))
win.show_all()
gtk.main()
|
<commit_before><commit_msg>Test for radio buttons in lists<commit_after>import gobject
import gtk
from kiwi.ui.widgets.list import Column, List
class Object:
def __init__(self, name, value):
self.name, self.value = name, value
columns = [Column('name'),
Column('value', data_type=bool, radio=True, editable=True)]
win = gtk.Window()
win.set_size_request(300, 120)
win.connect('delete-event', gtk.main_quit)
list = List(columns)
win.add(list)
for name, value in [('First', False),
('Second', False),
('Third', True),
('Fourth', False),
('Fifth', False)]:
list.add_instance(Object(name, value))
win.show_all()
gtk.main()
|
|
98b7c4444b80abb4cf63cbe389df80b938b6c1fd
|
examples/proxy_rax.py
|
examples/proxy_rax.py
|
import random
import sc2
from sc2 import Race, Difficulty, ActionResult
from sc2.constants import *
from sc2.player import Bot, Computer
class ProxyRaxBot(sc2.BotAI):
async def on_step(self, state, iteration):
cc = self.units(COMMANDCENTER)
if not cc.exists:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for unit in self.workers | self.units(MARINE):
await self.do(unit.attack(target))
return
else:
cc = cc.first
if self.units(MARINE).idle.amount > 15 and iteration % 50 == 1:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for marine in self.units(MARINE).idle:
await self.do(marine.attack(target))
if self.can_afford(SCV) and self.workers.amount + len(cc.orders) < 16:
await self.do(cc.train(SCV))
elif self.supply_left < 2:
if self.can_afford(SUPPLYDEPOT):
await self.build(SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
elif self.units(BARRACKS).amount < 3 or self.minerals > 500:
if self.can_afford(BARRACKS):
p = self.game_info.map_center.towards(self.enemy_start_locations[0], 25)
await self.build(BARRACKS, near=p)
if self.can_afford(MARINE) and self.units(BARRACKS).ready.exists:
await self.do(self.units(BARRACKS).ready.random.train(MARINE))
for scv in self.units(SCV).idle:
await self.do(scv.gather(self.state.mineral_field.closest_to(cc)))
sc2.run_game(sc2.maps.get("Sequencer LE"), [
Bot(Race.Terran, ProxyRaxBot()),
Computer(Race.Zerg, Difficulty.Hard)
], realtime=False)
|
Add proxy rax example for terran
|
Add proxy rax example for terran
|
Python
|
mit
|
Dentosal/python-sc2
|
Add proxy rax example for terran
|
import random
import sc2
from sc2 import Race, Difficulty, ActionResult
from sc2.constants import *
from sc2.player import Bot, Computer
class ProxyRaxBot(sc2.BotAI):
async def on_step(self, state, iteration):
cc = self.units(COMMANDCENTER)
if not cc.exists:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for unit in self.workers | self.units(MARINE):
await self.do(unit.attack(target))
return
else:
cc = cc.first
if self.units(MARINE).idle.amount > 15 and iteration % 50 == 1:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for marine in self.units(MARINE).idle:
await self.do(marine.attack(target))
if self.can_afford(SCV) and self.workers.amount + len(cc.orders) < 16:
await self.do(cc.train(SCV))
elif self.supply_left < 2:
if self.can_afford(SUPPLYDEPOT):
await self.build(SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
elif self.units(BARRACKS).amount < 3 or self.minerals > 500:
if self.can_afford(BARRACKS):
p = self.game_info.map_center.towards(self.enemy_start_locations[0], 25)
await self.build(BARRACKS, near=p)
if self.can_afford(MARINE) and self.units(BARRACKS).ready.exists:
await self.do(self.units(BARRACKS).ready.random.train(MARINE))
for scv in self.units(SCV).idle:
await self.do(scv.gather(self.state.mineral_field.closest_to(cc)))
sc2.run_game(sc2.maps.get("Sequencer LE"), [
Bot(Race.Terran, ProxyRaxBot()),
Computer(Race.Zerg, Difficulty.Hard)
], realtime=False)
|
<commit_before><commit_msg>Add proxy rax example for terran<commit_after>
|
import random
import sc2
from sc2 import Race, Difficulty, ActionResult
from sc2.constants import *
from sc2.player import Bot, Computer
class ProxyRaxBot(sc2.BotAI):
async def on_step(self, state, iteration):
cc = self.units(COMMANDCENTER)
if not cc.exists:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for unit in self.workers | self.units(MARINE):
await self.do(unit.attack(target))
return
else:
cc = cc.first
if self.units(MARINE).idle.amount > 15 and iteration % 50 == 1:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for marine in self.units(MARINE).idle:
await self.do(marine.attack(target))
if self.can_afford(SCV) and self.workers.amount + len(cc.orders) < 16:
await self.do(cc.train(SCV))
elif self.supply_left < 2:
if self.can_afford(SUPPLYDEPOT):
await self.build(SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
elif self.units(BARRACKS).amount < 3 or self.minerals > 500:
if self.can_afford(BARRACKS):
p = self.game_info.map_center.towards(self.enemy_start_locations[0], 25)
await self.build(BARRACKS, near=p)
if self.can_afford(MARINE) and self.units(BARRACKS).ready.exists:
await self.do(self.units(BARRACKS).ready.random.train(MARINE))
for scv in self.units(SCV).idle:
await self.do(scv.gather(self.state.mineral_field.closest_to(cc)))
sc2.run_game(sc2.maps.get("Sequencer LE"), [
Bot(Race.Terran, ProxyRaxBot()),
Computer(Race.Zerg, Difficulty.Hard)
], realtime=False)
|
Add proxy rax example for terranimport random
import sc2
from sc2 import Race, Difficulty, ActionResult
from sc2.constants import *
from sc2.player import Bot, Computer
class ProxyRaxBot(sc2.BotAI):
async def on_step(self, state, iteration):
cc = self.units(COMMANDCENTER)
if not cc.exists:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for unit in self.workers | self.units(MARINE):
await self.do(unit.attack(target))
return
else:
cc = cc.first
if self.units(MARINE).idle.amount > 15 and iteration % 50 == 1:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for marine in self.units(MARINE).idle:
await self.do(marine.attack(target))
if self.can_afford(SCV) and self.workers.amount + len(cc.orders) < 16:
await self.do(cc.train(SCV))
elif self.supply_left < 2:
if self.can_afford(SUPPLYDEPOT):
await self.build(SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
elif self.units(BARRACKS).amount < 3 or self.minerals > 500:
if self.can_afford(BARRACKS):
p = self.game_info.map_center.towards(self.enemy_start_locations[0], 25)
await self.build(BARRACKS, near=p)
if self.can_afford(MARINE) and self.units(BARRACKS).ready.exists:
await self.do(self.units(BARRACKS).ready.random.train(MARINE))
for scv in self.units(SCV).idle:
await self.do(scv.gather(self.state.mineral_field.closest_to(cc)))
sc2.run_game(sc2.maps.get("Sequencer LE"), [
Bot(Race.Terran, ProxyRaxBot()),
Computer(Race.Zerg, Difficulty.Hard)
], realtime=False)
|
<commit_before><commit_msg>Add proxy rax example for terran<commit_after>import random
import sc2
from sc2 import Race, Difficulty, ActionResult
from sc2.constants import *
from sc2.player import Bot, Computer
class ProxyRaxBot(sc2.BotAI):
async def on_step(self, state, iteration):
cc = self.units(COMMANDCENTER)
if not cc.exists:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for unit in self.workers | self.units(MARINE):
await self.do(unit.attack(target))
return
else:
cc = cc.first
if self.units(MARINE).idle.amount > 15 and iteration % 50 == 1:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for marine in self.units(MARINE).idle:
await self.do(marine.attack(target))
if self.can_afford(SCV) and self.workers.amount + len(cc.orders) < 16:
await self.do(cc.train(SCV))
elif self.supply_left < 2:
if self.can_afford(SUPPLYDEPOT):
await self.build(SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
elif self.units(BARRACKS).amount < 3 or self.minerals > 500:
if self.can_afford(BARRACKS):
p = self.game_info.map_center.towards(self.enemy_start_locations[0], 25)
await self.build(BARRACKS, near=p)
if self.can_afford(MARINE) and self.units(BARRACKS).ready.exists:
await self.do(self.units(BARRACKS).ready.random.train(MARINE))
for scv in self.units(SCV).idle:
await self.do(scv.gather(self.state.mineral_field.closest_to(cc)))
sc2.run_game(sc2.maps.get("Sequencer LE"), [
Bot(Race.Terran, ProxyRaxBot()),
Computer(Race.Zerg, Difficulty.Hard)
], realtime=False)
|
|
eaff4daa02a22a8d1dc233dd96a0848df195e6ea
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-onmydesk',
version='0.1',
packages=find_packages(exclude=('tests*',)),
include_package_data=True,
license='MIT License',
description='A simple Django app to build reports.',
long_description=README,
url='https://github.com/knowledge/knowledge4life/',
author='Alisson R. Perez',
author_email='alisson.perez@knowledge4.life',
install_requires=[
'XlsxWriter==0.8.3', # Used by XLSXOutput
'filelock==2.0.6',
'awesome-slugify==1.6.5',
],
keywords=['report', 'reporting', 'django'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-onmydesk',
version='0.1.0',
packages=find_packages(exclude=('tests*',)),
include_package_data=True,
license='MIT License',
description='A simple Django app to build reports.',
long_description=README,
url='https://github.com/knowledge4life/django-onmydesk/',
author='Alisson R. Perez',
author_email='alisson.perez@knowledge4.life',
install_requires=[
'XlsxWriter==0.8.3', # Used by XLSXOutput
'filelock==2.0.6',
'awesome-slugify==1.6.5',
],
keywords=['report', 'reporting', 'django'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Office/Business :: Financial :: Spreadsheet',
],
)
|
Improve classifiers, version and url
|
Improve classifiers, version and url
|
Python
|
mit
|
knowledge4life/django-onmydesk,knowledge4life/django-onmydesk,alissonperez/django-onmydesk,knowledge4life/django-onmydesk,alissonperez/django-onmydesk,alissonperez/django-onmydesk
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-onmydesk',
version='0.1',
packages=find_packages(exclude=('tests*',)),
include_package_data=True,
license='MIT License',
description='A simple Django app to build reports.',
long_description=README,
url='https://github.com/knowledge/knowledge4life/',
author='Alisson R. Perez',
author_email='alisson.perez@knowledge4.life',
install_requires=[
'XlsxWriter==0.8.3', # Used by XLSXOutput
'filelock==2.0.6',
'awesome-slugify==1.6.5',
],
keywords=['report', 'reporting', 'django'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Improve classifiers, version and url
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-onmydesk',
version='0.1.0',
packages=find_packages(exclude=('tests*',)),
include_package_data=True,
license='MIT License',
description='A simple Django app to build reports.',
long_description=README,
url='https://github.com/knowledge4life/django-onmydesk/',
author='Alisson R. Perez',
author_email='alisson.perez@knowledge4.life',
install_requires=[
'XlsxWriter==0.8.3', # Used by XLSXOutput
'filelock==2.0.6',
'awesome-slugify==1.6.5',
],
keywords=['report', 'reporting', 'django'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Office/Business :: Financial :: Spreadsheet',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-onmydesk',
version='0.1',
packages=find_packages(exclude=('tests*',)),
include_package_data=True,
license='MIT License',
description='A simple Django app to build reports.',
long_description=README,
url='https://github.com/knowledge/knowledge4life/',
author='Alisson R. Perez',
author_email='alisson.perez@knowledge4.life',
install_requires=[
'XlsxWriter==0.8.3', # Used by XLSXOutput
'filelock==2.0.6',
'awesome-slugify==1.6.5',
],
keywords=['report', 'reporting', 'django'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Improve classifiers, version and url<commit_after>
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-onmydesk',
version='0.1.0',
packages=find_packages(exclude=('tests*',)),
include_package_data=True,
license='MIT License',
description='A simple Django app to build reports.',
long_description=README,
url='https://github.com/knowledge4life/django-onmydesk/',
author='Alisson R. Perez',
author_email='alisson.perez@knowledge4.life',
install_requires=[
'XlsxWriter==0.8.3', # Used by XLSXOutput
'filelock==2.0.6',
'awesome-slugify==1.6.5',
],
keywords=['report', 'reporting', 'django'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Office/Business :: Financial :: Spreadsheet',
],
)
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-onmydesk',
version='0.1',
packages=find_packages(exclude=('tests*',)),
include_package_data=True,
license='MIT License',
description='A simple Django app to build reports.',
long_description=README,
url='https://github.com/knowledge/knowledge4life/',
author='Alisson R. Perez',
author_email='alisson.perez@knowledge4.life',
install_requires=[
'XlsxWriter==0.8.3', # Used by XLSXOutput
'filelock==2.0.6',
'awesome-slugify==1.6.5',
],
keywords=['report', 'reporting', 'django'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Improve classifiers, version and url
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-onmydesk',
version='0.1.0',
packages=find_packages(exclude=('tests*',)),
include_package_data=True,
license='MIT License',
description='A simple Django app to build reports.',
long_description=README,
url='https://github.com/knowledge4life/django-onmydesk/',
author='Alisson R. Perez',
author_email='alisson.perez@knowledge4.life',
install_requires=[
'XlsxWriter==0.8.3', # Used by XLSXOutput
'filelock==2.0.6',
'awesome-slugify==1.6.5',
],
keywords=['report', 'reporting', 'django'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Office/Business :: Financial :: Spreadsheet',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-onmydesk',
version='0.1',
packages=find_packages(exclude=('tests*',)),
include_package_data=True,
license='MIT License',
description='A simple Django app to build reports.',
long_description=README,
url='https://github.com/knowledge/knowledge4life/',
author='Alisson R. Perez',
author_email='alisson.perez@knowledge4.life',
install_requires=[
'XlsxWriter==0.8.3', # Used by XLSXOutput
'filelock==2.0.6',
'awesome-slugify==1.6.5',
],
keywords=['report', 'reporting', 'django'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Improve classifiers, version and url<commit_after>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-onmydesk',
version='0.1.0',
packages=find_packages(exclude=('tests*',)),
include_package_data=True,
license='MIT License',
description='A simple Django app to build reports.',
long_description=README,
url='https://github.com/knowledge4life/django-onmydesk/',
author='Alisson R. Perez',
author_email='alisson.perez@knowledge4.life',
install_requires=[
'XlsxWriter==0.8.3', # Used by XLSXOutput
'filelock==2.0.6',
'awesome-slugify==1.6.5',
],
keywords=['report', 'reporting', 'django'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Office/Business :: Financial :: Spreadsheet',
],
)
|
3481f958cb396be11a565bbb0213dc2d973d25b1
|
ceph_deploy/tests/parser/test_install.py
|
ceph_deploy/tests/parser/test_install.py
|
import pytest
from ceph_deploy.cli import get_parser
class TestParserInstall(object):
def setup(self):
self.parser = get_parser()
def test_install_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('install --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy install' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_install_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('install'.split())
out, err = capsys.readouterr()
assert "error: too few arguments" in err
def test_install_one_host(self):
args = self.parser.parse_args('install host1'.split())
assert args.host == ['host1']
def test_install_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args(['install'] + hostnames)
assert frozenset(args.host) == frozenset(hostnames)
def test_install_release_default_is_none(self):
args = self.parser.parse_args('install host1'.split())
assert args.release is None
def test_install_release(self):
args = self.parser.parse_args('install --release hammer host1'.split())
assert args.release == "hammer"
assert args.version_kind == "stable"
@pytest.mark.skipif(reason="No release name sanity checking yet")
def test_install_release_bad_codename(self):
args = self.parser.parse_args('install --release cephalopod host1'.split())
assert args.release != "cephalopod"
|
Add initial argparse tests for install
|
[RM-11742] Add initial argparse tests for install
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com>
|
Python
|
mit
|
Vicente-Cheng/ceph-deploy,trhoden/ceph-deploy,imzhulei/ceph-deploy,osynge/ceph-deploy,SUSE/ceph-deploy,codenrhoden/ceph-deploy,zhouyuan/ceph-deploy,branto1/ceph-deploy,osynge/ceph-deploy,ceph/ceph-deploy,isyippee/ceph-deploy,SUSE/ceph-deploy,isyippee/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,SUSE/ceph-deploy-to-be-deleted,imzhulei/ceph-deploy,Vicente-Cheng/ceph-deploy,branto1/ceph-deploy,shenhequnying/ceph-deploy,ghxandsky/ceph-deploy,trhoden/ceph-deploy,zhouyuan/ceph-deploy,ghxandsky/ceph-deploy,codenrhoden/ceph-deploy,ceph/ceph-deploy,shenhequnying/ceph-deploy
|
[RM-11742] Add initial argparse tests for install
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com>
|
import pytest
from ceph_deploy.cli import get_parser
class TestParserInstall(object):
def setup(self):
self.parser = get_parser()
def test_install_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('install --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy install' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_install_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('install'.split())
out, err = capsys.readouterr()
assert "error: too few arguments" in err
def test_install_one_host(self):
args = self.parser.parse_args('install host1'.split())
assert args.host == ['host1']
def test_install_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args(['install'] + hostnames)
assert frozenset(args.host) == frozenset(hostnames)
def test_install_release_default_is_none(self):
args = self.parser.parse_args('install host1'.split())
assert args.release is None
def test_install_release(self):
args = self.parser.parse_args('install --release hammer host1'.split())
assert args.release == "hammer"
assert args.version_kind == "stable"
@pytest.mark.skipif(reason="No release name sanity checking yet")
def test_install_release_bad_codename(self):
args = self.parser.parse_args('install --release cephalopod host1'.split())
assert args.release != "cephalopod"
|
<commit_before><commit_msg>[RM-11742] Add initial argparse tests for install
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com><commit_after>
|
import pytest
from ceph_deploy.cli import get_parser
class TestParserInstall(object):
def setup(self):
self.parser = get_parser()
def test_install_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('install --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy install' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_install_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('install'.split())
out, err = capsys.readouterr()
assert "error: too few arguments" in err
def test_install_one_host(self):
args = self.parser.parse_args('install host1'.split())
assert args.host == ['host1']
def test_install_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args(['install'] + hostnames)
assert frozenset(args.host) == frozenset(hostnames)
def test_install_release_default_is_none(self):
args = self.parser.parse_args('install host1'.split())
assert args.release is None
def test_install_release(self):
args = self.parser.parse_args('install --release hammer host1'.split())
assert args.release == "hammer"
assert args.version_kind == "stable"
@pytest.mark.skipif(reason="No release name sanity checking yet")
def test_install_release_bad_codename(self):
args = self.parser.parse_args('install --release cephalopod host1'.split())
assert args.release != "cephalopod"
|
[RM-11742] Add initial argparse tests for install
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com>
import pytest
from ceph_deploy.cli import get_parser
class TestParserInstall(object):
def setup(self):
self.parser = get_parser()
def test_install_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('install --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy install' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_install_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('install'.split())
out, err = capsys.readouterr()
assert "error: too few arguments" in err
def test_install_one_host(self):
args = self.parser.parse_args('install host1'.split())
assert args.host == ['host1']
def test_install_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args(['install'] + hostnames)
assert frozenset(args.host) == frozenset(hostnames)
def test_install_release_default_is_none(self):
args = self.parser.parse_args('install host1'.split())
assert args.release is None
def test_install_release(self):
args = self.parser.parse_args('install --release hammer host1'.split())
assert args.release == "hammer"
assert args.version_kind == "stable"
@pytest.mark.skipif(reason="No release name sanity checking yet")
def test_install_release_bad_codename(self):
args = self.parser.parse_args('install --release cephalopod host1'.split())
assert args.release != "cephalopod"
|
<commit_before><commit_msg>[RM-11742] Add initial argparse tests for install
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com><commit_after>import pytest
from ceph_deploy.cli import get_parser
class TestParserInstall(object):
def setup(self):
self.parser = get_parser()
def test_install_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('install --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy install' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_install_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('install'.split())
out, err = capsys.readouterr()
assert "error: too few arguments" in err
def test_install_one_host(self):
args = self.parser.parse_args('install host1'.split())
assert args.host == ['host1']
def test_install_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args(['install'] + hostnames)
assert frozenset(args.host) == frozenset(hostnames)
def test_install_release_default_is_none(self):
args = self.parser.parse_args('install host1'.split())
assert args.release is None
def test_install_release(self):
args = self.parser.parse_args('install --release hammer host1'.split())
assert args.release == "hammer"
assert args.version_kind == "stable"
@pytest.mark.skipif(reason="No release name sanity checking yet")
def test_install_release_bad_codename(self):
args = self.parser.parse_args('install --release cephalopod host1'.split())
assert args.release != "cephalopod"
|
|
05dbf3473d320934ff306b673af7b72abc572375
|
scripts/list-packages.py
|
scripts/list-packages.py
|
#!/usr/bin/env python3
"""
Lists R packages in one docker image but not the other
"""
import docker
import argparse
import json
from urllib.request import urlopen
from urllib.error import HTTPError
argparser = argparse.ArgumentParser()
argparser.add_argument(
'src_image',
)
argparser.add_argument(
'dest_image',
)
args = argparser.parse_args()
client = docker.from_env()
def get_package_info(package_name):
"""
Return package data for package_name in CRAN repo
"""
url = f'https://packagemanager.rstudio.com/__api__/repos/1/packages/{package_name}'
try:
with urlopen(url) as resp:
data = json.load(resp)
except HTTPError as e:
# Provide an informative exception if we have a typo in package name
if e.code == 404:
# Package doesn't exist
print(f'Package "{package_name}" not found in package manager')
return { "name": package_name, "version": None }
else:
raise
return data
def packages_list(image_name):
raw_packages = client.containers.run(
image_name,
'R --quiet -e "installed.packages()[,c(1, 3)]"'
).decode().split('\n')[2:]
return set([rp.split()[0] for rp in raw_packages if len(rp.split()) == 3])
def main():
src_packages = packages_list(args.src_image)
dest_packages = packages_list(args.dest_image)
to_be_added = src_packages - dest_packages
for p in to_be_added:
info = get_package_info(p)
print(f'"{p}", "{info["version"]}",')
if __name__ == '__main__':
main()
|
Add script to diff installed R packages
|
Add script to diff installed R packages
Used to check if all the R packages in the current
R hub image are present in the new R hub image.
|
Python
|
bsd-3-clause
|
ryanlovett/datahub,berkeley-dsep-infra/datahub,ryanlovett/datahub,ryanlovett/datahub,berkeley-dsep-infra/datahub,berkeley-dsep-infra/datahub
|
Add script to diff installed R packages
Used to check if all the R packages in the current
R hub image are present in the new R hub image.
|
#!/usr/bin/env python3
"""
Lists R packages in one docker image but not the other
"""
import docker
import argparse
import json
from urllib.request import urlopen
from urllib.error import HTTPError
argparser = argparse.ArgumentParser()
argparser.add_argument(
'src_image',
)
argparser.add_argument(
'dest_image',
)
args = argparser.parse_args()
client = docker.from_env()
def get_package_info(package_name):
"""
Return package data for package_name in CRAN repo
"""
url = f'https://packagemanager.rstudio.com/__api__/repos/1/packages/{package_name}'
try:
with urlopen(url) as resp:
data = json.load(resp)
except HTTPError as e:
# Provide an informative exception if we have a typo in package name
if e.code == 404:
# Package doesn't exist
print(f'Package "{package_name}" not found in package manager')
return { "name": package_name, "version": None }
else:
raise
return data
def packages_list(image_name):
raw_packages = client.containers.run(
image_name,
'R --quiet -e "installed.packages()[,c(1, 3)]"'
).decode().split('\n')[2:]
return set([rp.split()[0] for rp in raw_packages if len(rp.split()) == 3])
def main():
src_packages = packages_list(args.src_image)
dest_packages = packages_list(args.dest_image)
to_be_added = src_packages - dest_packages
for p in to_be_added:
info = get_package_info(p)
print(f'"{p}", "{info["version"]}",')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to diff installed R packages
Used to check if all the R packages in the current
R hub image are present in the new R hub image.<commit_after>
|
#!/usr/bin/env python3
"""
Lists R packages in one docker image but not the other
"""
import docker
import argparse
import json
from urllib.request import urlopen
from urllib.error import HTTPError
argparser = argparse.ArgumentParser()
argparser.add_argument(
'src_image',
)
argparser.add_argument(
'dest_image',
)
args = argparser.parse_args()
client = docker.from_env()
def get_package_info(package_name):
"""
Return package data for package_name in CRAN repo
"""
url = f'https://packagemanager.rstudio.com/__api__/repos/1/packages/{package_name}'
try:
with urlopen(url) as resp:
data = json.load(resp)
except HTTPError as e:
# Provide an informative exception if we have a typo in package name
if e.code == 404:
# Package doesn't exist
print(f'Package "{package_name}" not found in package manager')
return { "name": package_name, "version": None }
else:
raise
return data
def packages_list(image_name):
raw_packages = client.containers.run(
image_name,
'R --quiet -e "installed.packages()[,c(1, 3)]"'
).decode().split('\n')[2:]
return set([rp.split()[0] for rp in raw_packages if len(rp.split()) == 3])
def main():
src_packages = packages_list(args.src_image)
dest_packages = packages_list(args.dest_image)
to_be_added = src_packages - dest_packages
for p in to_be_added:
info = get_package_info(p)
print(f'"{p}", "{info["version"]}",')
if __name__ == '__main__':
main()
|
Add script to diff installed R packages
Used to check if all the R packages in the current
R hub image are present in the new R hub image.
#!/usr/bin/env python3
"""
Lists R packages in one docker image but not the other
"""
import docker
import argparse
import json
from urllib.request import urlopen
from urllib.error import HTTPError
argparser = argparse.ArgumentParser()
argparser.add_argument(
'src_image',
)
argparser.add_argument(
'dest_image',
)
args = argparser.parse_args()
client = docker.from_env()
def get_package_info(package_name):
"""
Return package data for package_name in CRAN repo
"""
url = f'https://packagemanager.rstudio.com/__api__/repos/1/packages/{package_name}'
try:
with urlopen(url) as resp:
data = json.load(resp)
except HTTPError as e:
# Provide an informative exception if we have a typo in package name
if e.code == 404:
# Package doesn't exist
print(f'Package "{package_name}" not found in package manager')
return { "name": package_name, "version": None }
else:
raise
return data
def packages_list(image_name):
raw_packages = client.containers.run(
image_name,
'R --quiet -e "installed.packages()[,c(1, 3)]"'
).decode().split('\n')[2:]
return set([rp.split()[0] for rp in raw_packages if len(rp.split()) == 3])
def main():
src_packages = packages_list(args.src_image)
dest_packages = packages_list(args.dest_image)
to_be_added = src_packages - dest_packages
for p in to_be_added:
info = get_package_info(p)
print(f'"{p}", "{info["version"]}",')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to diff installed R packages
Used to check if all the R packages in the current
R hub image are present in the new R hub image.<commit_after>#!/usr/bin/env python3
"""
Lists R packages in one docker image but not the other
"""
import docker
import argparse
import json
from urllib.request import urlopen
from urllib.error import HTTPError
argparser = argparse.ArgumentParser()
argparser.add_argument(
'src_image',
)
argparser.add_argument(
'dest_image',
)
args = argparser.parse_args()
client = docker.from_env()
def get_package_info(package_name):
"""
Return package data for package_name in CRAN repo
"""
url = f'https://packagemanager.rstudio.com/__api__/repos/1/packages/{package_name}'
try:
with urlopen(url) as resp:
data = json.load(resp)
except HTTPError as e:
# Provide an informative exception if we have a typo in package name
if e.code == 404:
# Package doesn't exist
print(f'Package "{package_name}" not found in package manager')
return { "name": package_name, "version": None }
else:
raise
return data
def packages_list(image_name):
raw_packages = client.containers.run(
image_name,
'R --quiet -e "installed.packages()[,c(1, 3)]"'
).decode().split('\n')[2:]
return set([rp.split()[0] for rp in raw_packages if len(rp.split()) == 3])
def main():
src_packages = packages_list(args.src_image)
dest_packages = packages_list(args.dest_image)
to_be_added = src_packages - dest_packages
for p in to_be_added:
info = get_package_info(p)
print(f'"{p}", "{info["version"]}",')
if __name__ == '__main__':
main()
|
|
8073f768c37129860def05797e6152de01e44267
|
scripts/remove-all-annotations.py
|
scripts/remove-all-annotations.py
|
#!/usr/bin/python
# This is a small helper script to remove all annotations from all
# projects.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
Add a brutal (but fast) script for clearing *all* annotations from all projects
|
Add a brutal (but fast) script for clearing *all* annotations from all projects
|
Python
|
agpl-3.0
|
htem/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID
|
Add a brutal (but fast) script for clearing *all* annotations from all projects
|
#!/usr/bin/python
# This is a small helper script to remove all annotations from all
# projects.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
<commit_before><commit_msg>Add a brutal (but fast) script for clearing *all* annotations from all projects<commit_after>
|
#!/usr/bin/python
# This is a small helper script to remove all annotations from all
# projects.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
Add a brutal (but fast) script for clearing *all* annotations from all projects
#!/usr/bin/python
# This is a small helper script to remove all annotations from all
# projects.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
<commit_before><commit_msg>Add a brutal (but fast) script for clearing *all* annotations from all projects<commit_after>#!/usr/bin/python
# This is a small helper script to remove all annotations from all
# projects.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
|
3f543f9e3a328441ae477ca3fb299fbc86ffc40f
|
oneflow/base/tasks.py
|
oneflow/base/tasks.py
|
# -*- coding: utf-8 -*-
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.')
# TODO: force re-auth of user at next visit.
|
# -*- coding: utf-8 -*-
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.',
user.username)
social.delete()
|
Fix for missing username and clear the social_auth to force re-authentication at next login.
|
Fix for missing username and clear the social_auth to force re-authentication at next login.
|
Python
|
agpl-3.0
|
WillianPaiva/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow
|
# -*- coding: utf-8 -*-
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.')
# TODO: force re-auth of user at next visit.
Fix for missing username and clear the social_auth to force re-authentication at next login.
|
# -*- coding: utf-8 -*-
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.',
user.username)
social.delete()
|
<commit_before># -*- coding: utf-8 -*-
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.')
# TODO: force re-auth of user at next visit.
<commit_msg>Fix for missing username and clear the social_auth to force re-authentication at next login.<commit_after>
|
# -*- coding: utf-8 -*-
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.',
user.username)
social.delete()
|
# -*- coding: utf-8 -*-
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.')
# TODO: force re-auth of user at next visit.
Fix for missing username and clear the social_auth to force re-authentication at next login.
# -*- coding: utf-8 -*-
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.',
user.username)
social.delete()
|
<commit_before># -*- coding: utf-8 -*-
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.')
# TODO: force re-auth of user at next visit.
<commit_msg>Fix for missing username and clear the social_auth to force re-authentication at next login.<commit_after># -*- coding: utf-8 -*-
import logging
#import datetime
from celery import task
from django.contrib.auth import get_user_model
LOGGER = logging.getLogger(__name__)
User = get_user_model()
#ftstamp = datetime.datetime.fromtimestamp
#now = datetime.datetime.now
@task
def celery_beat_test():
LOGGER.info(u'testing celery beat scheduler…')
@task
def refresh_access_tokens():
""" Refresh all access_tokens in turn to avoid hitting
http://dev.1flow.net/webapps/1flow/group/664/
"""
users = User.objects.all()
#count = users.count()
#sleep_time = 1500 / count
for user in users:
# See http://django-social-auth.readthedocs.org/en/latest/use_cases.html#token-refreshing # NOQA
#LOGGER.warning(u'Refreshing invalid access_token for user %s.',
# user.username)
social_accounts = user.social_auth.filter(provider='google-oauth2')
if social_accounts.count() == 0:
continue
for social in social_accounts:
try:
social.refresh_token()
except:
LOGGER.error(u'Access token could not be refreshed for user '
u'%s, forcing re-authentication at next login.',
user.username)
social.delete()
|
979101349fc35efaba5f23ecf2e8a09af0f52a7b
|
runtests.py
|
runtests.py
|
# runtests for django reusable apps
# see http://stackoverflow.com/questions/3841725/how-to-launch-tests-for-django-reusable-app
import glob
import os
import sys
import django
from django.conf import settings
from django.core.management import execute_from_command_line
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.abspath(os.path.join(BASE_DIR, '..')))
# Unfortunately, apps can not be installed via ``modify_settings``
# decorator, because it would miss the database setup.
CUSTOM_INSTALLED_APPS = (
'dynamodb2_sessions',
)
ALWAYS_INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
)
ALWAYS_MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
settings.configure(
SECRET_KEY="django_tests_secret_key",
DEBUG=False,
TEMPLATE_DEBUG=False,
ALLOWED_HOSTS=[],
INSTALLED_APPS=ALWAYS_INSTALLED_APPS + CUSTOM_INSTALLED_APPS,
MIDDLEWARE_CLASSES=ALWAYS_MIDDLEWARE_CLASSES,
ROOT_URLCONF='tests.urls',
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
LANGUAGE_CODE='en-us',
TIME_ZONE='UTC',
USE_I18N=True,
USE_L10N=True,
USE_TZ=True,
STATIC_URL='/static/',
# Use a fast hasher to speed up tests.
PASSWORD_HASHERS=(
'django.contrib.auth.hashers.MD5PasswordHasher',
),
FIXTURE_DIRS=glob.glob(BASE_DIR + '/' + '*/fixtures/'),
# secret settings
AWS_REGION_NAME='',
AWS_ACCESS_KEY_ID='',
AWS_SECRET_ACCESS_KEY='',
DYNAMODB_SESSIONS_TABLE_NAME = '',
)
django.setup()
args = [sys.argv[0], 'test']
# Current module (``tests``) and its submodules.
test_cases = '.'
# Allow accessing test options from the command line.
offset = 1
try:
sys.argv[1]
except IndexError:
pass
else:
option = sys.argv[1].startswith('-')
if not option:
test_cases = sys.argv[1]
offset = 2
args.append(test_cases)
# ``verbosity`` can be overwritten from command line.
args.append('--verbosity=2')
args.extend(sys.argv[offset:])
execute_from_command_line(args)
|
Add settings file for launching tests
|
Add settings file for launching tests
|
Python
|
bsd-3-clause
|
amrael/django-dynamodb2-sessions
|
Add settings file for launching tests
|
# runtests for django reusable apps
# see http://stackoverflow.com/questions/3841725/how-to-launch-tests-for-django-reusable-app
import glob
import os
import sys
import django
from django.conf import settings
from django.core.management import execute_from_command_line
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.abspath(os.path.join(BASE_DIR, '..')))
# Unfortunately, apps can not be installed via ``modify_settings``
# decorator, because it would miss the database setup.
CUSTOM_INSTALLED_APPS = (
'dynamodb2_sessions',
)
ALWAYS_INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
)
ALWAYS_MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
settings.configure(
SECRET_KEY="django_tests_secret_key",
DEBUG=False,
TEMPLATE_DEBUG=False,
ALLOWED_HOSTS=[],
INSTALLED_APPS=ALWAYS_INSTALLED_APPS + CUSTOM_INSTALLED_APPS,
MIDDLEWARE_CLASSES=ALWAYS_MIDDLEWARE_CLASSES,
ROOT_URLCONF='tests.urls',
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
LANGUAGE_CODE='en-us',
TIME_ZONE='UTC',
USE_I18N=True,
USE_L10N=True,
USE_TZ=True,
STATIC_URL='/static/',
# Use a fast hasher to speed up tests.
PASSWORD_HASHERS=(
'django.contrib.auth.hashers.MD5PasswordHasher',
),
FIXTURE_DIRS=glob.glob(BASE_DIR + '/' + '*/fixtures/'),
# secret settings
AWS_REGION_NAME='',
AWS_ACCESS_KEY_ID='',
AWS_SECRET_ACCESS_KEY='',
DYNAMODB_SESSIONS_TABLE_NAME = '',
)
django.setup()
args = [sys.argv[0], 'test']
# Current module (``tests``) and its submodules.
test_cases = '.'
# Allow accessing test options from the command line.
offset = 1
try:
sys.argv[1]
except IndexError:
pass
else:
option = sys.argv[1].startswith('-')
if not option:
test_cases = sys.argv[1]
offset = 2
args.append(test_cases)
# ``verbosity`` can be overwritten from command line.
args.append('--verbosity=2')
args.extend(sys.argv[offset:])
execute_from_command_line(args)
|
<commit_before><commit_msg>Add settings file for launching tests<commit_after>
|
# runtests for django reusable apps
# see http://stackoverflow.com/questions/3841725/how-to-launch-tests-for-django-reusable-app
import glob
import os
import sys
import django
from django.conf import settings
from django.core.management import execute_from_command_line
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.abspath(os.path.join(BASE_DIR, '..')))
# Unfortunately, apps can not be installed via ``modify_settings``
# decorator, because it would miss the database setup.
CUSTOM_INSTALLED_APPS = (
'dynamodb2_sessions',
)
ALWAYS_INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
)
ALWAYS_MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
settings.configure(
SECRET_KEY="django_tests_secret_key",
DEBUG=False,
TEMPLATE_DEBUG=False,
ALLOWED_HOSTS=[],
INSTALLED_APPS=ALWAYS_INSTALLED_APPS + CUSTOM_INSTALLED_APPS,
MIDDLEWARE_CLASSES=ALWAYS_MIDDLEWARE_CLASSES,
ROOT_URLCONF='tests.urls',
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
LANGUAGE_CODE='en-us',
TIME_ZONE='UTC',
USE_I18N=True,
USE_L10N=True,
USE_TZ=True,
STATIC_URL='/static/',
# Use a fast hasher to speed up tests.
PASSWORD_HASHERS=(
'django.contrib.auth.hashers.MD5PasswordHasher',
),
FIXTURE_DIRS=glob.glob(BASE_DIR + '/' + '*/fixtures/'),
# secret settings
AWS_REGION_NAME='',
AWS_ACCESS_KEY_ID='',
AWS_SECRET_ACCESS_KEY='',
DYNAMODB_SESSIONS_TABLE_NAME = '',
)
django.setup()
args = [sys.argv[0], 'test']
# Current module (``tests``) and its submodules.
test_cases = '.'
# Allow accessing test options from the command line.
offset = 1
try:
sys.argv[1]
except IndexError:
pass
else:
option = sys.argv[1].startswith('-')
if not option:
test_cases = sys.argv[1]
offset = 2
args.append(test_cases)
# ``verbosity`` can be overwritten from command line.
args.append('--verbosity=2')
args.extend(sys.argv[offset:])
execute_from_command_line(args)
|
Add settings file for launching tests
# runtests for django reusable apps
# see http://stackoverflow.com/questions/3841725/how-to-launch-tests-for-django-reusable-app
import glob
import os
import sys
import django
from django.conf import settings
from django.core.management import execute_from_command_line
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.abspath(os.path.join(BASE_DIR, '..')))
# Unfortunately, apps can not be installed via ``modify_settings``
# decorator, because it would miss the database setup.
CUSTOM_INSTALLED_APPS = (
'dynamodb2_sessions',
)
ALWAYS_INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
)
ALWAYS_MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
settings.configure(
SECRET_KEY="django_tests_secret_key",
DEBUG=False,
TEMPLATE_DEBUG=False,
ALLOWED_HOSTS=[],
INSTALLED_APPS=ALWAYS_INSTALLED_APPS + CUSTOM_INSTALLED_APPS,
MIDDLEWARE_CLASSES=ALWAYS_MIDDLEWARE_CLASSES,
ROOT_URLCONF='tests.urls',
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
LANGUAGE_CODE='en-us',
TIME_ZONE='UTC',
USE_I18N=True,
USE_L10N=True,
USE_TZ=True,
STATIC_URL='/static/',
# Use a fast hasher to speed up tests.
PASSWORD_HASHERS=(
'django.contrib.auth.hashers.MD5PasswordHasher',
),
FIXTURE_DIRS=glob.glob(BASE_DIR + '/' + '*/fixtures/'),
# secret settings
AWS_REGION_NAME='',
AWS_ACCESS_KEY_ID='',
AWS_SECRET_ACCESS_KEY='',
DYNAMODB_SESSIONS_TABLE_NAME = '',
)
django.setup()
args = [sys.argv[0], 'test']
# Current module (``tests``) and its submodules.
test_cases = '.'
# Allow accessing test options from the command line.
offset = 1
try:
sys.argv[1]
except IndexError:
pass
else:
option = sys.argv[1].startswith('-')
if not option:
test_cases = sys.argv[1]
offset = 2
args.append(test_cases)
# ``verbosity`` can be overwritten from command line.
args.append('--verbosity=2')
args.extend(sys.argv[offset:])
execute_from_command_line(args)
|
<commit_before><commit_msg>Add settings file for launching tests<commit_after># runtests for django reusable apps
# see http://stackoverflow.com/questions/3841725/how-to-launch-tests-for-django-reusable-app
import glob
import os
import sys
import django
from django.conf import settings
from django.core.management import execute_from_command_line
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.abspath(os.path.join(BASE_DIR, '..')))
# Unfortunately, apps can not be installed via ``modify_settings``
# decorator, because it would miss the database setup.
CUSTOM_INSTALLED_APPS = (
'dynamodb2_sessions',
)
ALWAYS_INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
)
ALWAYS_MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
settings.configure(
SECRET_KEY="django_tests_secret_key",
DEBUG=False,
TEMPLATE_DEBUG=False,
ALLOWED_HOSTS=[],
INSTALLED_APPS=ALWAYS_INSTALLED_APPS + CUSTOM_INSTALLED_APPS,
MIDDLEWARE_CLASSES=ALWAYS_MIDDLEWARE_CLASSES,
ROOT_URLCONF='tests.urls',
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
LANGUAGE_CODE='en-us',
TIME_ZONE='UTC',
USE_I18N=True,
USE_L10N=True,
USE_TZ=True,
STATIC_URL='/static/',
# Use a fast hasher to speed up tests.
PASSWORD_HASHERS=(
'django.contrib.auth.hashers.MD5PasswordHasher',
),
FIXTURE_DIRS=glob.glob(BASE_DIR + '/' + '*/fixtures/'),
# secret settings
AWS_REGION_NAME='',
AWS_ACCESS_KEY_ID='',
AWS_SECRET_ACCESS_KEY='',
DYNAMODB_SESSIONS_TABLE_NAME = '',
)
django.setup()
args = [sys.argv[0], 'test']
# Current module (``tests``) and its submodules.
test_cases = '.'
# Allow accessing test options from the command line.
offset = 1
try:
sys.argv[1]
except IndexError:
pass
else:
option = sys.argv[1].startswith('-')
if not option:
test_cases = sys.argv[1]
offset = 2
args.append(test_cases)
# ``verbosity`` can be overwritten from command line.
args.append('--verbosity=2')
args.extend(sys.argv[offset:])
execute_from_command_line(args)
|
|
ee621978cf6a218f71046afb0c1afeb9dd738677
|
doc/book/book-dist.py
|
doc/book/book-dist.py
|
#!/usr/bin/env python2
import sys
import os
import shutil
def die(msg):
sys.stderr.write('ERROR: ' + msg)
sys.exit(1)
cwd = os.getcwd()
if not os.path.exists('book') \
or not os.path.exists('Makefile'):
die('Please run this from the Subversion book source directory\n')
if not os.getenv('JAVA_HOME'):
die('JAVA_HOME is not set correctly.\n')
if os.path.exists('./usr'):
die('Please remove ./usr.\n')
os.putenv('FOP_OPTS', '-Xms100m -Xmx200m')
os.system('DESTDIR=. make book-clean install-book-html ' + \
'install-book-html-chunk install-book-pdf')
tarball = os.path.join(cwd, 'svnbook.tar.gz')
try:
os.chdir('./usr/share/doc/subversion')
os.rename('book', 'svnbook')
os.system('tar cvfz ' + tarball + ' svnbook')
finally:
os.chdir(cwd)
shutil.rmtree('./usr')
if not os.path.exists(tarball):
die('Hrm. It appears the tarball was not created.\n')
print 'Your tarball sits in ./svnbook.tar.gz. Enjoy!'
|
Add a little package-em-up script for the Subversion book.
|
Add a little package-em-up script for the Subversion book.
|
Python
|
apache-2.0
|
jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion
|
Add a little package-em-up script for the Subversion book.
|
#!/usr/bin/env python2
import sys
import os
import shutil
def die(msg):
sys.stderr.write('ERROR: ' + msg)
sys.exit(1)
cwd = os.getcwd()
if not os.path.exists('book') \
or not os.path.exists('Makefile'):
die('Please run this from the Subversion book source directory\n')
if not os.getenv('JAVA_HOME'):
die('JAVA_HOME is not set correctly.\n')
if os.path.exists('./usr'):
die('Please remove ./usr.\n')
os.putenv('FOP_OPTS', '-Xms100m -Xmx200m')
os.system('DESTDIR=. make book-clean install-book-html ' + \
'install-book-html-chunk install-book-pdf')
tarball = os.path.join(cwd, 'svnbook.tar.gz')
try:
os.chdir('./usr/share/doc/subversion')
os.rename('book', 'svnbook')
os.system('tar cvfz ' + tarball + ' svnbook')
finally:
os.chdir(cwd)
shutil.rmtree('./usr')
if not os.path.exists(tarball):
die('Hrm. It appears the tarball was not created.\n')
print 'Your tarball sits in ./svnbook.tar.gz. Enjoy!'
|
<commit_before><commit_msg>Add a little package-em-up script for the Subversion book.<commit_after>
|
#!/usr/bin/env python2
import sys
import os
import shutil
def die(msg):
sys.stderr.write('ERROR: ' + msg)
sys.exit(1)
cwd = os.getcwd()
if not os.path.exists('book') \
or not os.path.exists('Makefile'):
die('Please run this from the Subversion book source directory\n')
if not os.getenv('JAVA_HOME'):
die('JAVA_HOME is not set correctly.\n')
if os.path.exists('./usr'):
die('Please remove ./usr.\n')
os.putenv('FOP_OPTS', '-Xms100m -Xmx200m')
os.system('DESTDIR=. make book-clean install-book-html ' + \
'install-book-html-chunk install-book-pdf')
tarball = os.path.join(cwd, 'svnbook.tar.gz')
try:
os.chdir('./usr/share/doc/subversion')
os.rename('book', 'svnbook')
os.system('tar cvfz ' + tarball + ' svnbook')
finally:
os.chdir(cwd)
shutil.rmtree('./usr')
if not os.path.exists(tarball):
die('Hrm. It appears the tarball was not created.\n')
print 'Your tarball sits in ./svnbook.tar.gz. Enjoy!'
|
Add a little package-em-up script for the Subversion book.
#!/usr/bin/env python2
import sys
import os
import shutil
def die(msg):
sys.stderr.write('ERROR: ' + msg)
sys.exit(1)
cwd = os.getcwd()
if not os.path.exists('book') \
or not os.path.exists('Makefile'):
die('Please run this from the Subversion book source directory\n')
if not os.getenv('JAVA_HOME'):
die('JAVA_HOME is not set correctly.\n')
if os.path.exists('./usr'):
die('Please remove ./usr.\n')
os.putenv('FOP_OPTS', '-Xms100m -Xmx200m')
os.system('DESTDIR=. make book-clean install-book-html ' + \
'install-book-html-chunk install-book-pdf')
tarball = os.path.join(cwd, 'svnbook.tar.gz')
try:
os.chdir('./usr/share/doc/subversion')
os.rename('book', 'svnbook')
os.system('tar cvfz ' + tarball + ' svnbook')
finally:
os.chdir(cwd)
shutil.rmtree('./usr')
if not os.path.exists(tarball):
die('Hrm. It appears the tarball was not created.\n')
print 'Your tarball sits in ./svnbook.tar.gz. Enjoy!'
|
<commit_before><commit_msg>Add a little package-em-up script for the Subversion book.<commit_after>#!/usr/bin/env python2
import sys
import os
import shutil
def die(msg):
sys.stderr.write('ERROR: ' + msg)
sys.exit(1)
cwd = os.getcwd()
if not os.path.exists('book') \
or not os.path.exists('Makefile'):
die('Please run this from the Subversion book source directory\n')
if not os.getenv('JAVA_HOME'):
die('JAVA_HOME is not set correctly.\n')
if os.path.exists('./usr'):
die('Please remove ./usr.\n')
os.putenv('FOP_OPTS', '-Xms100m -Xmx200m')
os.system('DESTDIR=. make book-clean install-book-html ' + \
'install-book-html-chunk install-book-pdf')
tarball = os.path.join(cwd, 'svnbook.tar.gz')
try:
os.chdir('./usr/share/doc/subversion')
os.rename('book', 'svnbook')
os.system('tar cvfz ' + tarball + ' svnbook')
finally:
os.chdir(cwd)
shutil.rmtree('./usr')
if not os.path.exists(tarball):
die('Hrm. It appears the tarball was not created.\n')
print 'Your tarball sits in ./svnbook.tar.gz. Enjoy!'
|
|
487c9fdd87c7435087d4b7260146d2f6f470ae7e
|
Main.py
|
Main.py
|
#!/usr/bin/python
#Geoff Spielman
#April 22, 2016
import sys
import time
import serial
ser = serial.Serial('/dev/ttyACM0', 9600)
#strings and ints for storing left and right motor power
mLint = 0
mRint = 0
mL = ''
mR = ''
#prepare for stop
Stp = '0,0!'
BAS=bytearray()
BAS.extend(map(ord, Stp))
def IsAnInt(recString):
try:
int(recString)
return True
except ValueError:
return False
print('Type "stop" at any time to stop car')
print('To maintain levels, don\'t enter integer. Valid input: 0<int<255')
while mL != 'stop' and mR != 'stop':
mL = input('Set left motor power: ')
if (mL.lower() != 'stop'):
mR = str(input('Set right motor power: '))
if (mL.lower() == 'stop' or mR.lower() == 'stop'): #immediately stop
ser.write(BAS)
sys.exit('Stopped')
#if user entered text (button mash), maintain motor levels
if IsAnInt(mL):
mLint = int(mL)
if mLint > 255:
mLint = 255
if mLint < 0:
mLint = 0
if IsAnInt(mR):
mRint = int(mR)
if mRint > 255:
mRint = 255
if mRint < 0:
mRint = 0
print('\nCurrent Values - Left Motor: %d Right Motor: %d' %(mLint, mRint))
output = str(mLint) + ',' + str(mRint) + '!' #this is how arduino finds values
BA = bytearray()
BA.extend(map(ord, output))
#print (BA)
ser.write(BA)
|
Move python code to Pi
|
Move python code to Pi
Not complete, just moving to Pi
|
Python
|
mit
|
GeoffSpielman/SimonSays,GeoffSpielman/SimonSays,GeoffSpielman/SimonSays,GeoffSpielman/SimonSays
|
Move python code to Pi
Not complete, just moving to Pi
|
#!/usr/bin/python
#Geoff Spielman
#April 22, 2016
import sys
import time
import serial
ser = serial.Serial('/dev/ttyACM0', 9600)
#strings and ints for storing left and right motor power
mLint = 0
mRint = 0
mL = ''
mR = ''
#prepare for stop
Stp = '0,0!'
BAS=bytearray()
BAS.extend(map(ord, Stp))
def IsAnInt(recString):
try:
int(recString)
return True
except ValueError:
return False
print('Type "stop" at any time to stop car')
print('To maintain levels, don\'t enter an integer. Valid input: 0<int<255')
while mL != 'stop' and mR != 'stop':
mL = input('Set left motor power: ')
if (mL.lower() != 'stop'):
mR = str(input('Set right motor power: '))
if (mL.lower() == 'stop' or mR.lower() == 'stop'): #immediately stop
ser.write(BAS)
sys.exit('Stopped')
#if user entered text (button mash), maintain motor levels
if IsAnInt(mL):
mLint = int(mL)
if mLint > 255:
mLint = 255
if mLint < 0:
mLint = 0
if IsAnInt(mR):
mRint = int(mR)
if mRint > 255:
mRint = 255
if mRint < 0:
mRint = 0
print('\nCurrent Values - Left Motor: %d Right Motor: %d' %(mLint, mRint))
output = str(mLint) + ',' + str(mRint) + '!' #this is how arduino finds values
BA = bytearray()
BA.extend(map(ord, output))
#print (BA)
ser.write(BA)
|
<commit_before><commit_msg>Move python code to Pi
Not complete, just moving to Pi<commit_after>
|
#!/usr/bin/python
#Geoff Spielman
#April 22, 2016
import sys
import time
import serial
ser = serial.Serial('/dev/ttyACM0', 9600)
#strings and ints for storing left and right motor power
mLint = 0
mRint = 0
mL = ''
mR = ''
#prepare for stop
Stp = '0,0!'
BAS=bytearray()
BAS.extend(map(ord, Stp))
def IsAnInt(recString):
try:
int(recString)
return True
except ValueError:
return False
print('Type "stop" at any time to stop car')
print('To maintain levels, don\'t enter an integer. Valid input: 0<int<255')
while mL != 'stop' and mR != 'stop':
mL = input('Set left motor power: ')
if (mL.lower() != 'stop'):
mR = str(input('Set right motor power: '))
if (mL.lower() == 'stop' or mR.lower() == 'stop'): #immediately stop
ser.write(BAS)
sys.exit('Stopped')
#if user entered text (button mash), maintain motor levels
if IsAnInt(mL):
mLint = int(mL)
if mLint > 255:
mLint = 255
if mLint < 0:
mLint = 0
if IsAnInt(mR):
mRint = int(mR)
if mRint > 255:
mRint = 255
if mRint < 0:
mRint = 0
print('\nCurrent Values - Left Motor: %d Right Motor: %d' %(mLint, mRint))
output = str(mLint) + ',' + str(mRint) + '!' #this is how arduino finds values
BA = bytearray()
BA.extend(map(ord, output))
#print (BA)
ser.write(BA)
|
Move python code to Pi
Not complete, just moving to Pi#!/usr/bin/python
#Geoff Spielman
#April 22, 2016
import sys
import time
import serial
ser = serial.Serial('/dev/ttyACM0', 9600)
#strings and ints for storing left and right motor power
mLint = 0
mRint = 0
mL = ''
mR = ''
#prepare for stop
Stp = '0,0!'
BAS=bytearray()
BAS.extend(map(ord, Stp))
def IsAnInt(recString):
try:
int(recString)
return True
except ValueError:
return False
print('Type "stop" at any time to stop car')
print('To maintain levels, don\'t enter an integer. Valid input: 0<int<255')
while mL != 'stop' and mR != 'stop':
mL = input('Set left motor power: ')
if (mL.lower() != 'stop'):
mR = str(input('Set right motor power: '))
if (mL.lower() == 'stop' or mR.lower() == 'stop'): #immediately stop
ser.write(BAS)
sys.exit('Stopped')
#if user entered text (button mash), maintain motor levels
if IsAnInt(mL):
mLint = int(mL)
if mLint > 255:
mLint = 255
if mLint < 0:
mLint = 0
if IsAnInt(mR):
mRint = int(mR)
if mRint > 255:
mRint = 255
if mRint < 0:
mRint = 0
print('\nCurrent Values - Left Motor: %d Right Motor: %d' %(mLint, mRint))
output = str(mLint) + ',' + str(mRint) + '!' #this is how arduino finds values
BA = bytearray()
BA.extend(map(ord, output))
#print (BA)
ser.write(BA)
|
<commit_before><commit_msg>Move python code to Pi
Not complete, just moving to Pi<commit_after>#!/usr/bin/python
#Geoff Spielman
#April 22, 2016
import sys
import time
import serial
ser = serial.Serial('/dev/ttyACM0', 9600)
#strings and ints for storing left and right motor power
mLint = 0
mRint = 0
mL = ''
mR = ''
#prepare for stop
Stp = '0,0!'
BAS=bytearray()
BAS.extend(map(ord, Stp))
def IsAnInt(recString):
try:
int(recString)
return True
except ValueError:
return False
print('Type "stop" at any time to stop car')
print('To maintain levels, don\'t enter an integer. Valid input: 0<int<255')
while mL != 'stop' and mR != 'stop':
mL = input('Set left motor power: ')
if (mL.lower() != 'stop'):
mR = str(input('Set right motor power: '))
if (mL.lower() == 'stop' or mR.lower() == 'stop'): #immediately stop
ser.write(BAS)
sys.exit('Stopped')
#if user entered text (button mash), maintain motor levels
if IsAnInt(mL):
mLint = int(mL)
if mLint > 255:
mLint = 255
if mLint < 0:
mLint = 0
if IsAnInt(mR):
mRint = int(mR)
if mRint > 255:
mRint = 255
if mRint < 0:
mRint = 0
print('\nCurrent Values - Left Motor: %d Right Motor: %d' %(mLint, mRint))
output = str(mLint) + ',' + str(mRint) + '!' #this is how arduino finds values
BA = bytearray()
BA.extend(map(ord, output))
#print (BA)
ser.write(BA)
|
|
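The motor-control record above frames each command on the serial line as "<left>,<right>!". A short sketch of that framing with the clamping factored into a helper; pyserial is assumed installed, and the port name is the one the record uses:

import serial  # pyserial

def clamp(value, lo=0, hi=255):
    # Keep the motor power inside the range the Arduino sketch expects.
    return max(lo, min(hi, value))

def send_powers(ser, left, right):
    # Message format: "<left>,<right>!" where '!' terminates the frame.
    frame = '%d,%d!' % (clamp(left), clamp(right))
    ser.write(frame.encode('ascii'))

ser = serial.Serial('/dev/ttyACM0', 9600)
send_powers(ser, 120, 200)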
89363fb720d259b60f9ec6d9872f59db1a28e14c
|
examples/Gauss_example.py
|
examples/Gauss_example.py
|
import sys
import time
import numpy as np
from abcpy.core import *
from abcpy.distributions import *
from distributed import Client
from dask.dot import dot_graph
from functools import partial
import matplotlib
import matplotlib.pyplot as plt
def normal_simu(n, mu, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n)
u = mu + latents
y = u
return y
def mean(y):
mu = np.mean(y, axis=1, keepdims=True)
return mu
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
def main():
n = 1000
mu = 1.6
# Set up observed data y
latents = np.random.randn(n)
y = normal_simu(n, mu, latents=latents)
# Plot
plt.hist(y)
# Set up the simulator
simulator = partial(normal_simu, n)
# Specify the graphical model
mu = Prior('mu', 'uniform', 0, 4)
Y = Simulator('normal_simu', simulator, mu, observed=y)
S1 = Summary('S1', mean, Y)
d = Discrepancy('d', distance, S1)
# Specify the number of simulations
N = 1000000
# Time and run parallel
s = time.time()
dists = d.generate(N, batch_size=10000).compute()
print("Elapsed time %d sec" % (time.time() - s))
# Take the parameters
mu_sample = mu.generate(N).compute()
# Set threshold and reject to get posteriors
eps = 0.01
accepts = dists < eps
mu_post = mu_sample[accepts]
print("Number of accepted samples %d" % sum(accepts))
if len(mu_post) > 0:
print("Posterior for mu")
plt.hist(mu_post, bins=20)
else:
print("No accepted samples")
if __name__ == "__main__":
main()
|
Add script variant of the Gauss example
|
Add script variant of the Gauss example
|
Python
|
bsd-3-clause
|
lintusj1/elfi,elfi-dev/elfi,elfi-dev/elfi,HIIT/elfi,lintusj1/elfi
|
Add script variant of the Gauss example
|
import sys
import time
import numpy as np
from abcpy.core import *
from abcpy.distributions import *
from distributed import Client
from dask.dot import dot_graph
from functools import partial
import matplotlib
import matplotlib.pyplot as plt
def normal_simu(n, mu, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n)
u = mu + latents
y = u
return y
def mean(y):
mu = np.mean(y, axis=1, keepdims=True)
return mu
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
def main():
n = 1000
mu = 1.6
# Set up observed data y
latents = np.random.randn(n)
y = normal_simu(n, mu, latents=latents)
# Plot
plt.hist(y)
# Set up the simulator
simulator = partial(normal_simu, n)
# Specify the graphical model
mu = Prior('mu', 'uniform', 0, 4)
Y = Simulator('normal_simu', simulator, mu, observed=y)
S1 = Summary('S1', mean, Y)
d = Discrepancy('d', distance, S1)
# Specify the number of simulations
N = 1000000
# Time and run parallel
s = time.time()
dists = d.generate(N, batch_size=10000).compute()
print("Elapsed time %d sec" % (time.time() - s))
# Take the parameters
mu_sample = mu.generate(N).compute()
# Set threshold and reject to get posteriors
eps = 0.01
accepts = dists < eps
mu_post = mu_sample[accepts]
print("Number of accepted samples %d" % sum(accepts))
if len(mu_post) > 0:
print("Posterior for mu")
plt.hist(mu_post, bins=20)
else:
print("No accepted samples")
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script variant of the Gauss example<commit_after>
|
import sys
import time
import numpy as np
from abcpy.core import *
from abcpy.distributions import *
from distributed import Client
from dask.dot import dot_graph
from functools import partial
import matplotlib
import matplotlib.pyplot as plt
def normal_simu(n, mu, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n)
u = mu + latents
y = u
return y
def mean(y):
mu = np.mean(y, axis=1, keepdims=True)
return mu
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
def main():
n = 1000
mu = 1.6
# Set up observed data y
latents = np.random.randn(n)
y = normal_simu(n, mu, latents=latents)
# Plot
plt.hist(y)
# Set up the simulator
simulator = partial(normal_simu, n)
# Specify the graphical model
mu = Prior('mu', 'uniform', 0, 4)
Y = Simulator('normal_simu', simulator, mu, observed=y)
S1 = Summary('S1', mean, Y)
d = Discrepancy('d', distance, S1)
# Specify the number of simulations
N = 1000000
# Time and run parallel
s = time.time()
dists = d.generate(N, batch_size=10000).compute()
print("Elapsed time %d sec" % (time.time() - s))
# Take the parameters
mu_sample = mu.generate(N).compute()
# Set threshold and reject to get posteriors
eps = 0.01
accepts = dists < eps
mu_post = mu_sample[accepts]
print("Number of accepted samples %d" % sum(accepts))
if len(mu_post) > 0:
print("Posterior for mu")
plt.hist(mu_post, bins=20)
else:
print("No accepted samples")
if __name__ == "__main__":
main()
|
Add script variant of the Gauss exampleimport sys
import time
import numpy as np
from abcpy.core import *
from abcpy.distributions import *
from distributed import Client
from dask.dot import dot_graph
from functools import partial
import matplotlib
import matplotlib.pyplot as plt
def normal_simu(n, mu, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n)
u = mu + latents
y = u
return y
def mean(y):
mu = np.mean(y, axis=1, keepdims=True)
return mu
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
def main():
n = 1000
mu = 1.6
# Set up observed data y
latents = np.random.randn(n)
y = normal_simu(n, mu, latents=latents)
# Plot
plt.hist(y)
# Set up the simulator
simulator = partial(normal_simu, n)
# Specify the graphical model
mu = Prior('mu', 'uniform', 0, 4)
Y = Simulator('normal_simu', simulator, mu, observed=y)
S1 = Summary('S1', mean, Y)
d = Discrepancy('d', distance, S1)
# Specify the number of simulations
N = 1000000
# Time and run parallel
s = time.time()
dists = d.generate(N, batch_size=10000).compute()
print("Elapsed time %d sec" % (time.time() - s))
# Take the parameters
mu_sample = mu.generate(N).compute()
# Set threshold and reject to get posteriors
eps = 0.01
accepts = dists < eps
mu_post = mu_sample[accepts]
print("Number of accepted samples %d" % sum(accepts))
if len(mu_post) > 0:
print("Posterior for mu")
plt.hist(mu_post, bins=20)
else:
print("No accepted samples")
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script variant of the Gauss example<commit_after>import sys
import time
import numpy as np
from abcpy.core import *
from abcpy.distributions import *
from distributed import Client
from dask.dot import dot_graph
from functools import partial
import matplotlib
import matplotlib.pyplot as plt
def normal_simu(n, mu, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n)
u = mu + latents
y = u
return y
def mean(y):
mu = np.mean(y, axis=1, keepdims=True)
return mu
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
def main():
n = 1000
mu = 1.6
# Set up observed data y
latents = np.random.randn(n)
y = normal_simu(n, mu, latents=latents)
# Plot
plt.hist(y)
# Set up the simulator
simulator = partial(normal_simu, n)
# Specify the graphical model
mu = Prior('mu', 'uniform', 0, 4)
Y = Simulator('normal_simu', simulator, mu, observed=y)
S1 = Summary('S1', mean, Y)
d = Discrepancy('d', distance, S1)
# Specify the number of simulations
N = 1000000
# Time and run parallel
s = time.time()
dists = d.generate(N, batch_size=10000).compute()
print("Elapsed time %d sec" % (time.time() - s))
# Take the parameters
mu_sample = mu.generate(N).compute()
# Set threshold and reject to get posteriors
eps = 0.01
accepts = dists < eps
mu_post = mu_sample[accepts]
print("Number of accepted samples %d" % sum(accepts))
if len(mu_post) > 0:
print("Posterior for mu")
plt.hist(mu_post, bins=20)
else:
print("No accepted samples")
if __name__ == "__main__":
main()
|
|
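For readers without the abcpy/dask stack, here is a self-contained NumPy sketch of the rejection-ABC loop the Gauss example performs (uniform prior on mu, sample mean as the summary, absolute difference as the discrepancy). The helper name is illustrative and not part of the record:

import numpy as np

def rejection_abc(y_obs, n_sim=1000000, eps=0.01, seed=0):
    rng = np.random.RandomState(seed)
    n = len(y_obs)
    s_obs = y_obs.mean()
    mu = rng.uniform(0, 4, n_sim)              # draws from the uniform prior
    # The mean of n simulated N(mu, 1) points is N(mu, 1/n), so the summary
    # statistic can be sampled directly instead of simulating full datasets.
    s_sim = mu + rng.randn(n_sim) / np.sqrt(n)
    dists = np.abs(s_sim - s_obs)              # discrepancy per simulation
    return mu[dists < eps]                     # accepted posterior samples

y = 1.6 + np.random.randn(1000)
post = rejection_abc(y)
print('accepted samples:', post.size)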
a738a4b5a9d5dbb56b79ca85ce325e8bfcd4a0cf
|
genjsonfiletree.py
|
genjsonfiletree.py
|
#!/usr/bin/env python
"""Scans a directory tree for .js files and builds a JSON representation.
Scans a directory tree for .js files, and puts the contents into a single JSON
object map of path to content.
Output is written to stdout.
Usage:
$ genjsonfiletree.py
Scans the current directory.
$ genjsonfiletree.py path/to/dir
Scans the given directory.
"""
import os
import json
import logging
import sys
def _YieldPaths(root):
for dir_root, dirs, files in os.walk(root):
for file_path in files:
abspath = os.path.join(dir_root, file_path)
relpath = os.path.relpath(abspath, root)
yield relpath, abspath
def _YieldJsPaths(root):
for relpath, abspath in _YieldPaths(root):
_, ext = os.path.splitext(abspath)
if ext == '.js':
yield relpath, abspath
def ScanTree(tree_root):
tree = dict()
for relpath, abspath in _YieldJsPaths(tree_root):
logging.info('Reading file: %s', relpath)
logging.info('abspath: %s', abspath)
with open(abspath) as f:
tree[relpath] = f.read()
return tree
def main():
logging.basicConfig(level=logging.INFO)
if len(sys.argv) == 1:
logging.info('Path not specified. Using current directory as path.')
dir_root = os.getcwd()
elif len(sys.argv) == 2:
dir_root = sys.argv[1]
else:
sys.exit(__doc__)
logging.info('Scanning tree. Path: "%s"', dir_root)
tree = ScanTree(dir_root)
sys.stdout.write(json.dumps(tree))
if __name__ == '__main__':
main()
|
Add a script that scans a directory tree for .js files, reads them, and builds a map -- path from root to file contents. This is outputted as a JSON string to standard out.
|
Add a script that scans a directory tree for .js files, reads them,
and builds a map -- path from root to file contents.
This is outputted as a JSON string to standard out.
|
Python
|
apache-2.0
|
Prachigarg1/Prachi,Prachigarg1/Prachi,nanaze/jsdoctor,nanaze/jsdoctor,Prachigarg1/Prachi,nanaze/jsdoctor
|
Add a script that scans a directory tree for .js files, reads them,
and builds a map -- path from root to file contents.
This is outputted as a JSON string to standard out.
|
#!/usr/bin/env python
"""Scans a directory tree for .js files and builds a JSON representation.
Scans a directory tree for .js files, and puts the contents into a single JSON
object map of path to content.
Output is written to stdout.
Usage:
$ genjsonfiletree.py
Scans the current directory.
$ genjsonfiletree.py path/to/dir
Scans the given directory.
"""
import os
import json
import logging
import sys
def _YieldPaths(root):
for dir_root, dirs, files in os.walk(root):
for file_path in files:
abspath = os.path.join(dir_root, file_path)
relpath = os.path.relpath(abspath, root)
yield relpath, abspath
def _YieldJsPaths(root):
for relpath, abspath in _YieldPaths(root):
_, ext = os.path.splitext(abspath)
if ext == '.js':
yield relpath, abspath
def ScanTree(tree_root):
tree = dict()
for relpath, abspath in _YieldJsPaths(tree_root):
logging.info('Reading file: %s', relpath)
logging.info('abspath: %s', abspath)
with open(abspath) as f:
tree[relpath] = f.read()
return tree
def main():
logging.basicConfig(level=logging.INFO)
if len(sys.argv) == 1:
logging.info('Path not specified. Using current directory as path.')
dir_root = os.getcwd()
elif len(sys.argv) == 2:
dir_root = sys.argv[1]
else:
sys.exit(__doc__)
logging.info('Scanning tree. Path: "%s"', dir_root)
tree = ScanTree(dir_root)
sys.stdout.write(json.dumps(tree))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script that scans a directory tree for .js files, reads them,
and builds a map -- path from root to file contents.
This is outputted as a JSON string to standard out.<commit_after>
|
#!/usr/bin/env python
"""Scans a directory tree for .js files and builds a JSON representation.
Scans a directory tree for .js files, and puts the contents into a single JSON
object map of path to content.
Output is written to stdout.
Usage:
$ genjsonfiletree.py
Scans the current directory.
$ genjsonfiletree.py path/to/dir
Scans the given directory.
"""
import os
import json
import logging
import sys
def _YieldPaths(root):
for dir_root, dirs, files in os.walk(root):
for file_path in files:
abspath = os.path.join(dir_root, file_path)
relpath = os.path.relpath(abspath, root)
yield relpath, abspath
def _YieldJsPaths(root):
for relpath, abspath in _YieldPaths(root):
_, ext = os.path.splitext(abspath)
if ext == '.js':
yield relpath, abspath
def ScanTree(tree_root):
tree = dict()
for relpath, abspath in _YieldJsPaths(tree_root):
logging.info('Reading file: %s', relpath)
logging.info('abspath: %s', abspath)
with open(abspath) as f:
tree[relpath] = f.read()
return tree
def main():
logging.basicConfig(level=logging.INFO)
if len(sys.argv) == 1:
logging.info('Path not specified. Using current directory as path.')
dir_root = os.getcwd()
elif len(sys.argv) == 2:
dir_root = sys.argv[1]
else:
sys.exit(__doc__)
logging.info('Scanning tree. Path: "%s"', dir_root)
tree = ScanTree(dir_root)
sys.stdout.write(json.dumps(tree))
if __name__ == '__main__':
main()
|
Add a script that scans a directory tree for .js files, reads them,
and builds a map -- path from root to file contents.
This is outputted as a JSON string to standard out.#!/usr/bin/env python
"""Scans a directory tree for .js files and builds a JSON representation.
Scans a directory tree for .js files, and puts the contents into a single JSON
object map of path to content.
Output is written to stdout.
Usage:
$ genjsonfiletree.py
Scans the current directory.
$ genjsonfiletree.py path/to/dir
Scans the given directory.
"""
import os
import json
import logging
import sys
def _YieldPaths(root):
for dir_root, dirs, files in os.walk(root):
for file_path in files:
abspath = os.path.join(dir_root, file_path)
relpath = os.path.relpath(abspath, root)
yield relpath, abspath
def _YieldJsPaths(root):
for relpath, abspath in _YieldPaths(root):
_, ext = os.path.splitext(abspath)
if ext == '.js':
yield relpath, abspath
def ScanTree(tree_root):
tree = dict()
for relpath, abspath in _YieldJsPaths(tree_root):
logging.info('Reading file: %s', relpath)
logging.info('abspath: %s', abspath)
with open(abspath) as f:
tree[relpath] = f.read()
return tree
def main():
logging.basicConfig(level=logging.INFO)
if len(sys.argv) == 1:
logging.info('Path not specified. Using current directory as path.')
dir_root = os.getcwd()
elif len(sys.argv) == 2:
dir_root = sys.argv[1]
else:
sys.exit(__doc__)
logging.info('Scanning tree. Path: "%s"', dir_root)
tree = ScanTree(dir_root)
sys.stdout.write(json.dumps(tree))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script that scans a directory tree for .js files, reads them,
and builds a map -- path from root to file contents.
This is outputted as a JSON string to standard out.<commit_after>#!/usr/bin/env python
"""Scans a directory tree for .js files and builds a JSON representation.
Scans a directory tree for .js files, and puts the contents into a single JSON
object map of path to content.
Output is written to stdout.
Usage:
$ genjsonfiletree.py
Scans the current directory.
$ genjsonfiletree.py path/to/dir
Scans the given directory.
"""
import os
import json
import logging
import sys
def _YieldPaths(root):
for dir_root, dirs, files in os.walk(root):
for file_path in files:
abspath = os.path.join(dir_root, file_path)
relpath = os.path.relpath(abspath, root)
yield relpath, abspath
def _YieldJsPaths(root):
for relpath, abspath in _YieldPaths(root):
_, ext = os.path.splitext(abspath)
if ext == '.js':
yield relpath, abspath
def ScanTree(tree_root):
tree = dict()
for relpath, abspath in _YieldJsPaths(tree_root):
logging.info('Reading file: %s', relpath)
logging.info('abspath: %s', abspath)
with open(abspath) as f:
tree[relpath] = f.read()
return tree
def main():
logging.basicConfig(level=logging.INFO)
if len(sys.argv) == 1:
logging.info('Path not specified. Using current directory as path.')
dir_root = os.getcwd()
elif len(sys.argv) == 2:
dir_root = sys.argv[1]
else:
sys.exit(__doc__)
logging.info('Scanning tree. Path: "%s"', dir_root)
tree = ScanTree(dir_root)
sys.stdout.write(json.dumps(tree))
if __name__ == '__main__':
main()
|
|
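On Python 3, the directory scan above collapses to a few lines with pathlib. A sketch with equivalent behavior (this rewrite is illustrative, not part of the record):

import json
import sys
from pathlib import Path

def scan_tree(root):
    root = Path(root)
    # Map each .js file's path, relative to root, to its contents.
    return {str(p.relative_to(root)): p.read_text()
            for p in root.rglob('*.js')}

if __name__ == '__main__':
    dir_root = sys.argv[1] if len(sys.argv) > 1 else '.'
    sys.stdout.write(json.dumps(scan_tree(dir_root)))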
58501ff914c34b2cc81ac99e6fdf384c8b677608
|
molly/apps/transport/importers/naptan.py
|
molly/apps/transport/importers/naptan.py
|
import httplib
from tempfile import TemporaryFile
from zipfile import ZipFile
from tch.parsers.naptan import NaptanParser
class NaptanImporter(object):
HTTP_HOST = "www.dft.gov.uk"
REMOTE_PATH = "/NaPTAN/snapshot/NaPTANxml.zip"
def __init__(self, stop_service):
self._http_connection = httplib.HTTPConnection(self.HTTP_HOST)
self._url = "http://%s%s" % (self.HTTP_HOST, self.REMOTE_PATH)
self._stop_service = stop_service
def _get_file_from_url(self):
temporary = TemporaryFile()
self._http_connection.request('GET', self._url)
temporary.write(self._http_connection.getresponse().read())
return ZipFile(temporary).open('NaPTAN.xml')
def start(self):
parser = NaptanParser()
for stop in parser.import_from_file(self._get_file_from_url(), self._url):
self._stop_service.insert_and_merge(stop)
|
Add importer wrapper for NaPTAN
|
Add importer wrapper for NaPTAN
|
Python
|
apache-2.0
|
ManchesterIO/mollyproject-next,ManchesterIO/mollyproject-next,ManchesterIO/mollyproject-next
|
Add importer wrapper for NaPTAN
|
import httplib
from tempfile import TemporaryFile
from zipfile import ZipFile
from tch.parsers.naptan import NaptanParser
class NaptanImporter(object):
HTTP_HOST = "www.dft.gov.uk"
REMOTE_PATH = "/NaPTAN/snapshot/NaPTANxml.zip"
def __init__(self, stop_service):
self._http_connection = httplib.HTTPConnection(self.HTTP_HOST)
self._url = "http://%s%s" % (self.HTTP_HOST, self.REMOTE_PATH)
self._stop_service = stop_service
def _get_file_from_url(self):
temporary = TemporaryFile()
self._http_connection.request('GET', self._url)
temporary.write(self._http_connection.getresponse().read())
return ZipFile(temporary).open('NaPTAN.xml')
def start(self):
parser = NaptanParser()
for stop in parser.import_from_file(self._get_file_from_url(), self._url):
self._stop_service.insert_and_merge(stop)
|
<commit_before><commit_msg>Add importer wrapper for NaPTAN<commit_after>
|
import httplib
from tempfile import TemporaryFile
from zipfile import ZipFile
from tch.parsers.naptan import NaptanParser
class NaptanImporter(object):
HTTP_HOST = "www.dft.gov.uk"
REMOTE_PATH = "/NaPTAN/snapshot/NaPTANxml.zip"
def __init__(self, stop_service):
self._http_connection = httplib.HTTPConnection(self.HTTP_HOST)
self._url = "http://%s%s" % (self.HTTP_HOST, self.REMOTE_PATH)
self._stop_service = stop_service
def _get_file_from_url(self):
temporary = TemporaryFile()
self._http_connection.request('GET', self._url)
temporary.write(self._http_connection.getresponse().read())
return ZipFile(temporary).open('NaPTAN.xml')
def start(self):
parser = NaptanParser()
for stop in parser.import_from_file(self._get_file_from_url(), self._url):
self._stop_service.insert_and_merge(stop)
|
Add importer wrapper for NaPTANimport httplib
from tempfile import TemporaryFile
from zipfile import ZipFile
from tch.parsers.naptan import NaptanParser
class NaptanImporter(object):
HTTP_HOST = "www.dft.gov.uk"
REMOTE_PATH = "/NaPTAN/snapshot/NaPTANxml.zip"
def __init__(self, stop_service):
self._http_connection = httplib.HTTPConnection(self.HTTP_HOST)
self._url = "http://%s%s" % (self.HTTP_HOST, self.REMOTE_PATH)
self._stop_service = stop_service
def _get_file_from_url(self):
temporary = TemporaryFile()
self._http_connection.request('GET', self._url)
temporary.write(self._http_connection.getresponse().read())
return ZipFile(temporary).open('NaPTAN.xml')
def start(self):
parser = NaptanParser()
for stop in parser.import_from_file(self._get_file_from_url(), self._url):
self._stop_service.insert_and_merge(stop)
|
<commit_before><commit_msg>Add importer wrapper for NaPTAN<commit_after>import httplib
from tempfile import TemporaryFile
from zipfile import ZipFile
from tch.parsers.naptan import NaptanParser
class NaptanImporter(object):
HTTP_HOST = "www.dft.gov.uk"
REMOTE_PATH = "/NaPTAN/snapshot/NaPTANxml.zip"
def __init__(self, stop_service):
self._http_connection = httplib.HTTPConnection(self.HTTP_HOST)
self._url = "http://%s%s" % (self.HTTP_HOST, self.REMOTE_PATH)
self._stop_service = stop_service
def _get_file_from_url(self):
temporary = TemporaryFile()
self._http_connection.request('GET', self._url)
temporary.write(self._http_connection.getresponse().read())
return ZipFile(temporary).open('NaPTAN.xml')
def start(self):
parser = NaptanParser()
for stop in parser.import_from_file(self._get_file_from_url(), self._url):
self._stop_service.insert_and_merge(stop)
|
|
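A hedged usage sketch for the importer above: the stop service is a stand-in exposing only the insert_and_merge method the class actually calls, so nothing beyond the record's own API is assumed:

class InMemoryStopService(object):
    # Stand-in collaborator: simply collects the parsed stops.
    def __init__(self):
        self.stops = []
    def insert_and_merge(self, stop):
        self.stops.append(stop)

stop_service = InMemoryStopService()
importer = NaptanImporter(stop_service)
importer.start()  # downloads the NaPTAN snapshot and feeds every stop in
print(len(stop_service.stops))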
3ce6f9d4e1cdd282ba6747aab80c18d2e9106168
|
greenfan/management/commands/turn-off-non-build-nodes.py
|
greenfan/management/commands/turn-off-non-build-nodes.py
|
#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import urlparse
from subprocess import Popen
from time import sleep, time
from django.core.management.base import BaseCommand
from django.template import Context, Template
from fabric.api import env as fabric_env
from fabric.api import run, local, sudo, put
from greenfan import utils
from greenfan.models import Configuration, TestSpecification, Server
def run_cmd(args):
proc = Popen(args)
return proc.communicate()
class Command(BaseCommand):
def handle(self, job_id, **options):
job = TestSpecification.objects.get(id=job_id)
config = Configuration.get()
fabric_env.host_string = '%s@%s' % (config.admin_user, job.build_node().ip)
fabric_env.password = config.admin_password
fabric_env.abort_on_prompts = True
fabric_env.sudo_prefix = 'sudo -H -S -p \'%(sudo_prompt)s\' '
nodes = job.non_build_nodes()
for node in nodes:
sudo('timeout 10 cobbler system poweroff --name=%s' % (node,))
|
Add script to turn off all non-build nodes
|
Add script to turn off all non-build nodes
|
Python
|
apache-2.0
|
sorenh/python-django-greenfan,sorenh/python-django-greenfan
|
Add script to turn off all non-build nodes
|
#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import urlparse
from subprocess import Popen
from time import sleep, time
from django.core.management.base import BaseCommand
from django.template import Context, Template
from fabric.api import env as fabric_env
from fabric.api import run, local, sudo, put
from greenfan import utils
from greenfan.models import Configuration, TestSpecification, Server
def run_cmd(args):
proc = Popen(args)
return proc.communicate()
class Command(BaseCommand):
def handle(self, job_id, **options):
job = TestSpecification.objects.get(id=job_id)
config = Configuration.get()
fabric_env.host_string = '%s@%s' % (config.admin_user, job.build_node().ip)
fabric_env.password = config.admin_password
fabric_env.abort_on_prompts = True
fabric_env.sudo_prefix = 'sudo -H -S -p \'%(sudo_prompt)s\' '
nodes = job.non_build_nodes()
for node in nodes:
sudo('timeout 10 cobbler system poweroff --name=%s' % (node,))
|
<commit_before><commit_msg>Add script to turn off all non-build nodes<commit_after>
|
#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import urlparse
from subprocess import Popen
from time import sleep, time
from django.core.management.base import BaseCommand
from django.template import Context, Template
from fabric.api import env as fabric_env
from fabric.api import run, local, sudo, put
from greenfan import utils
from greenfan.models import Configuration, TestSpecification, Server
def run_cmd(args):
proc = Popen(args)
return proc.communicate()
class Command(BaseCommand):
def handle(self, job_id, **options):
job = TestSpecification.objects.get(id=job_id)
config = Configuration.get()
fabric_env.host_string = '%s@%s' % (config.admin_user, job.build_node().ip)
fabric_env.password = config.admin_password
fabric_env.abort_on_prompts = True
fabric_env.sudo_prefix = 'sudo -H -S -p \'%(sudo_prompt)s\' '
nodes = job.non_build_nodes()
for node in nodes:
sudo('timeout 10 cobbler system poweroff --name=%s' % (node,))
|
Add script to turn off all non-build nodes#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import urlparse
from subprocess import Popen
from time import sleep, time
from django.core.management.base import BaseCommand
from django.template import Context, Template
from fabric.api import env as fabric_env
from fabric.api import run, local, sudo, put
from greenfan import utils
from greenfan.models import Configuration, TestSpecification, Server
def run_cmd(args):
proc = Popen(args)
return proc.communicate()
class Command(BaseCommand):
def handle(self, job_id, **options):
job = TestSpecification.objects.get(id=job_id)
config = Configuration.get()
fabric_env.host_string = '%s@%s' % (config.admin_user, job.build_node().ip)
fabric_env.password = config.admin_password
fabric_env.abort_on_prompts = True
fabric_env.sudo_prefix = 'sudo -H -S -p \'%(sudo_prompt)s\' '
nodes = job.non_build_nodes()
for node in nodes:
sudo('timeout 10 cobbler system poweroff --name=%s' % (node,))
|
<commit_before><commit_msg>Add script to turn off all non-build nodes<commit_after>#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import urlparse
from subprocess import Popen
from time import sleep, time
from django.core.management.base import BaseCommand
from django.template import Context, Template
from fabric.api import env as fabric_env
from fabric.api import run, local, sudo, put
from greenfan import utils
from greenfan.models import Configuration, TestSpecification, Server
def run_cmd(args):
proc = Popen(args)
return proc.communicate()
class Command(BaseCommand):
def handle(self, job_id, **options):
job = TestSpecification.objects.get(id=job_id)
config = Configuration.get()
fabric_env.host_string = '%s@%s' % (config.admin_user, job.build_node().ip)
fabric_env.password = config.admin_password
fabric_env.abort_on_prompts = True
fabric_env.sudo_prefix = 'sudo -H -S -p \'%(sudo_prompt)s\' '
nodes = job.non_build_nodes()
for node in nodes:
sudo('timeout 10 cobbler system poweroff --name=%s' % (node,))
|
|
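Assuming the app is installed in a standard Django project, the management command above can be driven programmatically as well as from the shell. A sketch, with the command name inferred from the file name:

from django.core.management import call_command

# Equivalent to: python manage.py turn-off-non-build-nodes 42
call_command('turn-off-non-build-nodes', '42')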
3220af50dd6c4642ac668b2c6dc89e9d76d0e732
|
apps/submission/tests/test_flows.py
|
apps/submission/tests/test_flows.py
|
from unittest import mock
from django.test import TestCase
from viewflow import lock
from viewflow.activation import STATUS
from viewflow.base import this
from ..flows import AsyncActivationHandler, AsyncHandler
class ProcessStub(object):
_default_manager = mock.Mock()
def __init__(self, flow_class=None):
self.flow_class = flow_class
def active_tasks(self):
return []
def save(self):
self.pk = 1
return
class TaskStub(object):
_default_manager = mock.Mock()
def __init__(self, flow_task=None):
self.flow_task = flow_task
self.process_id = 1
self.pk = 1
self.status = STATUS.NEW
self.started = None
@property
def leading(self):
from viewflow.models import Task
return Task.objects.none()
def save(self):
self.pk = 1
return
class FlowStub(object):
process_class = ProcessStub
task_class = TaskStub
lock_impl = lock.no_lock
instance = None
class AsyncFlow(FlowStub):
handler_task = AsyncHandler(this.task_handler)
method_called = False
def task_handler(self, activation):
AsyncFlow.method_called = True
class AsyncActivationHandlerTestCase(TestCase):
def init_node(self, node, flow_class=None, name='test_node'):
node.flow_class = flow_class or FlowStub
node.name = name
node.ready()
return node
def setUp(self):
ProcessStub._default_manager.get.return_value = ProcessStub()
TaskStub._default_manager.get.return_value = TaskStub()
FlowStub.instance = FlowStub()
def test_perform(self):
AsyncFlow.instance = AsyncFlow()
flow_task = self.init_node(
AsyncFlow.handler_task,
flow_class=AsyncFlow,
name='task'
)
act = AsyncActivationHandler()
act.initialize(flow_task, TaskStub())
# execute
act.perform()
self.assertEqual(act.task.status, STATUS.NEW)
self.assertTrue(AsyncFlow.method_called)
def test_callback(self):
assert False
|
Add tests for the AsyncActivationHandler
|
[draft] Add tests for the AsyncActivationHandler
|
Python
|
bsd-3-clause
|
Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel
|
[draft] Add tests for the AsyncActivationHandler
|
from unittest import mock
from django.test import TestCase
from viewflow import lock
from viewflow.activation import STATUS
from viewflow.base import this
from ..flows import AsyncActivationHandler, AsyncHandler
class ProcessStub(object):
_default_manager = mock.Mock()
def __init__(self, flow_class=None):
self.flow_class = flow_class
def active_tasks(self):
return []
def save(self):
self.pk = 1
return
class TaskStub(object):
_default_manager = mock.Mock()
def __init__(self, flow_task=None):
self.flow_task = flow_task
self.process_id = 1
self.pk = 1
self.status = STATUS.NEW
self.started = None
@property
def leading(self):
from viewflow.models import Task
return Task.objects.none()
def save(self):
self.pk = 1
return
class FlowStub(object):
process_class = ProcessStub
task_class = TaskStub
lock_impl = lock.no_lock
instance = None
class AsyncFlow(FlowStub):
handler_task = AsyncHandler(this.task_handler)
method_called = False
def task_handler(self, activation):
AsyncFlow.method_called = True
class AsyncActivationHandlerTestCase(TestCase):
def init_node(self, node, flow_class=None, name='test_node'):
node.flow_class = flow_class or FlowStub
node.name = name
node.ready()
return node
def setUp(self):
ProcessStub._default_manager.get.return_value = ProcessStub()
TaskStub._default_manager.get.return_value = TaskStub()
FlowStub.instance = FlowStub()
def test_perform(self):
AsyncFlow.instance = AsyncFlow()
flow_task = self.init_node(
AsyncFlow.handler_task,
flow_class=AsyncFlow,
name='task'
)
act = AsyncActivationHandler()
act.initialize(flow_task, TaskStub())
# execute
act.perform()
self.assertEqual(act.task.status, STATUS.NEW)
self.assertTrue(AsyncFlow.method_called)
def test_callback(self):
assert False
|
<commit_before><commit_msg>[draft] Add tests for the AsyncActivationHandler<commit_after>
|
from unittest import mock
from django.test import TestCase
from viewflow import lock
from viewflow.activation import STATUS
from viewflow.base import this
from ..flows import AsyncActivationHandler, AsyncHandler
class ProcessStub(object):
_default_manager = mock.Mock()
def __init__(self, flow_class=None):
self.flow_class = flow_class
def active_tasks(self):
return []
def save(self):
self.pk = 1
return
class TaskStub(object):
_default_manager = mock.Mock()
def __init__(self, flow_task=None):
self.flow_task = flow_task
self.process_id = 1
self.pk = 1
self.status = STATUS.NEW
self.started = None
@property
def leading(self):
from viewflow.models import Task
return Task.objects.none()
def save(self):
self.pk = 1
return
class FlowStub(object):
process_class = ProcessStub
task_class = TaskStub
lock_impl = lock.no_lock
instance = None
class AsyncFlow(FlowStub):
handler_task = AsyncHandler(this.task_handler)
method_called = False
def task_handler(self, activation):
AsyncFlow.method_called = True
class AsyncActivationHandlerTestCase(TestCase):
def init_node(self, node, flow_class=None, name='test_node'):
node.flow_class = flow_class or FlowStub
node.name = name
node.ready()
return node
def setUp(self):
ProcessStub._default_manager.get.return_value = ProcessStub()
TaskStub._default_manager.get.return_value = TaskStub()
FlowStub.instance = FlowStub()
def test_perform(self):
AsyncFlow.instance = AsyncFlow()
flow_task = self.init_node(
AsyncFlow.handler_task,
flow_class=AsyncFlow,
name='task'
)
act = AsyncActivationHandler()
act.initialize(flow_task, TaskStub())
# execute
act.perform()
self.assertEqual(act.task.status, STATUS.NEW)
self.assertTrue(AsyncFlow.method_called)
def test_callback(self):
assert False
|
[draft] Add tests for the AsyncActivationHandlerfrom unittest import mock
from django.test import TestCase
from viewflow import lock
from viewflow.activation import STATUS
from viewflow.base import this
from ..flows import AsyncActivationHandler, AsyncHandler
class ProcessStub(object):
_default_manager = mock.Mock()
def __init__(self, flow_class=None):
self.flow_class = flow_class
def active_tasks(self):
return []
def save(self):
self.pk = 1
return
class TaskStub(object):
_default_manager = mock.Mock()
def __init__(self, flow_task=None):
self.flow_task = flow_task
self.process_id = 1
self.pk = 1
self.status = STATUS.NEW
self.started = None
@property
def leading(self):
from viewflow.models import Task
return Task.objects.none()
def save(self):
self.pk = 1
return
class FlowStub(object):
process_class = ProcessStub
task_class = TaskStub
lock_impl = lock.no_lock
instance = None
class AsyncFlow(FlowStub):
handler_task = AsyncHandler(this.task_handler)
method_called = False
def task_handler(self, activation):
AsyncFlow.method_called = True
class AsyncActivationHandlerTestCase(TestCase):
def init_node(self, node, flow_class=None, name='test_node'):
node.flow_class = flow_class or FlowStub
node.name = name
node.ready()
return node
def setUp(self):
ProcessStub._default_manager.get.return_value = ProcessStub()
TaskStub._default_manager.get.return_value = TaskStub()
FlowStub.instance = FlowStub()
def test_perform(self):
AsyncFlow.instance = AsyncFlow()
flow_task = self.init_node(
AsyncFlow.handler_task,
flow_class=AsyncFlow,
name='task'
)
act = AsyncActivationHandler()
act.initialize(flow_task, TaskStub())
# execute
act.perform()
self.assertEqual(act.task.status, STATUS.NEW)
self.assertTrue(AsyncFlow.method_called)
def test_callback(self):
assert False
|
<commit_before><commit_msg>[draft] Add tests for the AsyncActivationHandler<commit_after>from unittest import mock
from django.test import TestCase
from viewflow import lock
from viewflow.activation import STATUS
from viewflow.base import this
from ..flows import AsyncActivationHandler, AsyncHandler
class ProcessStub(object):
_default_manager = mock.Mock()
def __init__(self, flow_class=None):
self.flow_class = flow_class
def active_tasks(self):
return []
def save(self):
self.pk = 1
return
class TaskStub(object):
_default_manager = mock.Mock()
def __init__(self, flow_task=None):
self.flow_task = flow_task
self.process_id = 1
self.pk = 1
self.status = STATUS.NEW
self.started = None
@property
def leading(self):
from viewflow.models import Task
return Task.objects.none()
def save(self):
self.pk = 1
return
class FlowStub(object):
process_class = ProcessStub
task_class = TaskStub
lock_impl = lock.no_lock
instance = None
class AsyncFlow(FlowStub):
handler_task = AsyncHandler(this.task_handler)
method_called = False
def task_handler(self, activation):
AsyncFlow.method_called = True
class AsyncActivationHandlerTestCase(TestCase):
def init_node(self, node, flow_class=None, name='test_node'):
node.flow_class = flow_class or FlowStub
node.name = name
node.ready()
return node
def setUp(self):
ProcessStub._default_manager.get.return_value = ProcessStub()
TaskStub._default_manager.get.return_value = TaskStub()
FlowStub.instance = FlowStub()
def test_perform(self):
AsyncFlow.instance = AsyncFlow()
flow_task = self.init_node(
AsyncFlow.handler_task,
flow_class=AsyncFlow,
name='task'
)
act = AsyncActivationHandler()
act.initialize(flow_task, TaskStub())
# execute
act.perform()
self.assertEqual(act.task.status, STATUS.NEW)
self.assertTrue(AsyncFlow.method_called)
def test_callback(self):
assert False
|
|
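The record's trailing test_callback fails deliberately (assert False) as a draft marker. A sketch of parking it more explicitly with unittest's skip machinery until the callback path is covered; plain unittest only, no viewflow specifics assumed:

import unittest

class AsyncActivationHandlerCallbackTestCase(unittest.TestCase):
    @unittest.skip('callback behaviour not implemented yet')
    def test_callback(self):
        # TODO: drive AsyncActivationHandler through its callback path.
        pass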
0efa0da2313ae4fffca1a17e820262185d60a402
|
nova/db/sqlalchemy/migrate_repo/versions/103_instance_indexes.py
|
nova/db/sqlalchemy/migrate_repo/versions/103_instance_indexes.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# Copyright 2012 Michael Still and Canonical Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for table in ['block_device_mapping',
'consoles',
'volumes']:
t = Table(table, meta, autoload=True)
i = Index('%s_instance_uuid_idx' % table, t.c.instance_uuid)
i.create(migrate_engine)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for table in ['block_device_mapping',
'consoles',
'volumes']:
t = Table(table, meta, autoload=True)
i = Index('%s_instance_uuid_idx' % table, t.c.instance_uuid)
i.drop(migrate_engine)
|
Add indexes to new instance_uuid columns.
|
Add indexes to new instance_uuid columns.
Resolves bug 1009738 as well as several other bugs that haven't been
reported yet.
Change-Id: I09b456df70aaaba1bc4ac00514e63bda804d7f92
|
Python
|
apache-2.0
|
angdraug/nova,usc-isi/nova,belmiromoreira/nova,ewindisch/nova,maelnor/nova,thomasem/nova,gooddata/openstack-nova,akash1808/nova,akash1808/nova,Stavitsky/nova,rrader/nova-docker-plugin,bclau/nova,JianyuWang/nova,openstack/nova,sebrandon1/nova,CEG-FYP-OpenStack/scheduler,OpenAcademy-OpenStack/nova-scheduler,projectcalico/calico-nova,Metaswitch/calico-nova,fajoy/nova,NoBodyCam/TftpPxeBootBareMetal,BeyondTheClouds/nova,badock/nova,qwefi/nova,CiscoSystems/nova,Juniper/nova,gspilio/nova,savi-dev/nova,aristanetworks/arista-ovs-nova,double12gzh/nova,ntt-sic/nova,phenoxim/nova,gooddata/openstack-nova,Triv90/Nova,vmturbo/nova,apporc/nova,virtualopensystems/nova,jianghuaw/nova,rrader/nova-docker-plugin,j-carpentier/nova,mikalstill/nova,bigswitch/nova,petrutlucian94/nova,tangfeixiong/nova,yatinkumbhare/openstack-nova,JianyuWang/nova,imsplitbit/nova,felixma/nova,tangfeixiong/nova,houshengbo/nova_vmware_compute_driver,bigswitch/nova,double12gzh/nova,akash1808/nova_test_latest,shail2810/nova,dstroppa/openstack-smartos-nova-grizzly,silenceli/nova,gooddata/openstack-nova,maheshp/novatest,j-carpentier/nova,alaski/nova,eayunstack/nova,Yuriy-Leonov/nova,Triv90/Nova,petrutlucian94/nova,imsplitbit/nova,paulmathews/nova,tanglei528/nova,watonyweng/nova,rickerc/nova_audit,leilihh/nova,dims/nova,TwinkleChawla/nova,leilihh/novaha,zzicewind/nova,rajalokan/nova,luogangyi/bcec-nova,nikesh-mahalka/nova,alexandrucoman/vbox-nova-driver,CCI-MOC/nova,maoy/zknova,Juniper/nova,TieWei/nova,cyx1231st/nova,cernops/nova,eharney/nova,barnsnake351/nova,felixma/nova,rajalokan/nova,CCI-MOC/nova,sridevikoushik31/openstack,cloudbau/nova,vmturbo/nova,kimjaejoong/nova,akash1808/nova_test_latest,citrix-openstack-build/nova,yosshy/nova,rajalokan/nova,takeshineshiro/nova,maoy/zknova,cyx1231st/nova,kimjaejoong/nova,hanlind/nova,CiscoSystems/nova,vmturbo/nova,joker946/nova,silenceli/nova,Tehsmash/nova,whitepages/nova,mgagne/nova,devendermishrajio/nova,whitepages/nova,JioCloud/nova_test_latest,shootstar/novatest,ewindisch/nova,yosshy/nova,dims/nova,cloudbase/nova-virtualbox,saleemjaveds/https-github.com-openstack-nova,apporc/nova,edulramirez/nova,vmturbo/nova,scripnichenko/nova,sridevikoushik31/openstack,yrobla/nova,isyippee/nova,Juniper/nova,orbitfp7/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,sebrandon1/nova,eonpatapon/nova,paulmathews/nova,NoBodyCam/TftpPxeBootBareMetal,JioCloud/nova,savi-dev/nova,mandeepdhami/nova,barnsnake351/nova,varunarya10/nova_test_latest,bclau/nova,viggates/nova,sacharya/nova,SUSE-Cloud/nova,sebrandon1/nova,NewpTone/stacklab-nova,petrutlucian94/nova_dev,Metaswitch/calico-nova,plumgrid/plumgrid-nova,zhimin711/nova,klmitch/nova,Yusuke1987/openstack_template,saleemjaveds/https-github.com-openstack-nova,Juniper/nova,DirectXMan12/nova-hacking,maheshp/novatest,fnordahl/nova,rahulunair/nova,aristanetworks/arista-ovs-nova,adelina-t/nova,MountainWei/nova,mmnelemane/nova,sridevikoushik31/nova,eayunstack/nova,redhat-openstack/nova,tudorvio/nova,vladikr/nova_drafts,belmiromoreira/nova,mahak/nova,qwefi/nova,hanlind/nova,alvarolopez/nova,BeyondTheClouds/nova,ted-gould/nova,Francis-Liu/animated-broccoli,alvarolopez/nova,ruslanloman/nova,rahulunair/nova,dawnpower/nova,iuliat/nova,raildo/nova,Francis-Liu/animated-broccoli,shahar-stratoscale/nova,OpenAcademy-OpenStack/nova-scheduler,tianweizhang/nova,sridevikoushik31/nova,klmitch/nova,fnordahl/nova,yrobla/nova,devoid/nova,mandeepdhami/nova,maheshp/novatest,takeshineshiro/nova,eonpatapon/nova,mahak/nova,vladikr/nova_drafts,redhat-openstack/nova,NewpTone/stacklab-nova,yatinkumbhare/opens
tack-nova,plumgrid/plumgrid-nova,leilihh/nova,maoy/zknova,mahak/nova,gooddata/openstack-nova,petrutlucian94/nova_dev,noironetworks/nova,tianweizhang/nova,sridevikoushik31/nova,paulmathews/nova,berrange/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,jianghuaw/nova,mikalstill/nova,dstroppa/openstack-smartos-nova-grizzly,mgagne/nova,luogangyi/bcec-nova,blueboxgroup/nova,spring-week-topos/nova-week,devoid/nova,JioCloud/nova,openstack/nova,rajalokan/nova,cloudbau/nova,hanlind/nova,LoHChina/nova,joker946/nova,isyippee/nova,cloudbase/nova,spring-week-topos/nova-week,JioCloud/nova_test_latest,devendermishrajio/nova_test_latest,tealover/nova,adelina-t/nova,zhimin711/nova,alexandrucoman/vbox-nova-driver,DirectXMan12/nova-hacking,fajoy/nova,devendermishrajio/nova,usc-isi/nova,raildo/nova,viggates/nova,nikesh-mahalka/nova,klmitch/nova,thomasem/nova,gspilio/nova,dstroppa/openstack-smartos-nova-grizzly,savi-dev/nova,fajoy/nova,zaina/nova,CEG-FYP-OpenStack/scheduler,scripnichenko/nova,LoHChina/nova,cloudbase/nova,ntt-sic/nova,Stavitsky/nova,TieWei/nova,badock/nova,orbitfp7/nova,watonyweng/nova,blueboxgroup/nova,shahar-stratoscale/nova,iuliat/nova,usc-isi/nova,phenoxim/nova,jianghuaw/nova,rahulunair/nova,Yusuke1987/openstack_template,houshengbo/nova_vmware_compute_driver,NeCTAR-RC/nova,sacharya/nova,aristanetworks/arista-ovs-nova,openstack/nova,alaski/nova,shail2810/nova,NeCTAR-RC/nova,zaina/nova,projectcalico/calico-nova,cloudbase/nova,gspilio/nova,bgxavier/nova,MountainWei/nova,jianghuaw/nova,CloudServer/nova,jeffrey4l/nova,NoBodyCam/TftpPxeBootBareMetal,sridevikoushik31/openstack,mikalstill/nova,sridevikoushik31/nova,cloudbase/nova-virtualbox,BeyondTheClouds/nova,affo/nova,shootstar/novatest,klmitch/nova,dawnpower/nova,DirectXMan12/nova-hacking,citrix-openstack-build/nova,virtualopensystems/nova,houshengbo/nova_vmware_compute_driver,leilihh/novaha,Yuriy-Leonov/nova,ted-gould/nova,Tehsmash/nova,CloudServer/nova,affo/nova,noironetworks/nova,cernops/nova,maelnor/nova,devendermishrajio/nova_test_latest,NewpTone/stacklab-nova,tudorvio/nova,jeffrey4l/nova,cernops/nova,edulramirez/nova,Triv90/Nova,SUSE-Cloud/nova,tealover/nova,yrobla/nova,tanglei528/nova,eharney/nova,varunarya10/nova_test_latest,angdraug/nova,mmnelemane/nova,TwinkleChawla/nova,rickerc/nova_audit,bgxavier/nova,zzicewind/nova,berrange/nova,ruslanloman/nova
|
Add indexes to new instance_uuid columns.
Resolves bug 1009738 as well as several other bugs that haven't been
reported yet.
Change-Id: I09b456df70aaaba1bc4ac00514e63bda804d7f92
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# Copyright 2012 Michael Still and Canonical Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for table in ['block_device_mapping',
'consoles',
'volumes']:
t = Table(table, meta, autoload=True)
i = Index('%s_instance_uuid_idx' % table, t.c.instance_uuid)
i.create(migrate_engine)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for table in ['block_device_mapping',
'consoles',
'volumes']:
t = Table(table, meta, autoload=True)
i = Index('%s_instance_uuid_idx' % table, t.c.instance_uuid)
i.drop(migrate_engine)
|
<commit_before><commit_msg>Add indexes to new instance_uuid columns.
Resolves bug 1009738 as well as several other bugs that haven't been
reported yet.
Change-Id: I09b456df70aaaba1bc4ac00514e63bda804d7f92<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# Copyright 2012 Michael Still and Canonical Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for table in ['block_device_mapping',
'consoles',
'volumes']:
t = Table(table, meta, autoload=True)
i = Index('%s_instance_uuid_idx' % table, t.c.instance_uuid)
i.create(migrate_engine)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for table in ['block_device_mapping',
'consoles',
'volumes']:
t = Table(table, meta, autoload=True)
i = Index('%s_instance_uuid_idx' % table, t.c.instance_uuid)
i.drop(migrate_engine)
|
Add indexes to new instance_uuid columns.
Resolves bug 1009738 as well as several other bugs that haven't been
reported yet.
Change-Id: I09b456df70aaaba1bc4ac00514e63bda804d7f92# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# Copyright 2012 Michael Still and Canonical Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for table in ['block_device_mapping',
'consoles',
'volumes']:
t = Table(table, meta, autoload=True)
i = Index('%s_instance_uuid_idx' % table, t.c.instance_uuid)
i.create(migrate_engine)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for table in ['block_device_mapping',
'consoles',
'volumes']:
t = Table(table, meta, autoload=True)
i = Index('%s_instance_uuid_idx' % table, t.c.instance_uuid)
i.drop(migrate_engine)
|
<commit_before><commit_msg>Add indexes to new instance_uuid columns.
Resolves bug 1009738 as well as several other bugs that haven't been
reported yet.
Change-Id: I09b456df70aaaba1bc4ac00514e63bda804d7f92<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# Copyright 2012 Michael Still and Canonical Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for table in ['block_device_mapping',
'consoles',
'volumes']:
t = Table(table, meta, autoload=True)
i = Index('%s_instance_uuid_idx' % table, t.c.instance_uuid)
i.create(migrate_engine)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for table in ['block_device_mapping',
'consoles',
'volumes']:
t = Table(table, meta, autoload=True)
i = Index('%s_instance_uuid_idx' % table, t.c.instance_uuid)
i.drop(migrate_engine)
|
|
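A minimal standalone sketch of the Index create/drop calls the migration above relies on, assuming SQLAlchemy with an in-memory SQLite engine; the table definition is illustrative.
from sqlalchemy import (Column, Index, Integer, MetaData, String, Table,
                        create_engine)

# Illustrative table with an instance_uuid column, mirroring the migration.
engine = create_engine('sqlite://')
meta = MetaData()
volumes = Table('volumes', meta,
                Column('id', Integer, primary_key=True),
                Column('instance_uuid', String(36)))
meta.create_all(engine)

idx = Index('volumes_instance_uuid_idx', volumes.c.instance_uuid)
idx.create(engine)  # the upgrade() step, for one table
idx.drop(engine)    # the downgrade() step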
5dddba694e3bd06afbd7d68c1d8e895b8b6b947f
|
examples/LineAnimator_examples.py
|
examples/LineAnimator_examples.py
|
"""
=============
LineAnimator
=============
This example shows off some ways in which you can use the
LineAnimator object to animate line plots.
"""
import numpy as np
import matplotlib.pyplot as plt
from sunpy.visualization.imageanimator import LineAnimator
# Example 1: Animate a 2D cube of random data as a line plot along an
# axis where the x-axis drifts with time.
# Define some random data
data_shape0 = (10, 20)
data0 = np.random.rand(*data_shape0)
# Define the axis that will make up the line plot
plot_axis0 = 1
slider_axis0 = 0
# Define values along the x-axis which drift with time. To do this, define
# xdata to be the same shape as the data, where each row/column
# (depending on the axis to be animated) represents the x-axis values for
# a single frame of the animation.
xdata = np.tile(np.linspace(0, 100, data_shape0[plot_axis0]), (data_shape0[slider_axis0], 1))
# Generate animation object with variable x-axis data.
ani = LineAnimator(data0, plot_axis_index=plot_axis0, axis_ranges=[None, xdata])
# Show plot
plt.show()
|
Add LineAnimator example with variable x-axis values.
|
Add LineAnimator example with variable x-axis values.
|
Python
|
bsd-2-clause
|
dpshelio/sunpy,dpshelio/sunpy,dpshelio/sunpy
|
Add LineAnimator example with variable x-axis values.
|
"""
=============
LineAnimator
=============
This example shows off some ways in which you can use the
LineAnimator object to animate line plots.
"""
import numpy as np
import matplotlib.pyplot as plt
from sunpy.visualization.imageanimator import LineAnimator
# Example 1: Animate a 2D cube of random data as a line plot along an
# axis where the x-axis drifts with time.
# Define some random data
data_shape0 = (10, 20)
data0 = np.random.rand(*data_shape0)
# Define the axis that will make up the line plot
plot_axis0 = 1
slider_axis0 = 0
# Define values along the x-axis which drift with time. To do this, define
# xdata to be the same shape as the data, where each row/column
# (depending on the axis to be animated) represents the x-axis values for
# a single frame of the animation.
xdata = np.tile(np.linspace(0, 100, data_shape0[plot_axis0]), (data_shape0[slider_axis0], 1))
# Generate animation object with variable x-axis data.
ani = LineAnimator(data0, plot_axis_index=plot_axis0, axis_ranges=[None, xdata])
# Show plot
plt.show()
|
<commit_before><commit_msg>Add LineAnimator example with variable x-axis values.<commit_after>
|
"""
=============
LineAnimator
=============
This example shows off some ways in which you can use the
LineAnimator object to animate line plots.
"""
import numpy as np
import matplotlib.pyplot as plt
from sunpy.visualization.imageanimator import LineAnimator
# Example 1: Animate a 2D cube of random data as a line plot along an
# axis where the x-axis drifts with time.
# Define some random data
data_shape0 = (10, 20)
data0 = np.random.rand(*data_shape0)
# Define the axis that will make up the line plot
plot_axis0 = 1
slider_axis0 = 0
# Define values along the x-axis which drift with time. To do this, define
# xdata to be the same shape as the data, where each row/column
# (depending on the axis to be animated) represents the x-axis values for
# a single frame of the animation.
xdata = np.tile(np.linspace(0, 100, data_shape0[plot_axis0]), (data_shape0[slider_axis0], 1))
# Generate animation object with variable x-axis data.
ani = LineAnimator(data0, plot_axis_index=plot_axis0, axis_ranges=[None, xdata])
# Show plot
plt.show()
|
Add LineAnimator example with variable x-axis values."""
=============
LineAnimator
=============
This example shows off some ways in which you can use the
LineAnimator object to animate line plots.
"""
import numpy as np
import matplotlib.pyplot as plt
from sunpy.visualization.imageanimator import LineAnimator
# Example 1: Animate a 2D cube of random data as a line plot along an
# axis where the x-axis drifts with time.
# Define some random data
data_shape0 = (10, 20)
data0 = np.random.rand(*data_shape0)
# Define the axis that will make up the line plot
plot_axis0 = 1
slider_axis0 = 0
# Define values along the x-axis which drift with time. To do this, define
# xdata to be the same shape as the data, where each row/column
# (depending on the axis to be animated) represents the x-axis values for
# a single frame of the animation.
xdata = np.tile(np.linspace(0, 100, data_shape0[plot_axis0]), (data_shape0[slider_axis0], 1))
# Generate animation object with variable x-axis data.
ani = LineAnimator(data0, plot_axis_index=plot_axis0, axis_ranges=[None, xdata])
# Show plot
plt.show()
|
<commit_before><commit_msg>Add LineAnimator example with variable x-axis values.<commit_after>"""
=============
LineAnimator
=============
This example shows off some ways in which you can use the
LineAnimator object to animate line plots.
"""
import numpy as np
import matplotlib.pyplot as plt
from sunpy.visualization.imageanimator import LineAnimator
# Example 1: Animate a 2D cube of random data as a line plot along an
# axis where the x-axis drifts with time.
# Define some random data
data_shape0 = (10, 20)
data0 = np.random.rand(*data_shape0)
# Define the axis that will make up the line plot
plot_axis0 = 1
slider_axis0 = 0
# Define values along the x-axis which drift with time. To do this, define
# xdata to be the same shape as the data, where each row/column
# (depending on the axis to be animated) represents the x-axis values for
# a single frame of the animation.
xdata = np.tile(np.linspace(0, 100, data_shape0[plot_axis0]), (data_shape0[slider_axis0], 1))
# Generate animation object with variable x-axis data.
ani = LineAnimator(data0, plot_axis_index=plot_axis0, axis_ranges=[None, xdata])
# Show plot
plt.show()
|
|
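As a side note, a NumPy-only sketch of what the np.tile call above produces; the shapes here are illustrative.
import numpy as np

# Each row is the x-axis for one animation frame: 3 frames, 4 points each.
x = np.tile(np.linspace(0, 100, 4), (3, 1))
print(x.shape)  # (3, 4)
print(x[0])     # [0., 33.33..., 66.66..., 100.]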
ddb8d2f3078af9ded041b9eaacdfd3b8d69099bc
|
resolwe/flow/migrations/0025_entity_type.py
|
resolwe/flow/migrations/0025_entity_type.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-11-07 08:44
from __future__ import unicode_literals
from django.db import migrations
def populate_entity_type(apps, schema_editor):
"""Populate entity type from attached descriptor schema."""
Entity = apps.get_model('flow', 'Entity')
for entity in Entity.objects.all():
if entity.descriptor_schema is not None:
entity.type = entity.descriptor_schema.slug
entity.save()
class Migration(migrations.Migration):
dependencies = [
('flow', '0024_process_entity_3'),
]
operations = [
migrations.RunPython(populate_entity_type)
]
|
Add migration to populate Entity type
|
Add migration to populate Entity type
|
Python
|
apache-2.0
|
jberci/resolwe,genialis/resolwe,jberci/resolwe,genialis/resolwe
|
Add migration to populate Entity type
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-11-07 08:44
from __future__ import unicode_literals
from django.db import migrations
def populate_entity_type(apps, schema_editor):
"""Populate entity type from attached descriptor schema."""
Entity = apps.get_model('flow', 'Entity')
for entity in Entity.objects.all():
if entity.descriptor_schema is not None:
entity.type = entity.descriptor_schema.slug
entity.save()
class Migration(migrations.Migration):
dependencies = [
('flow', '0024_process_entity_3'),
]
operations = [
migrations.RunPython(populate_entity_type)
]
|
<commit_before><commit_msg>Add migration to populate Entity type<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-11-07 08:44
from __future__ import unicode_literals
from django.db import migrations
def populate_entity_type(apps, schema_editor):
"""Populate entity type from attached descriptor schema."""
Entity = apps.get_model('flow', 'Entity')
for entity in Entity.objects.all():
if entity.descriptor_schema is not None:
entity.type = entity.descriptor_schema.slug
entity.save()
class Migration(migrations.Migration):
dependencies = [
('flow', '0024_process_entity_3'),
]
operations = [
migrations.RunPython(populate_entity_type)
]
|
Add migration to populate Entity type# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-11-07 08:44
from __future__ import unicode_literals
from django.db import migrations
def populate_entity_type(apps, schema_editor):
"""Populate entity type from attached descriptor schema."""
Entity = apps.get_model('flow', 'Entity')
for entity in Entity.objects.all():
if entity.descriptor_schema is not None:
entity.type = entity.descriptor_schema.slug
entity.save()
class Migration(migrations.Migration):
dependencies = [
('flow', '0024_process_entity_3'),
]
operations = [
migrations.RunPython(populate_entity_type)
]
|
<commit_before><commit_msg>Add migration to populate Entity type<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-11-07 08:44
from __future__ import unicode_literals
from django.db import migrations
def populate_entity_type(apps, schema_editor):
"""Populate entity type from attached descriptor schema."""
Entity = apps.get_model('flow', 'Entity')
for entity in Entity.objects.all():
if entity.descriptor_schema is not None:
entity.type = entity.descriptor_schema.slug
entity.save()
class Migration(migrations.Migration):
dependencies = [
('flow', '0024_process_entity_3'),
]
operations = [
migrations.RunPython(populate_entity_type)
]
|
|
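A hedged variant of the same data migration, using only Django's public migrations API: RunPython also accepts a reverse callable, so RunPython.noop would make the step reversible.
from django.db import migrations

def forwards(apps, schema_editor):
    Entity = apps.get_model('flow', 'Entity')
    # Same population logic as above, skipping entities without a schema.
    for entity in Entity.objects.exclude(descriptor_schema=None):
        entity.type = entity.descriptor_schema.slug
        entity.save()

class Migration(migrations.Migration):
    dependencies = [('flow', '0024_process_entity_3')]
    operations = [
        migrations.RunPython(forwards, migrations.RunPython.noop),
    ]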
69d80b136a1d3d3e59bfc9a1a156e9a555e6682e
|
renderchan/contrib/ffmpeg.py
|
renderchan/contrib/ffmpeg.py
|
__author__ = 'Konstantin Dmitriev'
from renderchan.module import RenderChanModule
from renderchan.utils import which
import subprocess
import os
import random
class RenderChanFfmpegModule(RenderChanModule):
def __init__(self):
RenderChanModule.__init__(self)
if os.name == 'nt':
self.conf['binary']=os.path.join(os.path.dirname(__file__),"..\\..\\..\\ffmpeg\\bin\\ffmpeg.exe")
else:
self.conf['binary']="ffmpeg"
self.conf["packetSize"]=0
def getInputFormats(self):
return ["mov", "avi", "mpg"]
def getOutputFormats(self):
return ["png"]
def render(self, filename, outputPath, startFrame, endFrame, format, updateCompletion, extraParams={}):
updateCompletion(0.0)
# TODO: Progress callback
commandline=[self.conf['binary'], "-i", filename, os.path.join(outputPath,"output_%04d.png")]
subprocess.check_call(commandline)
updateCompletion(1.0)
|
Add FFmpeg module for converting video files into image sequences
|
Add FFmpeg module for converting video files into image sequences
|
Python
|
bsd-3-clause
|
morevnaproject/RenderChan,scribblemaniac/RenderChan,morevnaproject/RenderChan,scribblemaniac/RenderChan
|
Add FFmpeg module for converting video files into image sequences
|
__author__ = 'Konstantin Dmitriev'
from renderchan.module import RenderChanModule
from renderchan.utils import which
import subprocess
import os
import random
class RenderChanFfmpegModule(RenderChanModule):
def __init__(self):
RenderChanModule.__init__(self)
if os.name == 'nt':
self.conf['binary']=os.path.join(os.path.dirname(__file__),"..\\..\\..\\ffmpeg\\bin\\ffmpeg.exe")
else:
self.conf['binary']="ffmpeg"
self.conf["packetSize"]=0
def getInputFormats(self):
return ["mov", "avi", "mpg"]
def getOutputFormats(self):
return ["png"]
def render(self, filename, outputPath, startFrame, endFrame, format, updateCompletion, extraParams={}):
updateCompletion(0.0)
# TODO: Progress callback
commandline=[self.conf['binary'], "-i", filename, os.path.join(outputPath,"output_%04d.png")]
subprocess.check_call(commandline)
updateCompletion(1.0)
|
<commit_before><commit_msg>Add FFmpeg module for converting video files into image sequences<commit_after>
|
__author__ = 'Konstantin Dmitriev'
from renderchan.module import RenderChanModule
from renderchan.utils import which
import subprocess
import os
import random
class RenderChanFfmpegModule(RenderChanModule):
def __init__(self):
RenderChanModule.__init__(self)
if os.name == 'nt':
self.conf['binary']=os.path.join(os.path.dirname(__file__),"..\\..\\..\\ffmpeg\\bin\\ffmpeg.exe")
else:
self.conf['binary']="ffmpeg"
self.conf["packetSize"]=0
def getInputFormats(self):
return ["mov", "avi", "mpg"]
def getOutputFormats(self):
return ["png"]
def render(self, filename, outputPath, startFrame, endFrame, format, updateCompletion, extraParams={}):
updateCompletion(0.0)
# TODO: Progress callback
commandline=[self.conf['binary'], "-i", filename, os.path.join(outputPath,"output_%04d.png")]
subprocess.check_call(commandline)
updateCompletion(1.0)
|
Add FFmpeg module for converting video files into image sequences
__author__ = 'Konstantin Dmitriev'
from renderchan.module import RenderChanModule
from renderchan.utils import which
import subprocess
import os
import random
class RenderChanFfmpegModule(RenderChanModule):
def __init__(self):
RenderChanModule.__init__(self)
if os.name == 'nt':
self.conf['binary']=os.path.join(os.path.dirname(__file__),"..\\..\\..\\ffmpeg\\bin\\ffmpeg.exe")
else:
self.conf['binary']="ffmpeg"
self.conf["packetSize"]=0
def getInputFormats(self):
return ["mov", "avi", "mpg"]
def getOutputFormats(self):
return ["png"]
def render(self, filename, outputPath, startFrame, endFrame, format, updateCompletion, extraParams={}):
updateCompletion(0.0)
# TODO: Progress callback
commandline=[self.conf['binary'], "-i", filename, os.path.join(outputPath,"output_%04d.png")]
subprocess.check_call(commandline)
updateCompletion(1.0)
|
<commit_before><commit_msg>Add FFmpeg module for converting video files into image sequences<commit_after>
__author__ = 'Konstantin Dmitriev'
from renderchan.module import RenderChanModule
from renderchan.utils import which
import subprocess
import os
import random
class RenderChanFfmpegModule(RenderChanModule):
def __init__(self):
RenderChanModule.__init__(self)
if os.name == 'nt':
self.conf['binary']=os.path.join(os.path.dirname(__file__),"..\\..\\..\\ffmpeg\\bin\\ffmpeg.exe")
else:
self.conf['binary']="ffmpeg"
self.conf["packetSize"]=0
def getInputFormats(self):
return ["mov", "avi", "mpg"]
def getOutputFormats(self):
return ["png"]
def render(self, filename, outputPath, startFrame, endFrame, format, updateCompletion, extraParams={}):
updateCompletion(0.0)
# TODO: Progress callback
commandline=[self.conf['binary'], "-i", filename, os.path.join(outputPath,"output_%04d.png")]
subprocess.check_call(commandline)
updateCompletion(1.0)
|
|
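A standalone sketch of the same ffmpeg invocation that render() builds; it assumes an ffmpeg binary on the PATH, and the filenames are illustrative.
import os
import subprocess

def extract_frames(video, out_dir):
    # Mirrors the module's command line: one numbered PNG per frame.
    os.makedirs(out_dir, exist_ok=True)
    subprocess.check_call(
        ['ffmpeg', '-i', video, os.path.join(out_dir, 'output_%04d.png')])

# extract_frames('input.mov', 'frames')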
04d2998215b94c7f857a96ba40cb479209677b09
|
udemy-dl.py
|
udemy-dl.py
|
#!/usr/bin/python
print('''udemy-dl can now be installed using pip
pip install udemy-dl
If you are looking for the old script, it is now located here:
src/udemy_dl/udemy_dl.py
''')
|
Add a helpful message for people looking for old script
|
Add a helpful message for people looking for old script
|
Python
|
unlicense
|
rinodung/udemy-dl
|
Add a helpful message for people looking for old script
|
#!/usr/bin/python
print('''udemy-dl can now be installed using pip
pip install udemy-dl
If you are looking for the old script, it is now located here:
src/udemy_dl/udemy_dl.py
''')
|
<commit_before><commit_msg>Add a helpful message for people looking for old script<commit_after>
|
#!/usr/bin/python
print('''udemy-dl can now be installed using pip
pip install udemy-dl
If you are looking for the old script, it is now located here:
src/udemy_dl/udemy_dl.py
''')
|
Add a helpful message for people looking for old script#!/usr/bin/python
print('''udemy-dl can now be installed using pip
pip install udemy-dl
If you are looking for the old script, it is now located here:
src/udemy_dl/udemy_dl.py
''')
|
<commit_before><commit_msg>Add a helpful message for people looking for old script<commit_after>#!/usr/bin/python
print('''udemy-dl can now be installed using pip
pip install udemy-dl
If you are looking for the old script, it is now located here:
src/udemy_dl/udemy_dl.py
''')
|
|
1d39f6bdbd27ba2d655dd74f62de46fe96628671
|
test_multiply.py
|
test_multiply.py
|
import multiply as mp
def test_multiply():
"""
Tests that the multiply function works
"""
assert mp.multiply(5,5) == 25
assert mp.multiply(3,0) == 0
|
Test for the multiply.py function
|
Test for the multiply.py function
|
Python
|
mit
|
ericmjl/github-tutorial
|
Test for the multiply.py function
|
import multiply as mp
def test_multiply():
"""
Tests that the multiply function works
"""
assert mp.multiply(5,5) == 25
assert mp.multiply(3,0) == 0
|
<commit_before><commit_msg>Test for the multiply.py function<commit_after>
|
import multiply as mp
def test_multiply():
"""
Tests that the multiply function works
"""
assert mp.multiply(5,5) == 25
assert mp.multiply(3,0) == 0
|
Test for the multiply.py functionimport multiply as mp
def test_multiply():
"""
Tests that the multiply function works
"""
assert mp.multiply(5,5) == 25
assert mp.multiply(3,0) == 0
|
<commit_before><commit_msg>Test for the multiply.py function<commit_after>import multiply as mp
def test_multiply():
"""
Tests that the multiply function works
"""
assert mp.multiply(5,5) == 25
assert mp.multiply(3,0) == 0
|
|
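The module under test is not included in the record; a hypothetical minimal multiply.py consistent with the test's assertions would be:
# Hypothetical multiply.py, inferred from the test; not part of the record.
def multiply(a, b):
    """Return the product of a and b."""
    return a * b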
9aec0558e4174c16c44a8a5598c14a1fbcee55a9
|
stock_updater.py
|
stock_updater.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class StockUpdater:
def __init__(self, db_connection):
self.db_connection = db_connection
def set_items(self, items):
self.items = items
def set_table(self, table):
self.table = table
def set_destination_colums(self, product_code, quantity):
self.product_code_column = product_code
self.quantity_column = quantity
def update(self):
        # cursor.executemany?
for item in self.items:
self.update_quantity(item['product_code'], item['quantity'])
def update_quantity(self, product_code, quantity):
query = "UPDATE {} SET {} = ? WHERE {} LIKE ?".format(
self.table, self.quantity_column, self.product_code_column)
cursor = self.db_connection.cursor()
try:
cursor.execute(query, (quantity, product_code))
self.db_connection.commit()
except Exception as err:
raise err
finally:
cursor.close()
|
Add class for updating stock
|
Add class for updating stock
|
Python
|
mit
|
stormaaja/csvconverter,stormaaja/csvconverter,stormaaja/csvconverter
|
Add class for updating stock
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class StockUpdater:
def __init__(self, db_connection):
self.db_connection = db_connection
def set_items(self, items):
self.items = items
def set_table(self, table):
self.table = table
def set_destination_colums(self, product_code, quantity):
self.product_code_column = product_code
self.quantity_column = quantity
def update(self):
        # cursor.executemany?
for item in self.items:
self.update_quantity(item['product_code'], item['quantity'])
def update_quantity(self, product_code, quantity):
query = "UPDATE {} SET {} = ? WHERE {} LIKE ?".format(
self.table, self.quantity_column, self.product_code_column)
cursor = self.db_connection.cursor()
try:
cursor.execute(query, (quantity, product_code))
self.db_connection.commit()
except Exception as err:
raise err
finally:
cursor.close()
|
<commit_before><commit_msg>Add class for updating stock<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class StockUpdater:
def __init__(self, db_connection):
self.db_connection = db_connection
def set_items(self, items):
self.items = items
def set_table(self, table):
self.table = table
def set_destination_colums(self, product_code, quantity):
self.product_code_column = product_code
self.quantity_column = quantity
def update(self):
        # cursor.executemany?
for item in self.items:
self.update_quantity(item['product_code'], item['quantity'])
def update_quantity(self, product_code, quantity):
query = "UPDATE {} SET {} = ? WHERE {} LIKE ?".format(
self.table, self.quantity_column, self.product_code_column)
cursor = self.db_connection.cursor()
try:
cursor.execute(query, (quantity, product_code))
self.db_connection.commit()
except Exception as err:
raise err
finally:
cursor.close()
|
Add class for updating stock#!/usr/bin/env python
# -*- coding: utf-8 -*-
class StockUpdater:
def __init__(self, db_connection):
self.db_connection = db_connection
def set_items(self, items):
self.items = items
def set_table(self, table):
self.table = table
def set_destination_colums(self, product_code, quantity):
self.product_code_column = product_code
self.quantity_column = quantity
def update(self):
        # cursor.executemany?
for item in self.items:
self.update_quantity(item['product_code'], item['quantity'])
def update_quantity(self, product_code, quantity):
query = "UPDATE {} SET {} = ? WHERE {} LIKE ?".format(
self.table, self.quantity_column, self.product_code_column)
cursor = self.db_connection.cursor()
try:
cursor.execute(query, (quantity, product_code))
self.db_connection.commit()
except Exception as err:
raise err
finally:
cursor.close()
|
<commit_before><commit_msg>Add class for updating stock<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
class StockUpdater:
def __init__(self, db_connection):
self.db_connection = db_connection
def set_items(self, items):
self.items = items
def set_table(self, table):
self.table = table
def set_destination_colums(self, product_code, quantity):
self.product_code_column = product_code
self.quantity_column = quantity
def update(self):
        # cursor.executemany?
for item in self.items:
self.update_quantity(item['product_code'], item['quantity'])
def update_quantity(self, product_code, quantity):
query = "UPDATE {} SET {} = ? WHERE {} LIKE ?".format(
self.table, self.quantity_column, self.product_code_column)
cursor = self.db_connection.cursor()
try:
cursor.execute(query, (quantity, product_code))
self.db_connection.commit()
except Exception as err:
raise err
finally:
cursor.close()
|
|
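A usage sketch for StockUpdater, assuming a DB-API connection with qmark paramstyle such as sqlite3; the table and item data are illustrative.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE products (code TEXT, qty INTEGER)')
conn.execute("INSERT INTO products VALUES ('A-1', 0)")

updater = StockUpdater(conn)
updater.set_table('products')
updater.set_destination_colums('code', 'qty')  # spelling as defined by the class
updater.set_items([{'product_code': 'A-1', 'quantity': 7}])
updater.update()
print(conn.execute('SELECT qty FROM products').fetchone())  # (7,)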
ea75b737ff6e2b3c54bc5e6417d64bd16f446dc3
|
test/test_bot.py
|
test/test_bot.py
|
import re
import unittest
import unittest.mock
from gather.bot import ListenerBot
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = unittest.mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
if __name__ == '__main__':
unittest.main()
|
Add a test for bot action registration
|
Add a test for bot action registration
|
Python
|
mit
|
veryhappythings/discord-gather
|
Add a test for bot action registration
|
import re
import unittest
import unittest.mock
from gather.bot import ListenerBot
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = unittest.mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test for bot action registration<commit_after>
|
import re
import unittest
import unittest.mock
from gather.bot import ListenerBot
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = unittest.mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
if __name__ == '__main__':
unittest.main()
|
Add a test for bot action registrationimport re
import unittest
import unittest.mock
from gather.bot import ListenerBot
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = unittest.mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test for bot action registration<commit_after>import re
import unittest
import unittest.mock
from gather.bot import ListenerBot
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = unittest.mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
if __name__ == '__main__':
unittest.main()
|
|
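ListenerBot itself is not part of the record; a hypothetical minimal implementation consistent with what the test asserts:
import re

# Hypothetical gather.bot.ListenerBot, inferred from the assertions above.
class ListenerBot:
    def __init__(self):
        self.actions = {}

    def register_action(self, regex, action):
        # Keep the compiled, case-insensitive pattern next to its handler.
        self.actions[regex] = (re.compile(regex, re.IGNORECASE), action)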
75feb1bf2f7adec9a3c35fdd1c566922a24f44f4
|
test_analyzer.py
|
test_analyzer.py
|
#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Figure out which tests ran, and which had any failures.
import sys
import re
testOkPatt = re.compile('..:..:.. INFO - \d+ INFO TEST-OK \| ([^ ]*)')
testFailPatt = re.compile('TEST-UNEXPECTED-FAIL \| ([^ ]*)')
def analyzeMochitestLog(mlog):
runningTests = set([])
failedTests = set([])
# Parse log for tests that ran and/or failed.
for l in mlog:
m = testOkPatt.match(l)
if m:
runningTests.add(m.group(1))
continue
m = testFailPatt.search(l)
if m:
failedTests.add(m.group(1))
continue
if 'leakcheck' in failedTests:
print 'Some test leaked.'
# Get the known list of tests that don't run in e10s.
disabledTests = set([])
f = open('disabled_mochitests.txt', 'r')
for l in f:
disabledTests.add(l[:-1])
okTests = []
stillFailedTests = []
for x in disabledTests:
if not x in runningTests:
continue
if x in failedTests:
stillFailedTests.append(x)
else:
okTests.append(x)
okTests.sort()
stillFailedTests.sort()
print
print 'Maybe could enable these tests:'
for t in okTests:
print ' ', t
print
print 'Still broken:'
for t in stillFailedTests:
print ' ', t
if len(sys.argv) < 2:
sys.stderr.write('Not enough arguments.\n')
exit()
f = open(sys.argv[1], 'r')
analyzeMochitestLog(f)
f.close()
|
Add mochitest failure test analyzer
|
Add mochitest failure test analyzer
|
Python
|
mpl-2.0
|
amccreight/mochitest-logs
|
Add mochitest failure test analyzer
|
#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Figure out which tests ran, and which had any failures.
import sys
import re
testOkPatt = re.compile('..:..:.. INFO - \d+ INFO TEST-OK \| ([^ ]*)')
testFailPatt = re.compile('TEST-UNEXPECTED-FAIL \| ([^ ]*)')
def analyzeMochitestLog(mlog):
runningTests = set([])
failedTests = set([])
# Parse log for tests that ran and/or failed.
for l in mlog:
m = testOkPatt.match(l)
if m:
runningTests.add(m.group(1))
continue
m = testFailPatt.search(l)
if m:
failedTests.add(m.group(1))
continue
if 'leakcheck' in failedTests:
print 'Some test leaked.'
# Get the known list of tests that don't run in e10s.
disabledTests = set([])
f = open('disabled_mochitests.txt', 'r')
for l in f:
disabledTests.add(l[:-1])
okTests = []
stillFailedTests = []
for x in disabledTests:
if not x in runningTests:
continue
if x in failedTests:
stillFailedTests.append(x)
else:
okTests.append(x)
okTests.sort()
stillFailedTests.sort()
print
print 'Maybe could enable these tests:'
for t in okTests:
print ' ', t
print
print 'Still broken:'
for t in stillFailedTests:
print ' ', t
if len(sys.argv) < 2:
sys.stderr.write('Not enough arguments.\n')
exit()
f = open(sys.argv[1], 'r')
analyzeMochitestLog(f)
f.close()
|
<commit_before><commit_msg>Add mochitest failure test analyzer<commit_after>
|
#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Figure out which tests ran, and which had any failures.
import sys
import re
testOkPatt = re.compile('..:..:.. INFO - \d+ INFO TEST-OK \| ([^ ]*)')
testFailPatt = re.compile('TEST-UNEXPECTED-FAIL \| ([^ ]*)')
def analyzeMochitestLog(mlog):
runningTests = set([])
failedTests = set([])
# Parse log for tests that ran and/or failed.
for l in mlog:
m = testOkPatt.match(l)
if m:
runningTests.add(m.group(1))
continue
m = testFailPatt.search(l)
if m:
failedTests.add(m.group(1))
continue
if 'leakcheck' in failedTests:
print 'Some test leaked.'
# Get the known list of tests that don't run in e10s.
disabledTests = set([])
f = open('disabled_mochitests.txt', 'r')
for l in f:
disabledTests.add(l[:-1])
okTests = []
stillFailedTests = []
for x in disabledTests:
if not x in runningTests:
continue
if x in failedTests:
stillFailedTests.append(x)
else:
okTests.append(x)
okTests.sort()
stillFailedTests.sort()
print
print 'Maybe could enable these tests:'
for t in okTests:
print ' ', t
print
print 'Still broken:'
for t in stillFailedTests:
print ' ', t
if len(sys.argv) < 2:
sys.stderr.write('Not enough arguments.\n')
exit()
f = open(sys.argv[1], 'r')
analyzeMochitestLog(f)
f.close()
|
Add mochitest failure test analyzer#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Figure out which tests ran, and which had any failures.
import sys
import re
testOkPatt = re.compile('..:..:.. INFO - \d+ INFO TEST-OK \| ([^ ]*)')
testFailPatt = re.compile('TEST-UNEXPECTED-FAIL \| ([^ ]*)')
def analyzeMochitestLog(mlog):
runningTests = set([])
failedTests = set([])
# Parse log for tests that ran and/or failed.
for l in mlog:
m = testOkPatt.match(l)
if m:
runningTests.add(m.group(1))
continue
m = testFailPatt.search(l)
if m:
failedTests.add(m.group(1))
continue
if 'leakcheck' in failedTests:
print 'Some test leaked.'
# Get the known list of tests that don't run in e10s.
disabledTests = set([])
f = open('disabled_mochitests.txt', 'r')
for l in f:
disabledTests.add(l[:-1])
okTests = []
stillFailedTests = []
for x in disabledTests:
if not x in runningTests:
continue
if x in failedTests:
stillFailedTests.append(x)
else:
okTests.append(x)
okTests.sort()
stillFailedTests.sort()
print
print 'Maybe could enable these tests:'
for t in okTests:
print ' ', t
print
print 'Still broken:'
for t in stillFailedTests:
print ' ', t
if len(sys.argv) < 2:
sys.stderr.write('Not enough arguments.\n')
exit()
f = open(sys.argv[1], 'r')
analyzeMochitestLog(f)
f.close()
|
<commit_before><commit_msg>Add mochitest failure test analyzer<commit_after>#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Figure out which tests ran, and which had any failures.
import sys
import re
testOkPatt = re.compile('..:..:.. INFO - \d+ INFO TEST-OK \| ([^ ]*)')
testFailPatt = re.compile('TEST-UNEXPECTED-FAIL \| ([^ ]*)')
def analyzeMochitestLog(mlog):
runningTests = set([])
failedTests = set([])
# Parse log for tests that ran and/or failed.
for l in mlog:
m = testOkPatt.match(l)
if m:
runningTests.add(m.group(1))
continue
m = testFailPatt.search(l)
if m:
failedTests.add(m.group(1))
continue
if 'leakcheck' in failedTests:
print 'Some test leaked.'
# Get the known list of tests that don't run in e10s.
disabledTests = set([])
f = open('disabled_mochitests.txt', 'r')
for l in f:
disabledTests.add(l[:-1])
okTests = []
stillFailedTests = []
for x in disabledTests:
if not x in runningTests:
continue
if x in failedTests:
stillFailedTests.append(x)
else:
okTests.append(x)
okTests.sort()
stillFailedTests.sort()
print
print 'Maybe could enable these tests:'
for t in okTests:
print ' ', t
print
print 'Still broken:'
for t in stillFailedTests:
print ' ', t
if len(sys.argv) < 2:
sys.stderr.write('Not enough arguments.\n')
exit()
f = open(sys.argv[1], 'r')
analyzeMochitestLog(f)
f.close()
|
|
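A quick check of the TEST-OK pattern above against a fabricated log line (the exact log format is assumed, not taken from a real run):
import re

test_ok = re.compile(r'..:..:.. INFO - \d+ INFO TEST-OK \| ([^ ]*)')
line = '12:34:56 INFO - 42 INFO TEST-OK | dom/tests/test_example.html took 10ms'
print(test_ok.match(line).group(1))  # dom/tests/test_example.html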
feef32714b8be10c7b5fd8a536c0dee8b872a6ac
|
extract_binaries.py
|
extract_binaries.py
|
from argparse import ArgumentParser
import errno
import os.path
import urllib
import shutil
from subprocess import (
check_call,
check_output,
)
def get_args():
parser = ArgumentParser()
parser.add_argument('version', help='The version number of this juju.')
parser.add_argument('branch', help='The branch this juju came from.')
parser.add_argument('jenkins_url',
help='URL to the jenkins with binaries.')
parser.add_argument('target_dir', help='Directory to extract to.')
return parser.parse_args()
def extract_binary(version, branch, jenkins_url, target_dir):
if branch == 'gitbranch:master:github.com/juju/juju':
full_target = os.path.join(target_dir, 'master')
else:
full_target = os.path.join(target_dir, 'stable')
release = check_output(['lsb_release', '-sr']).strip()
arch = check_output(['dpkg', '--print-architecture']).strip()
juju_core_deb = 'juju-core_{}-0ubuntu1~{}.1~juju1_{}.deb'.format(
version, release, arch)
encoded_core_deb = urllib.quote(juju_core_deb)
deb_url = '{}/job/publish-revision/lastSuccessfulBuild/artifact/{}'.format(
jenkins_url, encoded_core_deb)
try:
os.unlink(juju_core_deb)
except OSError as e:
if e.errno != errno.ENOENT:
raise
check_call(['wget', '-q', deb_url])
shutil.rmtree(full_target)
check_call(['dpkg', '-x', juju_core_deb, full_target])
def main():
args = get_args()
extract_binary(args.version, args.branch, args.jenkins_url,
args.target_dir)
if __name__ == '__main__':
main()
|
Add python version of extract_binary.
|
Add python version of extract_binary.
|
Python
|
agpl-3.0
|
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
|
Add python version of extract_binary.
|
from argparse import ArgumentParser
import errno
import os.path
import urllib
import shutil
from subprocess import (
check_call,
check_output,
)
def get_args():
parser = ArgumentParser()
parser.add_argument('version', help='The version number of this juju.')
parser.add_argument('branch', help='The branch this juju came from.')
parser.add_argument('jenkins_url',
help='URL to the jenkins with binaries.')
parser.add_argument('target_dir', help='Directory to extract to.')
return parser.parse_args()
def extract_binary(version, branch, jenkins_url, target_dir):
if branch == 'gitbranch:master:github.com/juju/juju':
full_target = os.path.join(target_dir, 'master')
else:
full_target = os.path.join(target_dir, 'stable')
release = check_output(['lsb_release', '-sr']).strip()
arch = check_output(['dpkg', '--print-architecture']).strip()
juju_core_deb = 'juju-core_{}-0ubuntu1~{}.1~juju1_{}.deb'.format(
version, release, arch)
encoded_core_deb = urllib.quote(juju_core_deb)
deb_url = '{}/job/publish-revision/lastSuccessfulBuild/artifact/{}'.format(
jenkins_url, encoded_core_deb)
try:
os.unlink(juju_core_deb)
except OSError as e:
if e.errno != errno.ENOENT:
raise
check_call(['wget', '-q', deb_url])
shutil.rmtree(full_target)
check_call(['dpkg', '-x', juju_core_deb, full_target])
def main():
args = get_args()
extract_binary(args.version, args.branch, args.jenkins_url,
args.target_dir)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add python version of extract_binary.<commit_after>
|
from argparse import ArgumentParser
import errno
import os.path
import urllib
import shutil
from subprocess import (
check_call,
check_output,
)
def get_args():
parser = ArgumentParser()
parser.add_argument('version', help='The version number of this juju.')
parser.add_argument('branch', help='The branch this juju came from.')
parser.add_argument('jenkins_url',
help='URL to the jenkins with binaries.')
parser.add_argument('target_dir', help='Directory to extract to.')
return parser.parse_args()
def extract_binary(version, branch, jenkins_url, target_dir):
if branch == 'gitbranch:master:github.com/juju/juju':
full_target = os.path.join(target_dir, 'master')
else:
full_target = os.path.join(target_dir, 'stable')
release = check_output(['lsb_release', '-sr']).strip()
arch = check_output(['dpkg', '--print-architecture']).strip()
juju_core_deb = 'juju-core_{}-0ubuntu1~{}.1~juju1_{}.deb'.format(
version, release, arch)
encoded_core_deb = urllib.quote(juju_core_deb)
deb_url = '{}/job/publish-revision/lastSuccessfulBuild/artifact/{}'.format(
jenkins_url, encoded_core_deb)
try:
os.unlink(juju_core_deb)
except OSError as e:
if e.errno != errno.ENOENT:
raise
check_call(['wget', '-q', deb_url])
shutil.rmtree(full_target)
check_call(['dpkg', '-x', juju_core_deb, full_target])
def main():
args = get_args()
extract_binary(args.version, args.branch, args.jenkins_url,
args.target_dir)
if __name__ == '__main__':
main()
|
Add python version of extract_binary.from argparse import ArgumentParser
import errno
import os.path
import urllib
import shutil
from subprocess import (
check_call,
check_output,
)
def get_args():
parser = ArgumentParser()
parser.add_argument('version', help='The version number of this juju.')
parser.add_argument('branch', help='The branch this juju came from.')
parser.add_argument('jenkins_url',
help='URL to the jenkins with binaries.')
parser.add_argument('target_dir', help='Directory to extract to.')
return parser.parse_args()
def extract_binary(version, branch, jenkins_url, target_dir):
if branch == 'gitbranch:master:github.com/juju/juju':
full_target = os.path.join(target_dir, 'master')
else:
full_target = os.path.join(target_dir, 'stable')
release = check_output(['lsb_release', '-sr']).strip()
arch = check_output(['dpkg', '--print-architecture']).strip()
juju_core_deb = 'juju-core_{}-0ubuntu1~{}.1~juju1_{}.deb'.format(
version, release, arch)
encoded_core_deb = urllib.quote(juju_core_deb)
deb_url = '{}/job/publish-revision/lastSuccessfulBuild/artifact/{}'.format(
jenkins_url, encoded_core_deb)
try:
os.unlink(juju_core_deb)
except OSError as e:
if e.errno != errno.ENOENT:
raise
check_call(['wget', '-q', deb_url])
shutil.rmtree(full_target)
check_call(['dpkg', '-x', juju_core_deb, full_target])
def main():
args = get_args()
extract_binary(args.version, args.branch, args.jenkins_url,
args.target_dir)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add python version of extract_binary.<commit_after>from argparse import ArgumentParser
import errno
import os.path
import urllib
import shutil
from subprocess import (
check_call,
check_output,
)
def get_args():
parser = ArgumentParser()
parser.add_argument('version', help='The version number of this juju.')
parser.add_argument('branch', help='The branch this juju came from.')
parser.add_argument('jenkins_url',
help='URL to the jenkins with binaries.')
parser.add_argument('target_dir', help='Directory to extract to.')
return parser.parse_args()
def extract_binary(version, branch, jenkins_url, target_dir):
if branch == 'gitbranch:master:github.com/juju/juju':
full_target = os.path.join(target_dir, 'master')
else:
full_target = os.path.join(target_dir, 'stable')
release = check_output(['lsb_release', '-sr']).strip()
arch = check_output(['dpkg', '--print-architecture']).strip()
juju_core_deb = 'juju-core_{}-0ubuntu1~{}.1~juju1_{}.deb'.format(
version, release, arch)
encoded_core_deb = urllib.quote(juju_core_deb)
deb_url = '{}/job/publish-revision/lastSuccessfulBuild/artifact/{}'.format(
jenkins_url, encoded_core_deb)
try:
os.unlink(juju_core_deb)
except OSError as e:
if e.errno != errno.ENOENT:
raise
check_call(['wget', '-q', deb_url])
shutil.rmtree(full_target)
check_call(['dpkg', '-x', juju_core_deb, full_target])
def main():
args = get_args()
extract_binary(args.version, args.branch, args.jenkins_url,
args.target_dir)
if __name__ == '__main__':
main()
|
|
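The script targets Python 2 (urllib.quote); under Python 3 the equivalent would be urllib.parse.quote, as in this sketch with an illustrative package name:
from urllib.parse import quote

juju_core_deb = 'juju-core_1.20.0-0ubuntu1~14.04.1~juju1_amd64.deb'  # illustrative
print(quote(juju_core_deb))  # percent-encodes characters unsafe in a URL path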
53a4d9525439456de3d0f13d92c5671a4c39b69f
|
ndlib_test/ndlibTest.py
|
ndlib_test/ndlibTest.py
|
import unittest
import networkx as nx
import sys
sys.path.append("..")
import ndlib.VoterModel as vm
import ndlib.SznajdModel as sm
import ndlib.MajorityRuleModel as mrm
__author__ = 'rossetti'
__license__ = "GPL"
__email__ = "giulio.rossetti@gmail.com"
class NdlibTest(unittest.TestCase):
def test_voter_model(self):
g = nx.erdos_renyi_graph(1000, 0.1)
model = vm.VoterModel(g)
model.set_initial_status({'model': {'percentage_infected': 0.2}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
def test_sznajd_model(self):
g = nx.erdos_renyi_graph(1000, 0.1)
model = sm.SznajdModel(g)
model.set_initial_status({'model': {'percentage_infected': 0.2}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
def test_majorityrule_model(self):
g = nx.complete_graph(100)
model = mrm.MajorityRuleModel(g, {'q': 3})
model.set_initial_status({'model': {'percentage_infected': 0.6}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
|
Test suite for the ndlib
|
Test suite for the ndlib
|
Python
|
bsd-2-clause
|
GiulioRossetti/ndlib
|
Test suite for the ndlib
|
import unittest
import networkx as nx
import sys
sys.path.append("..")
import ndlib.VoterModel as vm
import ndlib.SznajdModel as sm
import ndlib.MajorityRuleModel as mrm
__author__ = 'rossetti'
__license__ = "GPL"
__email__ = "giulio.rossetti@gmail.com"
class NdlibTest(unittest.TestCase):
def test_voter_model(self):
g = nx.erdos_renyi_graph(1000, 0.1)
model = vm.VoterModel(g)
model.set_initial_status({'model': {'percentage_infected': 0.2}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
def test_sznajd_model(self):
g = nx.erdos_renyi_graph(1000, 0.1)
model = sm.SznajdModel(g)
model.set_initial_status({'model': {'percentage_infected': 0.2}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
def test_majorityrule_model(self):
g = nx.complete_graph(100)
model = mrm.MajorityRuleModel(g, {'q': 3})
model.set_initial_status({'model': {'percentage_infected': 0.6}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
|
<commit_before><commit_msg>Test suite for the ndlib<commit_after>
|
import unittest
import networkx as nx
import sys
sys.path.append("..")
import ndlib.VoterModel as vm
import ndlib.SznajdModel as sm
import ndlib.MajorityRuleModel as mrm
__author__ = 'rossetti'
__license__ = "GPL"
__email__ = "giulio.rossetti@gmail.com"
class NdlibTest(unittest.TestCase):
def test_voter_model(self):
g = nx.erdos_renyi_graph(1000, 0.1)
model = vm.VoterModel(g)
model.set_initial_status({'model': {'percentage_infected': 0.2}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
def test_sznajd_model(self):
g = nx.erdos_renyi_graph(1000, 0.1)
model = sm.SznajdModel(g)
model.set_initial_status({'model': {'percentage_infected': 0.2}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
def test_majorityrule_model(self):
g = nx.complete_graph(100)
model = mrm.MajorityRuleModel(g, {'q': 3})
model.set_initial_status({'model': {'percentage_infected': 0.6}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
|
Test suite for the ndlibimport unittest
import networkx as nx
import sys
sys.path.append("..")
import ndlib.VoterModel as vm
import ndlib.SznajdModel as sm
import ndlib.MajorityRuleModel as mrm
__author__ = 'rossetti'
__license__ = "GPL"
__email__ = "giulio.rossetti@gmail.com"
class NdlibTest(unittest.TestCase):
def test_voter_model(self):
g = nx.erdos_renyi_graph(1000, 0.1)
model = vm.VoterModel(g)
model.set_initial_status({'model': {'percentage_infected': 0.2}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
def test_sznajd_model(self):
g = nx.erdos_renyi_graph(1000, 0.1)
model = sm.SznajdModel(g)
model.set_initial_status({'model': {'percentage_infected': 0.2}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
def test_majorityrule_model(self):
g = nx.complete_graph(100)
model = mrm.MajorityRuleModel(g, {'q': 3})
model.set_initial_status({'model': {'percentage_infected': 0.6}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
|
<commit_before><commit_msg>Test suite for the ndlib<commit_after>import unittest
import networkx as nx
import sys
sys.path.append("..")
import ndlib.VoterModel as vm
import ndlib.SznajdModel as sm
import ndlib.MajorityRuleModel as mrm
__author__ = 'rossetti'
__license__ = "GPL"
__email__ = "giulio.rossetti@gmail.com"
class NdlibTest(unittest.TestCase):
def test_voter_model(self):
g = nx.erdos_renyi_graph(1000, 0.1)
model = vm.VoterModel(g)
model.set_initial_status({'model': {'percentage_infected': 0.2}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
def test_sznajd_model(self):
g = nx.erdos_renyi_graph(1000, 0.1)
model = sm.SznajdModel(g)
model.set_initial_status({'model': {'percentage_infected': 0.2}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
def test_majorityrule_model(self):
g = nx.complete_graph(100)
model = mrm.MajorityRuleModel(g, {'q': 3})
model.set_initial_status({'model': {'percentage_infected': 0.6}})
iterations = model.iteration_bunch(10)
self.assertEqual(len(iterations), 10)
|
|
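The three tests share one shape; a hedged helper capturing it (the ndlib calls are as shown in the tests, the graph parameters illustrative):
import networkx as nx

def run_bunch(model_factory, iterations=10, infected=0.2):
    # Build a graph, seed the model, run a fixed number of iterations.
    g = nx.erdos_renyi_graph(1000, 0.1)
    model = model_factory(g)
    model.set_initial_status({'model': {'percentage_infected': infected}})
    return model.iteration_bunch(iterations)

# e.g. assert len(run_bunch(vm.VoterModel)) == 10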
e6397785d5d17d1214c1d8fbf7cd503f4484c5d9
|
olympiad/square.py
|
olympiad/square.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Fabian M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Read positive integers from stdin until zero occurs and print the
# sum of the squares of the odd integers.
if __name__ == "__main__":
    numbers = []
    while True:
        read = int(input())
        if read == 0: break
        numbers.append(read)
    print(sum(map(lambda x: x*x, filter(lambda x: x & 0x1, numbers))))
|
Add solution for problem A2
|
Add solution for problem A2
|
Python
|
apache-2.0
|
fabianm/olympiad,fabianm/olympiad,fabianm/olympiad
|
Add solution for problem A2
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Fabian M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Read positive integers from stdin until zero occurs and print the
# sum of the squares of the odd integers.
if __name__ == "__main__":
    numbers = []
    while True:
        read = int(input())
        if read == 0: break
        numbers.append(read)
    print(sum(map(lambda x: x*x, filter(lambda x: x & 0x1, numbers))))
|
<commit_before><commit_msg>Add solution for problem A2<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Fabian M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Read positive integers from stdin until zero occurs and print the
# sum of the squares of the odd integers.
if __name__ == "__main__":
    numbers = []
    while True:
        read = int(input())
        if read == 0: break
        numbers.append(read)
    print(sum(map(lambda x: x*x, filter(lambda x: x & 0x1, numbers))))
|
Add solution for problem A2#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Fabian M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Read positive integers from stdin until zero occurs and print the
# sum of the squares of the odd integers.
if __name__ == "__main__":
    numbers = []
    while True:
        read = int(input())
        if read == 0: break
        numbers.append(read)
    print(sum(map(lambda x: x*x, filter(lambda x: x & 0x1, numbers))))
|
<commit_before><commit_msg>Add solution for problem A2<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Fabian M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Read positive integers from stdin until zero occurs and print the
# sum of the squares of the odd integers.
if __name__ == "__main__":
    numbers = []
    while True:
        read = int(input())
        if read == 0: break
        numbers.append(read)
    print(sum(map(lambda x: x*x, filter(lambda x: x & 0x1, numbers))))
|
|
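A worked example of the same computation on a fixed list, with no stdin involved:
nums = [3, 4, 5, 6]  # odd values are 3 and 5
print(sum(x * x for x in nums if x % 2))  # 9 + 25 = 34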
a743e0293c3ca04e7b5e4cdb7059085e2b4fdb6c
|
test/test_document.py
|
test/test_document.py
|
from __future__ import division, absolute_import, print_function
import unittest
from svgpathtools import *
from io import StringIO
from os.path import join, dirname
class TestDocument(unittest.TestCase):
def test_from_file_path(self):
""" Test reading svg from file provided as path """
doc = Document(join(dirname(__file__), 'polygons.svg'))
self.assertEqual(len(doc.paths()), 2)
def test_from_file_object(self):
""" Test reading svg from file object that has already been opened """
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
doc = Document(file)
self.assertEqual(len(doc.paths()), 2)
def test_from_stringio(self):
""" Test reading svg object contained in a StringIO object """
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
# read entire file into string
file_content: str = file.read()
# prepare stringio object
file_as_stringio = StringIO()
# paste file content into it
file_as_stringio.write(file_content)
            # reset cursor to its beginning
file_as_stringio.seek(0)
doc = Document(file_as_stringio)
self.assertEqual(len(doc.paths()), 2)
def test_from_string_without_svg_attrs(self):
""" Test reading svg object contained in a string without svg attributes"""
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
# read entire file into string
file_content: str = file.read()
doc = Document.from_svg_string(file_content)
self.assertEqual(len(doc.paths()), 2)
|
Add tests for creating from file location, file, StringIO, and string
|
Add tests for creating from file location, file, StringIO, and string
|
Python
|
mit
|
mathandy/svgpathtools
|
Add tests for creating from file location, file, StringIO, and string
|
from __future__ import division, absolute_import, print_function
import unittest
from svgpathtools import *
from io import StringIO
from os.path import join, dirname
class TestDocument(unittest.TestCase):
def test_from_file_path(self):
""" Test reading svg from file provided as path """
doc = Document(join(dirname(__file__), 'polygons.svg'))
self.assertEqual(len(doc.paths()), 2)
def test_from_file_object(self):
""" Test reading svg from file object that has already been opened """
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
doc = Document(file)
self.assertEqual(len(doc.paths()), 2)
def test_from_stringio(self):
""" Test reading svg object contained in a StringIO object """
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
# read entire file into string
file_content: str = file.read()
# prepare stringio object
file_as_stringio = StringIO()
# paste file content into it
file_as_stringio.write(file_content)
            # reset cursor to its beginning
file_as_stringio.seek(0)
doc = Document(file_as_stringio)
self.assertEqual(len(doc.paths()), 2)
def test_from_string_without_svg_attrs(self):
""" Test reading svg object contained in a string without svg attributes"""
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
# read entire file into string
file_content: str = file.read()
doc = Document.from_svg_string(file_content)
self.assertEqual(len(doc.paths()), 2)
|
<commit_before><commit_msg>Add tests for creating from file location, file, StringIO, and string<commit_after>
|
from __future__ import division, absolute_import, print_function
import unittest
from svgpathtools import *
from io import StringIO
from os.path import join, dirname
class TestDocument(unittest.TestCase):
def test_from_file_path(self):
""" Test reading svg from file provided as path """
doc = Document(join(dirname(__file__), 'polygons.svg'))
self.assertEqual(len(doc.paths()), 2)
def test_from_file_object(self):
""" Test reading svg from file object that has already been opened """
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
doc = Document(file)
self.assertEqual(len(doc.paths()), 2)
def test_from_stringio(self):
""" Test reading svg object contained in a StringIO object """
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
# read entire file into string
file_content: str = file.read()
# prepare stringio object
file_as_stringio = StringIO()
# paste file content into it
file_as_stringio.write(file_content)
            # reset cursor to its beginning
file_as_stringio.seek(0)
doc = Document(file_as_stringio)
self.assertEqual(len(doc.paths()), 2)
def test_from_string_without_svg_attrs(self):
""" Test reading svg object contained in a string without svg attributes"""
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
# read entire file into string
file_content: str = file.read()
doc = Document.from_svg_string(file_content)
self.assertEqual(len(doc.paths()), 2)
|
Add tests for creating from file location, file, StringIO, and stringfrom __future__ import division, absolute_import, print_function
import unittest
from svgpathtools import *
from io import StringIO
from os.path import join, dirname
class TestDocument(unittest.TestCase):
def test_from_file_path(self):
""" Test reading svg from file provided as path """
doc = Document(join(dirname(__file__), 'polygons.svg'))
self.assertEqual(len(doc.paths()), 2)
def test_from_file_object(self):
""" Test reading svg from file object that has already been opened """
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
doc = Document(file)
self.assertEqual(len(doc.paths()), 2)
def test_from_stringio(self):
""" Test reading svg object contained in a StringIO object """
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
# read entire file into string
file_content: str = file.read()
# prepare stringio object
file_as_stringio = StringIO()
# paste file content into it
file_as_stringio.write(file_content)
            # reset cursor to its beginning
file_as_stringio.seek(0)
doc = Document(file_as_stringio)
self.assertEqual(len(doc.paths()), 2)
def test_from_string_without_svg_attrs(self):
""" Test reading svg object contained in a string without svg attributes"""
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
# read entire file into string
file_content: str = file.read()
doc = Document.from_svg_string(file_content)
self.assertEqual(len(doc.paths()), 2)
|
<commit_before><commit_msg>Add tests for creating from file location, file, StringIO, and string<commit_after>from __future__ import division, absolute_import, print_function
import unittest
from svgpathtools import *
from io import StringIO
from os.path import join, dirname
class TestDocument(unittest.TestCase):
def test_from_file_path(self):
""" Test reading svg from file provided as path """
doc = Document(join(dirname(__file__), 'polygons.svg'))
self.assertEqual(len(doc.paths()), 2)
def test_from_file_object(self):
""" Test reading svg from file object that has already been opened """
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
doc = Document(file)
self.assertEqual(len(doc.paths()), 2)
def test_from_stringio(self):
""" Test reading svg object contained in a StringIO object """
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
# read entire file into string
file_content: str = file.read()
# prepare stringio object
file_as_stringio = StringIO()
# paste file content into it
file_as_stringio.write(file_content)
            # reset cursor to its beginning
file_as_stringio.seek(0)
doc = Document(file_as_stringio)
self.assertEqual(len(doc.paths()), 2)
def test_from_string_without_svg_attrs(self):
""" Test reading svg object contained in a string without svg attributes"""
with open(join(dirname(__file__), 'polygons.svg'), 'r') as file:
# read entire file into string
file_content: str = file.read()
doc = Document.from_svg_string(file_content)
self.assertEqual(len(doc.paths()), 2)
|
|
807d589d941418a763b4ebfdd3c117df08de2df4
|
PositionalList.py
|
PositionalList.py
|
class _DoubleLinkedBase:
class _Node:
__slots__ = '_element', '_prev', '_next'
def __init__(self, element, prev, next):
self._element = element
self._prev = prev
self._next = next
def __init__(self):
self._header = self._Node(None, None, None)
self._tailer = self._Node(None, None, None)
self._header._next = self.tailer
self._tailer._prev = self.header
self._size = 0
def insert_between(self, e, prev, next):
node = self._Node(e, prev, next)
prev._next = node
next._prev = node
self._size += 1
return node
def delete(self, node):
node._prev._next = node._next
node._next._prev = node._prev
self._size -= 1
return node._element
# Given a position in the list, you can access the elements at O(1)
class PositionalList(_DoubleLinkedBase):
class Position:
def __init__(self):
|
Add positional list skeleton ???
|
Add positional list skeleton ???
|
Python
|
mit
|
xiao0720/leetcode,xliiauo/leetcode,xliiauo/leetcode,xiao0720/leetcode,xliiauo/leetcode
|
Add positional list skeleton ???
|
class _DoubleLinkedBase:
class _Node:
__slots__ = '_element', '_prev', '_next'
def __init__(self, element, prev, next):
self._element = element
self._prev = prev
self._next = next
def __init__(self):
self._header = self._Node(None, None, None)
self._tailer = self._Node(None, None, None)
self._header._next = self.tailer
self._tailer._prev = self.header
self._size = 0
def insert_between(self, e, prev, next):
node = self._Node(e, prev, next)
prev._next = node
next._prev = node
self._size += 1
return node
def delete(self, node):
node._prev._next = node._next
node._next._prev = node._prev
self._size -= 1
return node._element
# Given a position in the list, you can access the elements at O(1)
class PositionalList(_DoubleLinkedBase):
class Position:
def __init__(self):
|
<commit_before><commit_msg>Add positional list skeleton ???<commit_after>
|
class _DoubleLinkedBase:
class _Node:
__slots__ = '_element', '_prev', '_next'
def __init__(self, element, prev, next):
self._element = element
self._prev = prev
self._next = next
def __init__(self):
self._header = self._Node(None, None, None)
self._tailer = self._Node(None, None, None)
self._header._next = self.tailer
self._tailer._prev = self.header
self._size = 0
def insert_between(self, e, prev, next):
node = self._Node(e, prev, next)
prev._next = node
next._prev = node
self._size += 1
return node
def delete(self, node):
node._prev._next = node._next
node._next._prev = node._prev
self._size -= 1
return node._element
# Given a position in the list, you can access the elements at O(1)
class PositionalList(_DoubleLinkedBase):
class Position:
def __init__(self):
|
Add positional list skeleton ???
class _DoubleLinkedBase:
class _Node:
__slots__ = '_element', '_prev', '_next'
def __init__(self, element, prev, next):
self._element = element
self._prev = prev
self._next = next
def __init__(self):
self._header = self._Node(None, None, None)
self._tailer = self._Node(None, None, None)
self._header._next = self.tailer
self._tailer._prev = self.header
self._size = 0
def insert_between(self, e, prev, next):
node = self._Node(e, prev, next)
prev._next = node
next._prev = node
self._size += 1
return node
def delete(self, node):
node._prev._next = node._next
node._next._prev = node._prev
self._size -= 1
return node._element
# Given a position in the list, you can access the elements at O(1)
class PositionalList(_DoubleLinkedBase):
class Position:
def __init__(self):
|
<commit_before><commit_msg>Add positional list skeleton ???<commit_after>class _DoubleLinkedBase:
class _Node:
__slots__ = '_element', '_prev', '_next'
def __init__(self, element, prev, next):
self._element = element
self._prev = prev
self._next = next
def __init__(self):
self._header = self._Node(None, None, None)
self._tailer = self._Node(None, None, None)
self._header._next = self.tailer
self._tailer._prev = self.header
self._size = 0
def insert_between(self, e, prev, next):
node = self._Node(e, prev, next)
prev._next = node
next._prev = node
self._size += 1
return node
def delete(self, node):
node._prev._next = node._next
node._next._prev = node._prev
self._size -= 1
return node._element
# Given a position in the list, you can access the elements at O(1)
class PositionalList(_DoubleLinkedBase):
class Position:
def __init__(self):
|
|
f5eeda75d9cc58c6552ab573f6866a144704c962
|
pyim/main/split.py
|
pyim/main/split.py
|
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
from argparse import ArgumentParser
from pathlib import Path
import pysam
import pandas as pd
def setup_parser():
parser = ArgumentParser(prog='pyim-merge')
parser.add_argument('alignment_bam', type=Path)
parser.add_argument('read_barcode_map', type=Path)
parser.add_argument('--output_dir', type=Path, default='.')
return parser
def main():
parser = setup_parser()
args = parser.parse_args()
# Create output dir.
if not args.output_dir.exists():
args.output_dir.mkdir()
# Read barcodes.
barcode_map = pd.read_csv(str(args.read_barcode_map), sep='\t')
barcode_map = dict(zip(barcode_map['read_id'], barcode_map['barcode']))
# Split reads into separate files.
with pysam.AlignmentFile(str(args.alignment_bam), 'rb') as in_file:
out_files = {}
try:
# Open output files.
for sample in set(barcode_map.values()):
out_name = args.alignment_bam.stem + '.{}.bam'.format(sample)
out_path = args.output_dir / out_name
out_files[sample] = pysam.AlignmentFile(
str(out_path), 'wb', template=in_file)
# Write reads to separate files.
for read in in_file:
sample = barcode_map[read.query_name]
out_files[sample].write(read)
finally:
for out_path in out_files.values():
out_path.close()
if __name__ == '__main__':
main()
|
Split bam files based on barcodes.
|
Split bam files based on barcodes.
|
Python
|
mit
|
jrderuiter/pyim,jrderuiter/pyim
|
Split bam files based on barcodes.
|
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
from argparse import ArgumentParser
from pathlib import Path
import pysam
import pandas as pd
def setup_parser():
parser = ArgumentParser(prog='pyim-merge')
parser.add_argument('alignment_bam', type=Path)
parser.add_argument('read_barcode_map', type=Path)
parser.add_argument('--output_dir', type=Path, default='.')
return parser
def main():
parser = setup_parser()
args = parser.parse_args()
# Create output dir.
if not args.output_dir.exists():
args.output_dir.mkdir()
# Read barcodes.
barcode_map = pd.read_csv(str(args.read_barcode_map), sep='\t')
barcode_map = dict(zip(barcode_map['read_id'], barcode_map['barcode']))
# Split reads into separate files.
with pysam.AlignmentFile(str(args.alignment_bam), 'rb') as in_file:
out_files = {}
try:
# Open output files.
for sample in set(barcode_map.values()):
out_name = args.alignment_bam.stem + '.{}.bam'.format(sample)
out_path = args.output_dir / out_name
out_files[sample] = pysam.AlignmentFile(
str(out_path), 'wb', template=in_file)
# Write reads to separate files.
for read in in_file:
sample = barcode_map[read.query_name]
out_files[sample].write(read)
finally:
for out_path in out_files.values():
out_path.close()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Split bam files based on barcodes.<commit_after>
|
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
from argparse import ArgumentParser
from pathlib import Path
import pysam
import pandas as pd
def setup_parser():
parser = ArgumentParser(prog='pyim-merge')
parser.add_argument('alignment_bam', type=Path)
parser.add_argument('read_barcode_map', type=Path)
parser.add_argument('--output_dir', type=Path, default='.')
return parser
def main():
parser = setup_parser()
args = parser.parse_args()
# Create output dir.
if not args.output_dir.exists():
args.output_dir.mkdir()
# Read barcodes.
barcode_map = pd.read_csv(str(args.read_barcode_map), sep='\t')
barcode_map = dict(zip(barcode_map['read_id'], barcode_map['barcode']))
# Split reads into separate files.
with pysam.AlignmentFile(str(args.alignment_bam), 'rb') as in_file:
out_files = {}
try:
# Open output files.
for sample in set(barcode_map.values()):
out_name = args.alignment_bam.stem + '.{}.bam'.format(sample)
out_path = args.output_dir / out_name
out_files[sample] = pysam.AlignmentFile(
str(out_path), 'wb', template=in_file)
# Write reads to separate files.
for read in in_file:
sample = barcode_map[read.query_name]
out_files[sample].write(read)
finally:
for out_path in out_files.values():
out_path.close()
if __name__ == '__main__':
main()
|
Split bam files based on barcodes.
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
from argparse import ArgumentParser
from pathlib import Path
import pysam
import pandas as pd
def setup_parser():
parser = ArgumentParser(prog='pyim-merge')
parser.add_argument('alignment_bam', type=Path)
parser.add_argument('read_barcode_map', type=Path)
parser.add_argument('--output_dir', type=Path, default='.')
return parser
def main():
parser = setup_parser()
args = parser.parse_args()
# Create output dir.
if not args.output_dir.exists():
args.output_dir.mkdir()
# Read barcodes.
barcode_map = pd.read_csv(str(args.read_barcode_map), sep='\t')
barcode_map = dict(zip(barcode_map['read_id'], barcode_map['barcode']))
# Split reads into separate files.
with pysam.AlignmentFile(str(args.alignment_bam), 'rb') as in_file:
out_files = {}
try:
# Open output files.
for sample in set(barcode_map.values()):
out_name = args.alignment_bam.stem + '.{}.bam'.format(sample)
out_path = args.output_dir / out_name
out_files[sample] = pysam.AlignmentFile(
str(out_path), 'wb', template=in_file)
# Write reads to separate files.
for read in in_file:
sample = barcode_map[read.query_name]
out_files[sample].write(read)
finally:
for out_path in out_files.values():
out_path.close()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Split bam files based on barcodes.<commit_after>from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
from argparse import ArgumentParser
from pathlib import Path
import pysam
import pandas as pd
def setup_parser():
parser = ArgumentParser(prog='pyim-merge')
parser.add_argument('alignment_bam', type=Path)
parser.add_argument('read_barcode_map', type=Path)
parser.add_argument('--output_dir', type=Path, default='.')
return parser
def main():
parser = setup_parser()
args = parser.parse_args()
# Create output dir.
if not args.output_dir.exists():
args.output_dir.mkdir()
# Read barcodes.
barcode_map = pd.read_csv(str(args.read_barcode_map), sep='\t')
barcode_map = dict(zip(barcode_map['read_id'], barcode_map['barcode']))
# Split reads into separate files.
with pysam.AlignmentFile(str(args.alignment_bam), 'rb') as in_file:
out_files = {}
try:
# Open output files.
for sample in set(barcode_map.values()):
out_name = args.alignment_bam.stem + '.{}.bam'.format(sample)
out_path = args.output_dir / out_name
out_files[sample] = pysam.AlignmentFile(
str(out_path), 'wb', template=in_file)
# Write reads to separate files.
for read in in_file:
sample = barcode_map[read.query_name]
out_files[sample].write(read)
finally:
for out_path in out_files.values():
out_path.close()
if __name__ == '__main__':
main()
|
|
bc2d986478465c261e2a01a591c956b65364cd90
|
ldap-helper.py
|
ldap-helper.py
|
# Handles queries to the LDAP backend
# Reads the LDAP server configuration from a JSON file
import json
import ldap
first_connect = True
# The default config filename
config_file = 'config.json'
def load_config():
with open(config_file, 'r') as f:
config = json.load(f)
ldap_server = config['ldap_server']
ldap_version = config['ldap_version']
ldap_password = config['ldap_password']
ldap_user = config['ldap_user']
def connect():
if first_connect:
load_config()
first_connect = False
l = ldap.initialize('ldap://' + ldap_server)
try:
l.protocol_version = ldap.VERSION3 # parse this from config instead
l.simple_bind_s(ldap_user, ldap_password)
valid = True
except ldap.INVALID_CREDENTIALS:
print "Invalid login credentials"
sys.exit(-1)
except ldap.LDAPError, e:
if type(e.message) == dict and e.message.has_key('desc'):
print e.message['desc']
else:
print e
sys.exit(-2)
|
Add initial helper functions for connecting to LDAP
|
Add initial helper functions for connecting to LDAP
|
Python
|
mit
|
motorolja/ldap-updater
|
Add initial helper functions for connecting to LDAP
|
# Handles queries to the LDAP backend
# Reads the LDAP server configuration from a JSON file
import json
import ldap
first_connect = True
# The default config filename
config_file = 'config.json'
def load_config():
with open(config_file, 'r') as f:
config = json.load(f)
ldap_server = config['ldap_server']
ldap_version = config['ldap_version']
ldap_password = config['ldap_password']
ldap_user = config['ldap_user']
def connect():
if first_connect:
load_config()
first_connect = False
l = ldap.initialize('ldap://' + ldap_server)
try:
l.protocol_version = ldap.VERSION3 # parse this from config instead
l.simple_bind_s(ldap_user, ldap_password)
valid = True
except ldap.INVALID_CREDENTIALS:
print "Invalid login credentials"
sys.exit(-1)
except ldap.LDAPError, e:
if type(e.message) == dict and e.message.has_key('desc'):
print e.message['desc']
else:
print e
sys.exit(-2)
|
<commit_before><commit_msg>Add initial helper functions for connecting to LDAP<commit_after>
|
# Handles queries to the LDAP backend
# Reads the LDAP server configuration from a JSON file
import json
import ldap
first_connect = True
# The default config filename
config_file = 'config.json'
def load_config():
with open(config_file, 'r') as f:
config = json.load(f)
ldap_server = config['ldap_server']
ldap_version = config['ldap_version']
ldap_password = config['ldap_password']
ldap_user = config['ldap_user']
def connect():
if first_connect:
load_config()
first_connect = False
l = ldap.initialize('ldap://' + ldap_server)
try:
l.protocol_version = ldap.VERSION3 # parse this from config instead
l.simple_bind_s(ldap_user, ldap_password)
valid = True
except ldap.INVALID_CREDENTIALS:
print "Invalid login credentials"
sys.exit(-1)
except ldap.LDAPError, e:
if type(e.message) == dict and e.message.has_key('desc'):
print e.message['desc']
else:
print e
sys.exit(-2)
|
Add initial helper functions for connecting to LDAP
# Handles queries to the LDAP backend
# Reads the LDAP server configuration from a JSON file
import json
import ldap
first_connect = True
# The default config filename
config_file = 'config.json'
def load_config():
with open(config_file, 'r') as f:
config = json.load(f)
ldap_server = config['ldap_server']
ldap_version = config['ldap_version']
ldap_password = config['ldap_password']
ldap_user = config['ldap_user']
def connect():
if first_connect:
load_config()
first_connect = False
l = ldap.initialize('ldap://' + ldap_server)
try:
l.protocol_version = ldap.VERSION3 # parse this from config instead
l.simple_bind_s(ldap_user, ldap_password)
valid = True
except ldap.INVALID_CREDENTIALS:
print "Invalid login credentials"
sys.exit(-1)
except ldap.LDAPError, e:
if type(e.message) == dict and e.message.has_key('desc'):
print e.message['desc']
else:
print e
sys.exit(-2)
|
<commit_before><commit_msg>Add initial helper functions for connecting to LDAP<commit_after># Handles queries to the LDAP backend
# Reads the LDAP server configuration from a JSON file
import json
import ldap
first_connect = True
# The default config filename
config_file = 'config.json'
def load_config():
with open(config_file, 'r') as f:
config = json.load(f)
ldap_server = config['ldap_server']
ldap_version = config['ldap_version']
ldap_password = config['ldap_password']
ldap_user = config['ldap_user']
def connect():
if first_connect:
load_config()
first_connect = False
l = ldap.initialize('ldap://' + ldap_server)
try:
l.protocol_version = ldap.VERSION3 # parse this from config instead
l.simple_bind_s(ldap_user, ldap_password)
valid = True
except ldap.INVALID_CREDENTIALS:
print "Invalid login credentials"
sys.exit(-1)
except ldap.LDAPError, e:
if type(e.message) == dict and e.message.has_key('desc'):
print e.message['desc']
else:
print e
sys.exit(-2)
|
|
64e5bf4187cd9a5e584901583a8ccd60a92314df
|
preshed/tests/test_pop.py
|
preshed/tests/test_pop.py
|
from ..maps import PreshMap
def test_pop1():
table = PreshMap()
table[10] = 20
table[30] = 25
assert table[10] == 20
assert table[30] == 25
table.pop(30)
assert table[10] == 20
|
Add some tests for pop method
|
Add some tests for pop method
|
Python
|
mit
|
syllog1sm/preshed,explosion/preshed,explosion/preshed,spacy-io/preshed,spacy-io/preshed,explosion/preshed
|
Add some tests for pop method
|
from ..maps import PreshMap
def test_pop1():
table = PreshMap()
table[10] = 20
table[30] = 25
assert table[10] == 20
assert table[30] == 25
table.pop(30)
assert table[10] == 20
|
<commit_before><commit_msg>Add some tests for pop method<commit_after>
|
from ..maps import PreshMap
def test_pop1():
table = PreshMap()
table[10] = 20
table[30] = 25
assert table[10] == 20
assert table[30] == 25
table.pop(30)
assert table[10] == 20
|
Add some tests for pop method
from ..maps import PreshMap
def test_pop1():
table = PreshMap()
table[10] = 20
table[30] = 25
assert table[10] == 20
assert table[30] == 25
table.pop(30)
assert table[10] == 20
|
<commit_before><commit_msg>Add some tests for pop method<commit_after>from ..maps import PreshMap
def test_pop1():
table = PreshMap()
table[10] = 20
table[30] = 25
assert table[10] == 20
assert table[30] == 25
table.pop(30)
assert table[10] == 20
|
|
e9e56b7ab243bf06721fac369e05db19dbfc8ee6
|
db/base/management/commands/update_all_sats.py
|
db/base/management/commands/update_all_sats.py
|
from orbit import satellite
from django.core.management.base import BaseCommand
from db.base.models import Satellite
class Command(BaseCommand):
help = 'Update existing Satellites'
def handle(self, *args, **options):
satellites = Satellite.objects.all()
for obj in satellites:
try:
sat = satellite(obj.norad_cat_id)
except:
self.stdout.write(('Satellite {} with Identifier {} does '
'not exist').format(obj.name, obj.norad_cat_id))
continue
obj.name = sat.name()
obj.save()
self.stdout.write(('Satellite {} with Identifier {} '
'found [updated]').format(obj.norad_cat_id, obj.name))
|
Add command to update Satellites names
|
Add command to update Satellites names
|
Python
|
agpl-3.0
|
Roboneet/satnogs-db,Roboneet/satnogs-db,Roboneet/satnogs-db,Roboneet/satnogs-db
|
Add command to update Satellites names
|
from orbit import satellite
from django.core.management.base import BaseCommand
from db.base.models import Satellite
class Command(BaseCommand):
help = 'Update existing Satellites'
def handle(self, *args, **options):
satellites = Satellite.objects.all()
for obj in satellites:
try:
sat = satellite(obj.norad_cat_id)
except:
self.stdout.write(('Satellite {} with Identifier {} does '
'not exist').format(obj.name, obj.norad_cat_id))
continue
obj.name = sat.name()
obj.save()
self.stdout.write(('Satellite {} with Identifier {} '
'found [updated]').format(obj.norad_cat_id, obj.name))
|
<commit_before><commit_msg>Add command to update Satellites names<commit_after>
|
from orbit import satellite
from django.core.management.base import BaseCommand
from db.base.models import Satellite
class Command(BaseCommand):
help = 'Update existing Satellites'
def handle(self, *args, **options):
satellites = Satellite.objects.all()
for obj in satellites:
try:
sat = satellite(obj.norad_cat_id)
except:
self.stdout.write(('Satellite {} with Identifier {} does '
'not exist').format(obj.name, obj.norad_cat_id))
continue
obj.name = sat.name()
obj.save()
self.stdout.write(('Satellite {} with Identifier {} '
'found [updated]').format(obj.norad_cat_id, obj.name))
|
Add command to update Satellites names
from orbit import satellite
from django.core.management.base import BaseCommand
from db.base.models import Satellite
class Command(BaseCommand):
help = 'Update existing Satellites'
def handle(self, *args, **options):
satellites = Satellite.objects.all()
for obj in satellites:
try:
sat = satellite(obj.norad_cat_id)
except:
self.stdout.write(('Satellite {} with Identifier {} does '
'not exist').format(obj.name, obj.norad_cat_id))
continue
obj.name = sat.name()
obj.save()
self.stdout.write(('Satellite {} with Identifier {} '
'found [updated]').format(obj.norad_cat_id, obj.name))
|
<commit_before><commit_msg>Add command to update Satellites names<commit_after>from orbit import satellite
from django.core.management.base import BaseCommand
from db.base.models import Satellite
class Command(BaseCommand):
help = 'Update existing Satellites'
def handle(self, *args, **options):
satellites = Satellite.objects.all()
for obj in satellites:
try:
sat = satellite(obj.norad_cat_id)
except:
self.stdout.write(('Satellite {} with Identifier {} does '
'not exist').format(obj.name, obj.norad_cat_id))
continue
obj.name = sat.name()
obj.save()
self.stdout.write(('Satellite {} with Identifier {} '
'found [updated]').format(obj.norad_cat_id, obj.name))
|
|
6572f323a7cf5121789d510391371051d3014d1b
|
corgi/visualization.py
|
corgi/visualization.py
|
import numpy as np
def hist(data, width=20):
counts, values = np.histogram(data)
max_count = counts.max()
for (count, value) in zip(counts, values):
scaled = int(round((count / max_count) * width))
print('%5.2f' % value, ':', 'X'*scaled)
|
Add a function for simple command line stdout histogram over an array
|
Add a function for simple command line stdout histogram over an array
|
Python
|
mit
|
log0ymxm/corgi
|
Add a function for simple command line stdout histogram over an array
|
import numpy as np
def hist(data, width=20):
counts, values = np.histogram(data)
max_count = counts.max()
for (count, value) in zip(counts, values):
scaled = int(round((count / max_count) * width))
print('%5.2f' % value, ':', 'X'*scaled)
|
<commit_before><commit_msg>Add a function for simple command line stdout histogram over an array<commit_after>
|
import numpy as np
def hist(data, width=20):
counts, values = np.histogram(data)
max_count = counts.max()
for (count, value) in zip(counts, values):
scaled = int(round((count / max_count) * width))
print('%5.2f' % value, ':', 'X'*scaled)
|
Add a function for simple command line stdout histogram over an array
import numpy as np
def hist(data, width=20):
counts, values = np.histogram(data)
max_count = counts.max()
for (count, value) in zip(counts, values):
scaled = int(round((count / max_count) * width))
print('%5.2f' % value, ':', 'X'*scaled)
|
<commit_before><commit_msg>Add a function for simple command line stdout histogram over an array<commit_after>import numpy as np
def hist(data, width=20):
counts, values = np.histogram(data)
max_count = counts.max()
for (count, value) in zip(counts, values):
scaled = int(round((count / max_count) * width))
print('%5.2f' % value, ':', 'X'*scaled)
|
|
eb09ba7be7f1eab48ee7e0cd1a6b7d7753907ff8
|
raiden/tests/unit/test_logging.py
|
raiden/tests/unit/test_logging.py
|
import structlog
import logging
import traceback
from raiden.log_config import configure_logging
def test_basic_logging(capsys):
configure_logging({'': 'DEBUG'})
log = structlog.get_logger('test').bind(foo='bar')
log.info('test event', key='value')
captured = capsys.readouterr()
assert 'test event' in captured.err
assert 'key=value' in captured.err
assert 'foo=bar' in captured.err
def test_redacted_request(capsys):
configure_logging({'': 'DEBUG'})
token = 'my_access_token123'
# use logging, as 'urllib3/requests'
log = logging.getLogger('urllib3.connectionpool')
log.debug('Starting new HTTPS connection (1): example.org:443')
log.debug(f'https://example.org:443 "GET /endpoint?access_token={token} HTTP/1.1" 200 403')
captured = capsys.readouterr()
assert token not in captured.err
assert 'access_token=<redacted>' in captured.err
def test_redacted_traceback(capsys):
configure_logging({'': 'DEBUG'})
token = 'my_access_token123'
try:
assert False, f'Failed acessing /endpoint?accessToken={token}'
except AssertionError:
traceback.print_exc()
captured = capsys.readouterr()
assert token not in captured.err
assert 'accessToken=<redacted>' in captured.err
|
Add logging and redaction tests
|
Add logging and redaction tests
|
Python
|
mit
|
hackaugusto/raiden,hackaugusto/raiden
|
Add logging and redaction tests
|
import structlog
import logging
import traceback
from raiden.log_config import configure_logging
def test_basic_logging(capsys):
configure_logging({'': 'DEBUG'})
log = structlog.get_logger('test').bind(foo='bar')
log.info('test event', key='value')
captured = capsys.readouterr()
assert 'test event' in captured.err
assert 'key=value' in captured.err
assert 'foo=bar' in captured.err
def test_redacted_request(capsys):
configure_logging({'': 'DEBUG'})
token = 'my_access_token123'
# use logging, as 'urllib3/requests'
log = logging.getLogger('urllib3.connectionpool')
log.debug('Starting new HTTPS connection (1): example.org:443')
log.debug(f'https://example.org:443 "GET /endpoint?access_token={token} HTTP/1.1" 200 403')
captured = capsys.readouterr()
assert token not in captured.err
assert 'access_token=<redacted>' in captured.err
def test_redacted_traceback(capsys):
configure_logging({'': 'DEBUG'})
token = 'my_access_token123'
try:
assert False, f'Failed acessing /endpoint?accessToken={token}'
except AssertionError:
traceback.print_exc()
captured = capsys.readouterr()
assert token not in captured.err
assert 'accessToken=<redacted>' in captured.err
|
<commit_before><commit_msg>Add logging and redaction tests<commit_after>
|
import structlog
import logging
import traceback
from raiden.log_config import configure_logging
def test_basic_logging(capsys):
configure_logging({'': 'DEBUG'})
log = structlog.get_logger('test').bind(foo='bar')
log.info('test event', key='value')
captured = capsys.readouterr()
assert 'test event' in captured.err
assert 'key=value' in captured.err
assert 'foo=bar' in captured.err
def test_redacted_request(capsys):
configure_logging({'': 'DEBUG'})
token = 'my_access_token123'
# use logging, as 'urllib3/requests'
log = logging.getLogger('urllib3.connectionpool')
log.debug('Starting new HTTPS connection (1): example.org:443')
log.debug(f'https://example.org:443 "GET /endpoint?access_token={token} HTTP/1.1" 200 403')
captured = capsys.readouterr()
assert token not in captured.err
assert 'access_token=<redacted>' in captured.err
def test_redacted_traceback(capsys):
configure_logging({'': 'DEBUG'})
token = 'my_access_token123'
try:
assert False, f'Failed acessing /endpoint?accessToken={token}'
except AssertionError:
traceback.print_exc()
captured = capsys.readouterr()
assert token not in captured.err
assert 'accessToken=<redacted>' in captured.err
|
Add logging and redaction tests
import structlog
import logging
import traceback
from raiden.log_config import configure_logging
def test_basic_logging(capsys):
configure_logging({'': 'DEBUG'})
log = structlog.get_logger('test').bind(foo='bar')
log.info('test event', key='value')
captured = capsys.readouterr()
assert 'test event' in captured.err
assert 'key=value' in captured.err
assert 'foo=bar' in captured.err
def test_redacted_request(capsys):
configure_logging({'': 'DEBUG'})
token = 'my_access_token123'
# use logging, as 'urllib3/requests'
log = logging.getLogger('urllib3.connectionpool')
log.debug('Starting new HTTPS connection (1): example.org:443')
log.debug(f'https://example.org:443 "GET /endpoint?access_token={token} HTTP/1.1" 200 403')
captured = capsys.readouterr()
assert token not in captured.err
assert 'access_token=<redacted>' in captured.err
def test_redacted_traceback(capsys):
configure_logging({'': 'DEBUG'})
token = 'my_access_token123'
try:
assert False, f'Failed acessing /endpoint?accessToken={token}'
except AssertionError:
traceback.print_exc()
captured = capsys.readouterr()
assert token not in captured.err
assert 'accessToken=<redacted>' in captured.err
|
<commit_before><commit_msg>Add logging and redaction tests<commit_after>import structlog
import logging
import traceback
from raiden.log_config import configure_logging
def test_basic_logging(capsys):
configure_logging({'': 'DEBUG'})
log = structlog.get_logger('test').bind(foo='bar')
log.info('test event', key='value')
captured = capsys.readouterr()
assert 'test event' in captured.err
assert 'key=value' in captured.err
assert 'foo=bar' in captured.err
def test_redacted_request(capsys):
configure_logging({'': 'DEBUG'})
token = 'my_access_token123'
# use logging, as 'urllib3/requests'
log = logging.getLogger('urllib3.connectionpool')
log.debug('Starting new HTTPS connection (1): example.org:443')
log.debug(f'https://example.org:443 "GET /endpoint?access_token={token} HTTP/1.1" 200 403')
captured = capsys.readouterr()
assert token not in captured.err
assert 'access_token=<redacted>' in captured.err
def test_redacted_traceback(capsys):
configure_logging({'': 'DEBUG'})
token = 'my_access_token123'
try:
assert False, f'Failed acessing /endpoint?accessToken={token}'
except AssertionError:
traceback.print_exc()
captured = capsys.readouterr()
assert token not in captured.err
assert 'accessToken=<redacted>' in captured.err
|
|
7cd4bf8bc72e3340daa70fff451760778a832e62
|
muspelheim/src/test/python/newlinejson.py
|
muspelheim/src/test/python/newlinejson.py
|
#!/usr/bin/env python
import json
import sys
def newline_json(in_file, out_file):
for line in json.load(in_file):
json.dump(line, out_file)
out_file.write('\n')
if __name__ == '__main__':
if len(sys.argv) < 2:
print "Usage: python newlinejson.py [path to ordinary json file]"
sys.exit(1)
f = open(sys.argv[1], 'r')
try:
newline_json(f, sys.stdout)
finally:
f.close()
|
Add script to convert JSON arrays to newline separated JSON
|
Add script to convert JSON arrays to newline separated JSON
[Finished #37633897]
|
Python
|
apache-2.0
|
quasar-analytics/quasar,quasar-analytics/quasar,jedesah/Quasar,slamdata/slamengine,drostron/quasar,drostron/quasar,drostron/quasar,jedesah/Quasar,slamdata/slamengine,jedesah/Quasar,slamdata/quasar,djspiewak/quasar,quasar-analytics/quasar,slamdata/slamengine,jedesah/Quasar,drostron/quasar,quasar-analytics/quasar
|
Add script to convert JSON arrays to newline separated JSON
[Finished #37633897]
|
#!/usr/bin/env python
import json
import sys
def newline_json(in_file, out_file):
for line in json.load(in_file):
json.dump(line, out_file)
out_file.write('\n')
if __name__ == '__main__':
if len(sys.argv) < 2:
print "Usage: python newlinejson.py [path to ordinary json file]"
sys.exit(1)
f = open(sys.argv[1], 'r')
try:
newline_json(f, sys.stdout)
finally:
f.close()
|
<commit_before><commit_msg>Add script to convert JSON arrays to newline separated JSON
[Finished #37633897]<commit_after>
|
#!/usr/bin/env python
import json
import sys
def newline_json(in_file, out_file):
for line in json.load(in_file):
json.dump(line, out_file)
out_file.write('\n')
if __name__ == '__main__':
if len(sys.argv) < 2:
print "Usage: python newlinejson.py [path to ordinary json file]"
sys.exit(1)
f = open(sys.argv[1], 'r')
try:
newline_json(f, sys.stdout)
finally:
f.close()
|
Add script to convert JSON arrays to newline separated JSON
[Finished #37633897]
#!/usr/bin/env python
import json
import sys
def newline_json(in_file, out_file):
for line in json.load(in_file):
json.dump(line, out_file)
out_file.write('\n')
if __name__ == '__main__':
if len(sys.argv) < 2:
print "Usage: python newlinejson.py [path to ordinary json file]"
sys.exit(1)
f = open(sys.argv[1], 'r')
try:
newline_json(f, sys.stdout)
finally:
f.close()
|
<commit_before><commit_msg>Add script to convert JSON arrays to newline separated JSON
[Finished #37633897]<commit_after>#!/usr/bin/env python
import json
import sys
def newline_json(in_file, out_file):
for line in json.load(in_file):
json.dump(line, out_file)
out_file.write('\n')
if __name__ == '__main__':
if len(sys.argv) < 2:
print "Usage: python newlinejson.py [path to ordinary json file]"
sys.exit(1)
f = open(sys.argv[1], 'r')
try:
newline_json(f, sys.stdout)
finally:
f.close()
|
|
6758e02cddf18d93e26ac23750980397089e869c
|
dragonfire/tests/test_arithmetic.py
|
dragonfire/tests/test_arithmetic.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
.. module:: text_dragonfire
:platform: Unix
:synopsis: tests for the arithmetic submodule.
.. moduleauthor:: Mehmet Mert Yıldıran <mert.yildiran@bil.omu.edu.tr>
"""
from dragonfire.arithmetic import arithmetic_parse, text2int
import pytest
def test_text2int():
assert text2int("seven billion one hundred million thirty one thousand three hundred thirty seven") == 7100031337
@pytest.mark.parametrize("command,response", [
("How much is 12 + 14?", "12 + 14 = 26"),
("How much is twelve thousand three hundred four plus two hundred fifty six?", "12304 + 256 = 12560"),
("What is five hundred eighty nine times six?", "589 * 6 = 3534"),
("What is five hundred eighty nine divided by 89?", "589 / 89 = 6.617977528089888"),
("What is seven billion five million and four thousand three hundred and four plus five million and four thousand three hundred and four?", "7005004304 + 5004304 = 7010008608"),
("How much is 16 - 23?", "16 - 23 = -7"),
("How much is 144 * 12?", "144 * 12 = 1728"),
("How much is 23 / 0?", "Sorry, but that does not make sense as the divisor cannot be zero."),
("How much is 12 + ( 14 * 3 )?", "12 + ( 14 * 3 ) = 54"),
("How much is 12 + ( 14 * )?", False)
])
def test_arithmetic_parse(command, response):
assert arithmetic_parse(command) == response
|
Add the tests for the arithmetic submodule
|
Add the tests for the arithmetic submodule
|
Python
|
mit
|
mertyildiran/Dragonfire,DragonComputer/Dragonfire,DragonComputer/Dragonfire,DragonComputer/Dragonfire,mertyildiran/Dragonfire
|
Add the tests for the arithmetic submodule
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
.. module:: text_dragonfire
:platform: Unix
:synopsis: tests for the arithmetic submodule.
.. moduleauthor:: Mehmet Mert Yıldıran <mert.yildiran@bil.omu.edu.tr>
"""
from dragonfire.arithmetic import arithmetic_parse, text2int
import pytest
def test_text2int():
assert text2int("seven billion one hundred million thirty one thousand three hundred thirty seven") == 7100031337
@pytest.mark.parametrize("command,response", [
("How much is 12 + 14?", "12 + 14 = 26"),
("How much is twelve thousand three hundred four plus two hundred fifty six?", "12304 + 256 = 12560"),
("What is five hundred eighty nine times six?", "589 * 6 = 3534"),
("What is five hundred eighty nine divided by 89?", "589 / 89 = 6.617977528089888"),
("What is seven billion five million and four thousand three hundred and four plus five million and four thousand three hundred and four?", "7005004304 + 5004304 = 7010008608"),
("How much is 16 - 23?", "16 - 23 = -7"),
("How much is 144 * 12?", "144 * 12 = 1728"),
("How much is 23 / 0?", "Sorry, but that does not make sense as the divisor cannot be zero."),
("How much is 12 + ( 14 * 3 )?", "12 + ( 14 * 3 ) = 54"),
("How much is 12 + ( 14 * )?", False)
])
def test_arithmetic_parse(command, response):
assert arithmetic_parse(command) == response
|
<commit_before><commit_msg>Add the tests for the arithmetic submodule<commit_after>
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
.. module:: text_dragonfire
:platform: Unix
:synopsis: tests for the arithmetic submodule.
.. moduleauthor:: Mehmet Mert Yıldıran <mert.yildiran@bil.omu.edu.tr>
"""
from dragonfire.arithmetic import arithmetic_parse, text2int
import pytest
def test_text2int():
assert text2int("seven billion one hundred million thirty one thousand three hundred thirty seven") == 7100031337
@pytest.mark.parametrize("command,response", [
("How much is 12 + 14?", "12 + 14 = 26"),
("How much is twelve thousand three hundred four plus two hundred fifty six?", "12304 + 256 = 12560"),
("What is five hundred eighty nine times six?", "589 * 6 = 3534"),
("What is five hundred eighty nine divided by 89?", "589 / 89 = 6.617977528089888"),
("What is seven billion five million and four thousand three hundred and four plus five million and four thousand three hundred and four?", "7005004304 + 5004304 = 7010008608"),
("How much is 16 - 23?", "16 - 23 = -7"),
("How much is 144 * 12?", "144 * 12 = 1728"),
("How much is 23 / 0?", "Sorry, but that does not make sense as the divisor cannot be zero."),
("How much is 12 + ( 14 * 3 )?", "12 + ( 14 * 3 ) = 54"),
("How much is 12 + ( 14 * )?", False)
])
def test_arithmetic_parse(command, response):
assert arithmetic_parse(command) == response
|
Add the tests for the arithmetic submodule
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
.. module:: text_dragonfire
:platform: Unix
:synopsis: tests for the arithmetic submodule.
.. moduleauthor:: Mehmet Mert Yıldıran <mert.yildiran@bil.omu.edu.tr>
"""
from dragonfire.arithmetic import arithmetic_parse, text2int
import pytest
def test_text2int():
assert text2int("seven billion one hundred million thirty one thousand three hundred thirty seven") == 7100031337
@pytest.mark.parametrize("command,response", [
("How much is 12 + 14?", "12 + 14 = 26"),
("How much is twelve thousand three hundred four plus two hundred fifty six?", "12304 + 256 = 12560"),
("What is five hundred eighty nine times six?", "589 * 6 = 3534"),
("What is five hundred eighty nine divided by 89?", "589 / 89 = 6.617977528089888"),
("What is seven billion five million and four thousand three hundred and four plus five million and four thousand three hundred and four?", "7005004304 + 5004304 = 7010008608"),
("How much is 16 - 23?", "16 - 23 = -7"),
("How much is 144 * 12?", "144 * 12 = 1728"),
("How much is 23 / 0?", "Sorry, but that does not make sense as the divisor cannot be zero."),
("How much is 12 + ( 14 * 3 )?", "12 + ( 14 * 3 ) = 54"),
("How much is 12 + ( 14 * )?", False)
])
def test_arithmetic_parse(command, response):
assert arithmetic_parse(command) == response
|
<commit_before><commit_msg>Add the tests for the arithmetic submodule<commit_after>#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
.. module:: text_dragonfire
:platform: Unix
:synopsis: tests for the arithmetic submodule.
.. moduleauthor:: Mehmet Mert Yıldıran <mert.yildiran@bil.omu.edu.tr>
"""
from dragonfire.arithmetic import arithmetic_parse, text2int
import pytest
def test_text2int():
assert text2int("seven billion one hundred million thirty one thousand three hundred thirty seven") == 7100031337
@pytest.mark.parametrize("command,response", [
("How much is 12 + 14?", "12 + 14 = 26"),
("How much is twelve thousand three hundred four plus two hundred fifty six?", "12304 + 256 = 12560"),
("What is five hundred eighty nine times six?", "589 * 6 = 3534"),
("What is five hundred eighty nine divided by 89?", "589 / 89 = 6.617977528089888"),
("What is seven billion five million and four thousand three hundred and four plus five million and four thousand three hundred and four?", "7005004304 + 5004304 = 7010008608"),
("How much is 16 - 23?", "16 - 23 = -7"),
("How much is 144 * 12?", "144 * 12 = 1728"),
("How much is 23 / 0?", "Sorry, but that does not make sense as the divisor cannot be zero."),
("How much is 12 + ( 14 * 3 )?", "12 + ( 14 * 3 ) = 54"),
("How much is 12 + ( 14 * )?", False)
])
def test_arithmetic_parse(command, response):
assert arithmetic_parse(command) == response
|