Dataset columns:
- commit: stringlengths 40–40
- old_file: stringlengths 4–118
- new_file: stringlengths 4–118
- old_contents: stringlengths 0–2.94k
- new_contents: stringlengths 1–4.43k
- subject: stringlengths 15–444
- message: stringlengths 16–3.45k
- lang: stringclasses 1 (value)
- license: stringclasses 13 (values)
- repos: stringlengths 5–43.2k
- prompt: stringlengths 17–4.58k
- response: stringlengths 1–4.43k
- prompt_tagged: stringlengths 58–4.62k
- response_tagged: stringlengths 1–4.43k
- text: stringlengths 132–7.29k
- text_tagged: stringlengths 173–7.33k
05f87be4c85036c69abc9404acb824c58d71f101
|
slice_ops.py
|
slice_ops.py
|
import slicer
import shapely.ops
import shapely.geometry
def border(sli, amount):
cuts = [cut.polygon(True) for cut in sli.cuts]
cut_outline = shapely.ops.cascaded_union(cuts) \
.buffer(amount / 2)
shape_outline = sli.poly.boundary.buffer(amount)
outlines = cut_outline.union(shape_outline)
newpoly = outlines.intersection(sli.poly)
sli.poly = newpoly
|
Add border operation... Damn that was easy
|
Add border operation... Damn that was easy
|
Python
|
mit
|
meshulam/sly
|
Add border operation... Damn that was easy
|
import slicer
import shapely.ops
import shapely.geometry
def border(sli, amount):
cuts = [cut.polygon(True) for cut in sli.cuts]
cut_outline = shapely.ops.cascaded_union(cuts) \
.buffer(amount / 2)
shape_outline = sli.poly.boundary.buffer(amount)
outlines = cut_outline.union(shape_outline)
newpoly = outlines.intersection(sli.poly)
sli.poly = newpoly
|
<commit_before><commit_msg>Add border operation... Damn that was easy<commit_after>
|
import slicer
import shapely.ops
import shapely.geometry
def border(sli, amount):
cuts = [cut.polygon(True) for cut in sli.cuts]
cut_outline = shapely.ops.cascaded_union(cuts) \
.buffer(amount / 2)
shape_outline = sli.poly.boundary.buffer(amount)
outlines = cut_outline.union(shape_outline)
newpoly = outlines.intersection(sli.poly)
sli.poly = newpoly
|
Add border operation... Damn that was easyimport slicer
import shapely.ops
import shapely.geometry
def border(sli, amount):
cuts = [cut.polygon(True) for cut in sli.cuts]
cut_outline = shapely.ops.cascaded_union(cuts) \
.buffer(amount / 2)
shape_outline = sli.poly.boundary.buffer(amount)
outlines = cut_outline.union(shape_outline)
newpoly = outlines.intersection(sli.poly)
sli.poly = newpoly
|
<commit_before><commit_msg>Add border operation... Damn that was easy<commit_after>import slicer
import shapely.ops
import shapely.geometry
def border(sli, amount):
cuts = [cut.polygon(True) for cut in sli.cuts]
cut_outline = shapely.ops.cascaded_union(cuts) \
.buffer(amount / 2)
shape_outline = sli.poly.boundary.buffer(amount)
outlines = cut_outline.union(shape_outline)
newpoly = outlines.intersection(sli.poly)
sli.poly = newpoly
|
|
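A note on the Shapely API used above: shapely.ops.cascaded_union was deprecated in Shapely 1.8 in favor of shapely.ops.unary_union. A minimal sketch of the same border operation against the modern call, assuming a plain shapely Polygon plus a list of cut polygons in place of the repo's Slice object:

    import shapely.ops
    import shapely.geometry

    def border(poly, cut_polys, amount):
        # Merge the cut polygons and thicken them by half the border width.
        cut_outline = shapely.ops.unary_union(cut_polys).buffer(amount / 2)
        # Thicken the shape's own boundary by the full border width.
        shape_outline = poly.boundary.buffer(amount)
        # Keep only the outline material that lies inside the original shape.
        return cut_outline.union(shape_outline).intersection(poly)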
4dfc0c49cec86f3c03b90fa66e1fc9de2ac665e6
|
samples/migrations/0012_auto_20170512_1138.py
|
samples/migrations/0012_auto_20170512_1138.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-12 14:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('samples', '0011_fluvaccine_date_applied'),
]
operations = [
migrations.AlterField(
model_name='collectedsample',
name='collection_date',
field=models.DateField(blank=True, null=True, verbose_name='Data de coleta'),
),
migrations.AlterField(
model_name='fluvaccine',
name='date_applied',
field=models.DateField(blank=True, null=True, verbose_name='Data de aplicação'),
),
]
|
Add migration file (fix fields)
|
:rocket: Add migration file (fix fields)
|
Python
|
mit
|
gems-uff/labsys,gems-uff/labsys,gems-uff/labsys
|
:rocket: Add migration file (fix fields)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-12 14:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('samples', '0011_fluvaccine_date_applied'),
]
operations = [
migrations.AlterField(
model_name='collectedsample',
name='collection_date',
field=models.DateField(blank=True, null=True, verbose_name='Data de coleta'),
),
migrations.AlterField(
model_name='fluvaccine',
name='date_applied',
field=models.DateField(blank=True, null=True, verbose_name='Data de aplicação'),
),
]
|
<commit_before><commit_msg>:rocket: Add migration file (fix fields)<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-12 14:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('samples', '0011_fluvaccine_date_applied'),
]
operations = [
migrations.AlterField(
model_name='collectedsample',
name='collection_date',
field=models.DateField(blank=True, null=True, verbose_name='Data de coleta'),
),
migrations.AlterField(
model_name='fluvaccine',
name='date_applied',
field=models.DateField(blank=True, null=True, verbose_name='Data de aplicação'),
),
]
|
:rocket: Add migration file (fix fields)# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-12 14:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('samples', '0011_fluvaccine_date_applied'),
]
operations = [
migrations.AlterField(
model_name='collectedsample',
name='collection_date',
field=models.DateField(blank=True, null=True, verbose_name='Data de coleta'),
),
migrations.AlterField(
model_name='fluvaccine',
name='date_applied',
field=models.DateField(blank=True, null=True, verbose_name='Data de aplicação'),
),
]
|
<commit_before><commit_msg>:rocket: Add migration file (fix fields)<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-12 14:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('samples', '0011_fluvaccine_date_applied'),
]
operations = [
migrations.AlterField(
model_name='collectedsample',
name='collection_date',
field=models.DateField(blank=True, null=True, verbose_name='Data de coleta'),
),
migrations.AlterField(
model_name='fluvaccine',
name='date_applied',
field=models.DateField(blank=True, null=True, verbose_name='Data de aplicação'),
),
]
|
|
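For context, AlterField operations like the ones above are emitted by manage.py makemigrations when a model field's definition changes. A hypothetical sketch of the two fields that would produce exactly these operations; the definitions would have to live inside the real `samples` app, and the surrounding model bodies are assumed:

    from django.db import models

    class CollectedSample(models.Model):
        # blank=True/null=True make the date optional in forms and in the DB.
        collection_date = models.DateField(
            blank=True, null=True, verbose_name='Data de coleta')

    class FluVaccine(models.Model):
        date_applied = models.DateField(
            blank=True, null=True, verbose_name='Data de aplicação')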
bc0aa69adc5b1e290941c221ddd498d3fb92244e
|
test.py
|
test.py
|
import nltk
from nltk.classify import MaxentClassifier
# Set up our training material in a nice dictionary.
training = {
'ingredients': [
'Pastry for 9-inch tart pan',
'Apple cider vinegar',
'3 eggs',
'1/4 cup sugar',
],
'steps': [
'Sift the powdered sugar and cocoa powder together.',
'Coarsely crush the peppercorns using a mortar and pestle.',
'While the vegetables are cooking, scrub the pig ears clean and cut away any knobby bits of cartilage so they will lie flat.',
'Heat the oven to 375 degrees.',
],
}
# Set up a list that will contain all of our tagged examples,
# which we will pass into the classifier at the end.
training_set = []
for key, val in training.items():
for i in val:
# Set up a list we can use for all of our features,
# which are just individual words in this case.
features = []
# Before we can tokenize words, we need to break the
# text out into sentences.
sentences = nltk.sent_tokenize(i)
for sentence in sentences:
features = features + nltk.word_tokenize(sentence)
# For this example, it's a good idea to normalize for case.
# You may or may not need to do this.
features = [i.lower() for i in features]
# Each feature needs a value. A typical use for a case like this
# is to use True or 1, though you can use almost any value for
# a more complicated application or analysis.
features = dict([(i, True) for i in features])
# NLTK expects you to feed a classifier a list of tuples
# where each tuple is (features, tag).
training_set.append((features, key))
def classify(s):
p = classifier.prob_classify(s)
import json
print("%s\n >>> %s, %s\n" % (json.dumps(s), p.max(), p.prob(p.max())))
return (p.max(), p.prob(p.max()))
# Train up our classifier
# TODO: get http://www.umiacs.umd.edu/~hal/megam/version0_91/ working
classifier = MaxentClassifier.train(training_set)
print()
print()
# Test it out!
# You need to feed the classifier your data in the same format you used
# to train it, in this case individual lowercase words.
classify({'apple': True, 'cider': True, 'vinegar': True, 'cocoa': True})
classify({'heat': True, 'oven': True})
classify({'prepare': True, 'oven': True})
classify({'nothing': True})
|
Add simple recipe tagger experiment
|
Add simple recipe tagger experiment
|
Python
|
isc
|
recipi/recipi,recipi/recipi,recipi/recipi
|
Add simple recipe tagger experiment
|
import nltk
from nltk.classify import MaxentClassifier
# Set up our training material in a nice dictionary.
training = {
'ingredients': [
'Pastry for 9-inch tart pan',
'Apple cider vinegar',
'3 eggs',
'1/4 cup sugar',
],
'steps': [
'Sift the powdered sugar and cocoa powder together.',
'Coarsely crush the peppercorns using a mortar and pestle.',
'While the vegetables are cooking, scrub the pig ears clean and cut away any knobby bits of cartilage so they will lie flat.',
'Heat the oven to 375 degrees.',
],
}
# Set up a list that will contain all of our tagged examples,
# which we will pass into the classifier at the end.
training_set = []
for key, val in training.items():
for i in val:
# Set up a list we can use for all of our features,
# which are just individual words in this case.
features = []
# Before we can tokenize words, we need to break the
# text out into sentences.
sentences = nltk.sent_tokenize(i)
for sentence in sentences:
features = features + nltk.word_tokenize(sentence)
# For this example, it's a good idea to normalize for case.
# You may or may not need to do this.
features = [i.lower() for i in features]
# Each feature needs a value. A typical use for a case like this
# is to use True or 1, though you can use almost any value for
# a more complicated application or analysis.
features = dict([(i, True) for i in features])
# NLTK expects you to feed a classifier a list of tuples
# where each tuple is (features, tag).
training_set.append((features, key))
def classify(s):
p = classifier.prob_classify(s)
import json
print("%s\n >>> %s, %s\n" % (json.dumps(s), p.max(), p.prob(p.max())))
return (p.max(), p.prob(p.max()))
# Train up our classifier
# TODO: get http://www.umiacs.umd.edu/~hal/megam/version0_91/ working
classifier = MaxentClassifier.train(training_set)
print()
print()
# Test it out!
# You need to feed the classifier your data in the same format you used
# to train it, in this case individual lowercase words.
classify({'apple': True, 'cider': True, 'vinegar': True, 'cocoa': True})
classify({'heat': True, 'oven': True})
classify({'prepare': True, 'oven': True})
classify({'nothing': True})
|
<commit_before><commit_msg>Add simple recipe tagger experiment<commit_after>
|
import nltk
from nltk.classify import MaxentClassifier
# Set up our training material in a nice dictionary.
training = {
'ingredients': [
'Pastry for 9-inch tart pan',
'Apple cider vinegar',
'3 eggs',
'1/4 cup sugar',
],
'steps': [
'Sift the powdered sugar and cocoa powder together.',
'Coarsely crush the peppercorns using a mortar and pestle.',
'While the vegetables are cooking, scrub the pig ears clean and cut away any knobby bits of cartilage so they will lie flat.',
'Heat the oven to 375 degrees.',
],
}
# Set up a list that will contain all of our tagged examples,
# which we will pass into the classifier at the end.
training_set = []
for key, val in training.items():
for i in val:
# Set up a list we can use for all of our features,
# which are just individual words in this case.
features = []
# Before we can tokenize words, we need to break the
# text out into sentences.
sentences = nltk.sent_tokenize(i)
for sentence in sentences:
features = features + nltk.word_tokenize(sentence)
# For this example, it's a good idea to normalize for case.
# You may or may not need to do this.
features = [i.lower() for i in features]
# Each feature needs a value. A typical use for a case like this
# is to use True or 1, though you can use almost any value for
# a more complicated application or analysis.
features = dict([(i, True) for i in features])
# NLTK expects you to feed a classifier a list of tuples
# where each tuple is (features, tag).
training_set.append((features, key))
def classify(s):
p = classifier.prob_classify(s)
import json
print("%s\n >>> %s, %s\n" % (json.dumps(s), p.max(), p.prob(p.max())))
return (p.max(), p.prob(p.max()))
# Train up our classifier
# TODO: get http://www.umiacs.umd.edu/~hal/megam/version0_91/ working
classifier = MaxentClassifier.train(training_set)
print()
print()
# Test it out!
# You need to feed the classifier your data in the same format you used
# to train it, in this case individual lowercase words.
classify({'apple': True, 'cider': True, 'vinegar': True, 'cocoa': True})
classify({'heat': True, 'oven': True})
classify({'prepare': True, 'oven': True})
classify({'nothing': True})
|
Add simple recipe tagger experimentimport nltk
from nltk.classify import MaxentClassifier
# Set up our training material in a nice dictionary.
training = {
'ingredients': [
'Pastry for 9-inch tart pan',
'Apple cider vinegar',
'3 eggs',
'1/4 cup sugar',
],
'steps': [
'Sift the powdered sugar and cocoa powder together.',
'Coarsely crush the peppercorns using a mortar and pestle.',
'While the vegetables are cooking, scrub the pig ears clean and cut away any knobby bits of cartilage so they will lie flat.',
'Heat the oven to 375 degrees.',
],
}
# Set up a list that will contain all of our tagged examples,
# which we will pass into the classifier at the end.
training_set = []
for key, val in training.items():
for i in val:
# Set up a list we can use for all of our features,
# which are just individual words in this case.
features = []
# Before we can tokenize words, we need to break the
# text out into sentences.
sentences = nltk.sent_tokenize(i)
for sentence in sentences:
features = features + nltk.word_tokenize(sentence)
# For this example, it's a good idea to normalize for case.
# You may or may not need to do this.
features = [i.lower() for i in features]
# Each feature needs a value. A typical use for a case like this
# is to use True or 1, though you can use almost any value for
# a more complicated application or analysis.
features = dict([(i, True) for i in features])
# NLTK expects you to feed a classifier a list of tuples
# where each tuple is (features, tag).
training_set.append((features, key))
def classify(s):
p = classifier.prob_classify(s)
import json
print("%s\n >>> %s, %s\n" % (json.dumps(s), p.max(), p.prob(p.max())))
return (p.max(), p.prob(p.max()))
# Train up our classifier
# TODO: get http://www.umiacs.umd.edu/~hal/megam/version0_91/ working
classifier = MaxentClassifier.train(training_set)
print()
print()
# Test it out!
# You need to feed the classifier your data in the same format you used
# to train it, in this case individual lowercase words.
classify({'apple': True, 'cider': True, 'vinegar': True, 'cocoa': True})
classify({'heat': True, 'oven': True})
classify({'prepare': True, 'oven': True})
classify({'nothing': True})
|
<commit_before><commit_msg>Add simple recipe tagger experiment<commit_after>import nltk
from nltk.classify import MaxentClassifier
# Set up our training material in a nice dictionary.
training = {
'ingredients': [
'Pastry for 9-inch tart pan',
'Apple cider vinegar',
'3 eggs',
'1/4 cup sugar',
],
'steps': [
'Sift the powdered sugar and cocoa powder together.',
'Coarsely crush the peppercorns using a mortar and pestle.',
'While the vegetables are cooking, scrub the pig ears clean and cut away any knobby bits of cartilage so they will lie flat.',
'Heat the oven to 375 degrees.',
],
}
# Set up a list that will contain all of our tagged examples,
# which we will pass into the classifier at the end.
training_set = []
for key, val in training.items():
for i in val:
# Set up a list we can use for all of our features,
# which are just individual words in this case.
features = []
# Before we can tokenize words, we need to break the
# text out into sentences.
sentences = nltk.sent_tokenize(i)
for sentence in sentences:
features = features + nltk.word_tokenize(sentence)
# For this example, it's a good idea to normalize for case.
# You may or may not need to do this.
features = [i.lower() for i in features]
# Each feature needs a value. A typical use for a case like this
# is to use True or 1, though you can use almost any value for
# a more complicated application or analysis.
features = dict([(i, True) for i in features])
# NLTK expects you to feed a classifier a list of tuples
# where each tuple is (features, tag).
training_set.append((features, key))
def classify(s):
p = classifier.prob_classify(s)
import json
print("%s\n >>> %s, %s\n" % (json.dumps(s), p.max(), p.prob(p.max())))
return (p.max(), p.prob(p.max()))
# Train up our classifier
# TODO: get http://www.umiacs.umd.edu/~hal/megam/version0_91/ working
classifier = MaxentClassifier.train(training_set)
print()
print()
# Test it out!
# You need to feed the classifier your data in the same format you used
# to train it, in this case individual lowercase words.
classify({'apple': True, 'cider': True, 'vinegar': True, 'cocoa': True})
classify({'heat': True, 'oven': True})
classify({'prepare': True, 'oven': True})
classify({'nothing': True})
|
|
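The script above hands MaxentClassifier a list of (feature_dict, label) tuples. The same training_set also trains NLTK's NaiveBayesClassifier, which sidesteps the slow iterative maxent fit when the megam binary from the TODO is unavailable; a sketch, not part of the original commit:

    from nltk.classify import NaiveBayesClassifier

    # training_set is the list of (feature_dict, label) tuples built above.
    nb = NaiveBayesClassifier.train(training_set)
    print(nb.classify({'heat': True, 'oven': True}))      # expected: 'steps'
    print(nb.classify({'cider': True, 'vinegar': True}))  # expected: 'ingredients'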
7bace5ca301124f03d7ff98669ac08c0c32da55f
|
labs/lab-5/oop.py
|
labs/lab-5/oop.py
|
#!/usr/bin/python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Animal(object):
def __init__(self):
self.voice = "???"
def speak(self):
print('A {0} says "{1}"'.format(self.__class__.__name__, self.voice))
class Cat(Animal):
def __init__(self):
super(Cat, self).__init__()
self.voice = 'Meow!'
class Dog(Animal):
def __init__(self):
super(Dog, self).__init__()
self.voice = 'Woof!'
if __name__ == '__main__':
animal = Animal()
animal.speak()
cat = Cat()
cat.speak()
dog = Dog()
dog.speak()
|
Add example OOP python script
|
Add example OOP python script
|
Python
|
apache-2.0
|
boundary/tsi-lab,jdgwartney/tsi-lab,jdgwartney/tsi-lab,boundary/tsi-lab,jdgwartney/tsi-lab,jdgwartney/tsi-lab,boundary/tsi-lab,boundary/tsi-lab
|
Add example OOP python script
|
#!/usr/bin/python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Animal(object):
def __init__(self):
self.voice = "???"
def speak(self):
print('A {0} says "{1}"'.format(self.__class__.__name__, self.voice))
class Cat(Animal):
def __init__(self):
super(Cat, self).__init__()
self.voice = 'Meow!'
class Dog(Animal):
def __init__(self):
super(Dog, self).__init__()
self.voice = 'Woof!'
if __name__ == '__main__':
animal = Animal()
animal.speak()
cat = Cat()
cat.speak()
dog = Dog()
dog.speak()
|
<commit_before><commit_msg>Add example OOP python script<commit_after>
|
#!/usr/bin/python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Animal(object):
def __init__(self):
self.voice = "???"
def speak(self):
print('A {0} says "{1}"'.format(self.__class__.__name__, self.voice))
class Cat(Animal):
def __init__(self):
super(Cat, self).__init__()
self.voice = 'Meow!'
class Dog(Animal):
def __init__(self):
super(Dog, self).__init__()
self.voice = 'Woof!'
if __name__ == '__main__':
animal = Animal()
animal.speak()
cat = Cat()
cat.speak()
dog = Dog()
dog.speak()
|
Add example OOP python script#!/usr/bin/python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Animal(object):
def __init__(self):
self.voice = "???"
def speak(self):
print('A {0} says "{1}"'.format(self.__class__.__name__, self.voice))
class Cat(Animal):
def __init__(self):
super(Cat, self).__init__()
self.voice = 'Meow!'
class Dog(Animal):
def __init__(self):
super(Dog, self).__init__()
self.voice = 'Woof!'
if __name__ == '__main__':
animal = Animal()
animal.speak()
cat = Cat()
cat.speak()
dog = Dog()
dog.speak()
|
<commit_before><commit_msg>Add example OOP python script<commit_after>#!/usr/bin/python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Animal(object):
def __init__(self):
self.voice = "???"
def speak(self):
print('A {0} says "{1}"'.format(self.__class__.__name__, self.voice))
class Cat(Animal):
def __init__(self):
super(Cat, self).__init__()
self.voice = 'Meow!'
class Dog(Animal):
def __init__(self):
super(Dog, self).__init__()
self.voice = 'Woof!'
if __name__ == '__main__':
animal = Animal()
animal.speak()
cat = Cat()
cat.speak()
dog = Dog()
dog.speak()
|
|
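The script above targets Python 2 (explicit `object` subclassing and the two-argument `super` form). The same hierarchy in idiomatic Python 3, for comparison (a sketch):

    class Animal:
        def __init__(self):
            self.voice = "???"

        def speak(self):
            # type(self).__name__ resolves to the subclass name at runtime.
            print(f'A {type(self).__name__} says "{self.voice}"')

    class Cat(Animal):
        def __init__(self):
            super().__init__()
            self.voice = 'Meow!'

    class Dog(Animal):
        def __init__(self):
            super().__init__()
            self.voice = 'Woof!'

    if __name__ == '__main__':
        for animal in (Animal(), Cat(), Dog()):
            animal.speak()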
a83a48f6c9276b86c3cc13aeb000611036a6e3c4
|
jedihttp/handlers.py
|
jedihttp/handlers.py
|
import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.get( '/healthy' )
def healthy():
return _Json({})
@app.get( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
|
import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.post( '/healthy' )
def healthy():
return _Json({})
@app.post( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
|
Make all end-points accepting post
|
Make all end-points accepting post
|
Python
|
apache-2.0
|
micbou/JediHTTP,micbou/JediHTTP,vheon/JediHTTP,vheon/JediHTTP
|
import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.get( '/healthy' )
def healthy():
return _Json({})
@app.get( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
Make all end-points accepting post
|
import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.post( '/healthy' )
def healthy():
return _Json({})
@app.post( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
|
<commit_before>import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.get( '/healthy' )
def healthy():
return _Json({})
@app.get( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
<commit_msg>Make all end-points accepting post<commit_after>
|
import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.post( '/healthy' )
def healthy():
return _Json({})
@app.post( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
|
import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.get( '/healthy' )
def healthy():
return _Json({})
@app.get( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
Make all end-points accepting postimport bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.post( '/healthy' )
def healthy():
return _Json({})
@app.post( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
|
<commit_before>import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.get( '/healthy' )
def healthy():
return _Json({})
@app.get( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
<commit_msg>Make all end-points accepting post<commit_after>import bottle
from bottle import response, request
import json
import jedi
import logging
app = bottle.Bottle( __name__ )
logger = logging.getLogger( __name__ )
@app.post( '/healthy' )
def healthy():
return _Json({})
@app.post( '/ready' )
def ready():
return _Json({})
@app.post( '/completions' )
def completion():
logger.info( 'received /completions request' )
script = _GetJediScript( request.json )
return _Json(
{
'completions': [ {
'name': completion.name,
'description': completion.description,
'docstring': completion.docstring(),
'module_path': completion.module_path,
'line': completion.line,
'column': completion.column
} for completion in script.completions() ]
} )
def _GetJediScript( request_data ):
source = request_data[ 'source' ]
line = request_data[ 'line' ]
col = request_data[ 'col' ]
path = request_data[ 'path' ]
return jedi.Script( source, line, col, path )
def _Json( data ):
response.content_type = 'application/json'
return json.dumps( data )
|
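After this commit every JediHTTP endpoint expects POST, including the health probes that previously accepted GET. A client-side sketch using the `requests` library; host and port are assumed, and the JSON keys mirror what _GetJediScript reads above:

    import requests

    base = 'http://127.0.0.1:8080'
    requests.post(base + '/healthy')   # was GET before this commit
    requests.post(base + '/ready')     # was GET before this commit
    requests.post(base + '/completions', json={
        'source': 'import os\nos.',
        'line': 2,
        'col': 3,
        'path': 'example.py',
    })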
d35f2d7310c277625ea6e2e15b887ac9620696a7
|
tests/unit/glacier/test_vault.py
|
tests/unit/glacier/test_vault.py
|
#!/usr/bin/env python
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from cStringIO import StringIO
import mock
from mock import ANY
from boto.glacier import vault
class TestVault(unittest.TestCase):
def setUp(self):
self.size_patch = mock.patch('os.path.getsize')
self.getsize = self.size_patch.start()
def tearDown(self):
self.size_patch.stop()
def test_upload_archive_small_file(self):
api = mock.Mock()
v = vault.Vault(api, None)
v.name = 'myvault'
self.getsize.return_value = 1
stringio = StringIO('content')
m = mock.mock_open()
m.return_value.read = stringio.read
api.upload_archive.return_value = {'ArchiveId': 'archive_id'}
with mock.patch('boto.glacier.vault.open', m, create=True):
archive_id = v.upload_archive('filename', 'my description')
self.assertEqual(archive_id, 'archive_id')
api.upload_archive.assert_called_with('myvault', m.return_value, ANY,
ANY, 'my description')
if __name__ == '__main__':
unittest.main()
|
Add unit test for glacier vault
|
Add unit test for glacier vault
Just verifies the args are forwarded to layer1 properly.
|
Python
|
mit
|
felix-d/boto,lochiiconnectivity/boto,weebygames/boto,abridgett/boto,appneta/boto,alex/boto,j-carl/boto,appneta/boto,rayluo/boto,lochiiconnectivity/boto,weka-io/boto,jameslegg/boto,drbild/boto,alfredodeza/boto,ocadotechnology/boto,janslow/boto,disruptek/boto,campenberger/boto,trademob/boto,elainexmas/boto,israelbenatar/boto,andresriancho/boto,kouk/boto,ekalosak/boto,shaunbrady/boto,khagler/boto,pfhayes/boto,jotes/boto,vijaylbais/boto,shipci/boto,andresriancho/boto,nexusz99/boto,lra/boto,garnaat/boto,vishnugonela/boto,jamesls/boto,tpodowd/boto,jamesls/boto,SaranyaKarthikeyan/boto,jindongh/boto,TiVoMaker/boto,darjus-amzn/boto,rosmo/boto,rjschwei/boto,disruptek/boto,jameslegg/boto,dimdung/boto,cyclecomputing/boto,nikhilraog/boto,zzzirk/boto,ric03uec/boto,ryansb/boto,awatts/boto,FATruden/boto,yangchaogit/boto,tpodowd/boto,Pretio/boto,acourtney2015/boto,dablak/boto,Timus1712/boto,Asana/boto,drbild/boto,revmischa/boto,kouk/boto,clouddocx/boto,stevenbrichards/boto,rjschwei/boto,bleib1dj/boto,varunarya10/boto,bryx-inc/boto,s0enke/boto,alex/boto,zachmullen/boto,podhmo/boto,nishigori/boto,ramitsurana/boto,serviceagility/boto,dablak/boto,ddzialak/boto
|
Add unit test for glacier vault
Just verifies the args are forwarded to layer1 properly.
|
#!/usr/bin/env python
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from cStringIO import StringIO
import mock
from mock import ANY
from boto.glacier import vault
class TestVault(unittest.TestCase):
def setUp(self):
self.size_patch = mock.patch('os.path.getsize')
self.getsize = self.size_patch.start()
def tearDown(self):
self.size_patch.stop()
def test_upload_archive_small_file(self):
api = mock.Mock()
v = vault.Vault(api, None)
v.name = 'myvault'
self.getsize.return_value = 1
stringio = StringIO('content')
m = mock.mock_open()
m.return_value.read = stringio.read
api.upload_archive.return_value = {'ArchiveId': 'archive_id'}
with mock.patch('boto.glacier.vault.open', m, create=True):
archive_id = v.upload_archive('filename', 'my description')
self.assertEqual(archive_id, 'archive_id')
api.upload_archive.assert_called_with('myvault', m.return_value, ANY,
ANY, 'my description')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unit test for glacier vault
Just verifies the args are forwarded to layer1 properly.<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from cStringIO import StringIO
import mock
from mock import ANY
from boto.glacier import vault
class TestVault(unittest.TestCase):
def setUp(self):
self.size_patch = mock.patch('os.path.getsize')
self.getsize = self.size_patch.start()
def tearDown(self):
self.size_patch.stop()
def test_upload_archive_small_file(self):
api = mock.Mock()
v = vault.Vault(api, None)
v.name = 'myvault'
self.getsize.return_value = 1
stringio = StringIO('content')
m = mock.mock_open()
m.return_value.read = stringio.read
api.upload_archive.return_value = {'ArchiveId': 'archive_id'}
with mock.patch('boto.glacier.vault.open', m, create=True):
archive_id = v.upload_archive('filename', 'my description')
self.assertEqual(archive_id, 'archive_id')
api.upload_archive.assert_called_with('myvault', m.return_value, ANY,
ANY, 'my description')
if __name__ == '__main__':
unittest.main()
|
Add unit test for glacier vault
Just verifies the args are forwarded to layer1 properly.#!/usr/bin/env python
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from cStringIO import StringIO
import mock
from mock import ANY
from boto.glacier import vault
class TestVault(unittest.TestCase):
def setUp(self):
self.size_patch = mock.patch('os.path.getsize')
self.getsize = self.size_patch.start()
def tearDown(self):
self.size_patch.stop()
def test_upload_archive_small_file(self):
api = mock.Mock()
v = vault.Vault(api, None)
v.name = 'myvault'
self.getsize.return_value = 1
stringio = StringIO('content')
m = mock.mock_open()
m.return_value.read = stringio.read
api.upload_archive.return_value = {'ArchiveId': 'archive_id'}
with mock.patch('boto.glacier.vault.open', m, create=True):
archive_id = v.upload_archive('filename', 'my description')
self.assertEqual(archive_id, 'archive_id')
api.upload_archive.assert_called_with('myvault', m.return_value, ANY,
ANY, 'my description')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unit test for glacier vault
Just verifies the args are forwarded to layer1 properly.<commit_after>#!/usr/bin/env python
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from cStringIO import StringIO
import mock
from mock import ANY
from boto.glacier import vault
class TestVault(unittest.TestCase):
def setUp(self):
self.size_patch = mock.patch('os.path.getsize')
self.getsize = self.size_patch.start()
def tearDown(self):
self.size_patch.stop()
def test_upload_archive_small_file(self):
api = mock.Mock()
v = vault.Vault(api, None)
v.name = 'myvault'
self.getsize.return_value = 1
stringio = StringIO('content')
m = mock.mock_open()
m.return_value.read = stringio.read
api.upload_archive.return_value = {'ArchiveId': 'archive_id'}
with mock.patch('boto.glacier.vault.open', m, create=True):
archive_id = v.upload_archive('filename', 'my description')
self.assertEqual(archive_id, 'archive_id')
api.upload_archive.assert_called_with('myvault', m.return_value, ANY,
ANY, 'my description')
if __name__ == '__main__':
unittest.main()
|
|
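The test above leans on Python 2 pieces: cStringIO and the third-party mock package. Under Python 3, mock ships in the standard library and io.StringIO replaces cStringIO; the equivalent open() patching pattern, demonstrated against builtins.open so the sketch runs standalone:

    import io
    from unittest import mock  # in the stdlib since Python 3.3

    stringio = io.StringIO('content')
    m = mock.mock_open()
    m.return_value.read = stringio.read   # the fake file reads our buffer
    with mock.patch('builtins.open', m):
        with open('filename') as fh:
            print(fh.read())              # -> 'content'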
b6b65f0ca7253af5325eafc6b19e7cfecda231b3
|
hw3/hw3_2b.py
|
hw3/hw3_2b.py
|
import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 8*x1 + 12*x2 + x1**2 -2*x2**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[x1]), (x2,xs[x2])])
lambda_xs = H_xs.eigenvals()
count = 0
for i in lambda_xs.keys():
if i.evalf() <= 0:
count += 1
if count == 0:
print 'Local minima'
elif count == len(lambda_xs.keys()):
print 'Local maxima'
else:
print 'Saddle point'
|
Add solution for exercise 2b of hw3
|
Add solution for exercise 2b of hw3
|
Python
|
bsd-2-clause
|
escorciav/amcs211,escorciav/amcs211
|
Add solution for exercise 2b of hw3
|
import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 8*x1 + 12*x2 + x1**2 -2*x2**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[x1]), (x2,xs[x2])])
lambda_xs = H_xs.eigenvals()
count = 0
for i in lambda_xs.keys():
if i.evalf() <= 0:
count += 1
if count == 0:
print 'Local minima'
elif count == len(lambda_xs.keys()):
print 'Local maxima'
else:
print 'Saddle point'
|
<commit_before><commit_msg>Add solution for exercise 2b of hw3<commit_after>
|
import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 8*x1 + 12*x2 + x1**2 -2*x2**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[x1]), (x2,xs[x2])])
lambda_xs = H_xs.eigenvals()
count = 0
for i in lambda_xs.keys():
if i.evalf() <= 0:
count += 1
if count == 0:
print 'Local minima'
elif count == len(lambda_xs.keys()):
print 'Local maxima'
else:
print 'Saddle point'
|
Add solution for exercise 2b of hw3import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 8*x1 + 12*x2 + x1**2 -2*x2**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[x1]), (x2,xs[x2])])
lambda_xs = H_xs.eigenvals()
count = 0
for i in lambda_xs.keys():
if i.evalf() <= 0:
count += 1
if count == 0:
print 'Local minima'
elif count == len(lambda_xs.keys()):
print 'Local maxima'
else:
print 'Saddle point'
|
<commit_before><commit_msg>Add solution for exercise 2b of hw3<commit_after>import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 8*x1 + 12*x2 + x1**2 -2*x2**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[x1]), (x2,xs[x2])])
lambda_xs = H_xs.eigenvals()
count = 0
for i in lambda_xs.keys():
if i.evalf() <= 0:
count += 1
if count == 0:
print 'Local minima'
elif count == len(lambda_xs.keys()):
print 'Local maxima'
else:
print 'Saddle point'
|
|
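A quick numeric cross-check of the script above: the gradient of f = 8*x1 + 12*x2 + x1**2 - 2*x2**2 vanishes at (x1, x2) = (-4, 3), and the constant Hessian diag(2, -4) has eigenvalues of opposite sign, so the stationary point is a saddle:

    import numpy as np

    H = np.array([[2.0, 0.0],
                  [0.0, -4.0]])    # Hessian of f (constant for this quadratic)
    eig = np.linalg.eigvalsh(H)    # -> array([-4., 2.])
    if eig.min() > 0:
        print('Local minimum')
    elif eig.max() < 0:
        print('Local maximum')
    else:
        print('Saddle point')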
71b0af732e6d151a22cc0d0b28b55020780af8b6
|
ftools.py
|
ftools.py
|
from functools import wraps
def memoize(obj):
# This is taken from the Python Decorator Library on the official Python
# wiki. https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
# Unfortunately we're using Python 2.x here and lru_cache isn't available
cache = obj.cache = {}
@wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in cache:
cache[key] = obj(*args, **kwargs)
return cache[key]
return memoizer
|
Add memoize function for python 2.x
|
Add memoize function for python 2.x
|
Python
|
mit
|
ironman5366/W.I.L.L,ironman5366/W.I.L.L
|
Add memoize function for python 2.x
|
from functools import wraps
def memoize(obj):
# This is taken from the Python Decorator Library on the official Python
# wiki. https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
# Unfortunately we're using Python 2.x here and lru_cache isn't available
cache = obj.cache = {}
@wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in cache:
cache[key] = obj(*args, **kwargs)
return cache[key]
return memoizer
|
<commit_before><commit_msg>Add memoize function for python 2.x<commit_after>
|
from functools import wraps
def memoize(obj):
# This is taken from the Python Decorator Library on the official Python
# wiki. https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
# Unfortunately we're using Python 2.x here and lru_cache isn't available
cache = obj.cache = {}
@wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in cache:
cache[key] = obj(*args, **kwargs)
return cache[key]
return memoizer
|
Add memoize function for python 2.xfrom functools import wraps
def memoize(obj):
# This is taken from the Python Decorator Library on the official Python
# wiki. https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
# Unfortunately we're using Python 2.x here and lru_cache isn't available
cache = obj.cache = {}
@wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in cache:
cache[key] = obj(*args, **kwargs)
return cache[key]
return memoizer
|
<commit_before><commit_msg>Add memoize function for python 2.x<commit_after>from functools import wraps
def memoize(obj):
# This is taken from the Python Decorator Library on the official Python
# wiki. https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
# Unfortunately we're using Python 2.x here and lru_cache isn't available
cache = obj.cache = {}
@wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in cache:
cache[key] = obj(*args, **kwargs)
return cache[key]
return memoizer
|
|
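As the comment in the snippet notes, this hand-rolled cache exists because functools.lru_cache is unavailable on Python 2. On Python 3 the standard library provides the same effect; a sketch:

    from functools import lru_cache

    @lru_cache(maxsize=None)   # unbounded cache, like the dict above
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    print(fib(80))   # returns instantly thanks to memoization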
27788308891d9cd82da7782d62b5920ea7a54f80
|
employees/management/commands/dailycheck.py
|
employees/management/commands/dailycheck.py
|
from constance import config
from datetime import datetime
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from django.shortcuts import get_list_or_404
from employees.models import Employee
class Command(BaseCommand):
help = "Update scores daily."
def change_day(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.yesterday_given = employee.today_given
employee.yesterday_received = employee.today_received
employee.today_given = 0
employee.today_received = 0
employee.save()
def change_month(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.last_month_given = employee.current_month_given
employee.last_month_score = employee.current_month_score
employee.current_month_given = 0
employee.current_month_score = 0
employee.save()
def change_year(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.last_year_given = employee.current_year_given
employee.last_year_score = employee.current_year_score
employee.current_year_given = 0
employee.current_year_score = 0
employee.save()
def send_daily_email(self):
subject = config.DAILY_EXECUTION_CONFIRMATION_SUBJECT
message = config.DAILY_EXECUTION_CONFIRMATION_MESSAGE
email = EmailMessage(subject, message, to=[config.DAILY_EXECUTION_CONFIRMATION_EMAIL])
email.send()
def send_blocked_notification_email(self, employee):
subject = config.USER_BLOCKED_NOTIFICATION_SUBJECT
message = config.USER_BLOCKED_NOTIFICATION_MESSAGE % employee.username
email = EmailMessage(subject, message, to=[employee.email])
email.send()
def evaluate_block_users(self):
employees = get_list_or_404(Employee)
for employee in employees:
if employee.yesterday_given > config.MAX_STARS_GIVEN_DAY:
employee.is_blocked = True
if employee.yesterday_received > config.MAX_STARS_RECEIVED_DAY:
employee.is_blocked = True
if employee.current_month_given > config.MAX_STARS_GIVEN_MONTHLY:
employee.is_blocked = True
if employee.current_month_score > config.MAX_STARS_RECEIVED_MONTHLY:
employee.is_blocked = True
employee.save()
try:
if employee.is_blocked:
self.send_blocked_notification_email(employee)
except Exception as e:
print e
def handle(self, *args, **options):
today = datetime.now()
self.change_day()
self.evaluate_block_users()
self.send_daily_email()
if today.day == 1:
self.change_month()
if (today.day == 1 and today.month == 1):
self.change_year()
|
Add custom command to daily check scores
|
Add custom command to daily check scores
|
Python
|
apache-2.0
|
belatrix/BackendAllStars
|
Add custom command to daily check scores
|
from constance import config
from datetime import datetime
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from django.shortcuts import get_list_or_404
from employees.models import Employee
class Command(BaseCommand):
help = "Update scores daily."
def change_day(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.yesterday_given = employee.today_given
employee.yesterday_received = employee.today_received
employee.today_given = 0
employee.today_received = 0
employee.save()
def change_month(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.last_month_given = employee.current_month_given
employee.last_month_score = employee.current_month_score
employee.current_month_given = 0
employee.current_month_score = 0
employee.save()
def change_year(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.last_year_given = employee.current_year_given
employee.last_year_score = employee.current_year_score
employee.current_year_given = 0
employee.current_year_score = 0
employee.save()
def send_daily_email(self):
subject = config.DAILY_EXECUTION_CONFIRMATION_SUBJECT
message = config.DAILY_EXECUTION_CONFIRMATION_MESSAGE
email = EmailMessage(subject, message, to=[config.DAILY_EXECUTION_CONFIRMATION_EMAIL])
email.send()
def send_blocked_notification_email(self, employee):
subject = config.USER_BLOCKED_NOTIFICATION_SUBJECT
message = config.USER_BLOCKED_NOTIFICATION_MESSAGE % employee.username
email = EmailMessage(subject, message, to=[employee.email])
email.send()
def evaluate_block_users(self):
employees = get_list_or_404(Employee)
for employee in employees:
if employee.yesterday_given > config.MAX_STARS_GIVEN_DAY:
employee.is_blocked = True
if employee.yesterday_received > config.MAX_STARS_RECEIVED_DAY:
employee.is_blocked = True
if employee.current_month_given > config.MAX_STARS_GIVEN_MONTHLY:
employee.is_blocked = True
if employee.current_month_score > config.MAX_STARS_RECEIVED_MONTHLY:
employee.is_blocked = True
employee.save()
try:
if employee.is_blocked:
self.send_blocked_notification_email(employee)
except Exception as e:
print e
def handle(self, *args, **options):
today = datetime.now()
self.change_day()
self.evaluate_block_users()
self.send_daily_email()
if today.day == 1:
self.change_month()
if (today.day == 1 and today.month == 1):
self.change_year()
|
<commit_before><commit_msg>Add custom command to daily check scores<commit_after>
|
from constance import config
from datetime import datetime
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from django.shortcuts import get_list_or_404
from employees.models import Employee
class Command(BaseCommand):
help = "Update scores daily."
def change_day(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.yesterday_given = employee.today_given
employee.yesterday_received = employee.today_received
employee.today_given = 0
employee.today_received = 0
employee.save()
def change_month(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.last_month_given = employee.current_month_given
employee.last_month_score = employee.current_month_score
employee.current_month_given = 0
employee.current_month_score = 0
employee.save()
def change_year(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.last_year_given = employee.current_year_given
employee.last_year_score = employee.current_year_score
employee.current_year_given = 0
employee.current_year_score = 0
employee.save()
def send_daily_email(self):
subject = config.DAILY_EXECUTION_CONFIRMATION_SUBJECT
message = config.DAILY_EXECUTION_CONFIRMATION_MESSAGE
email = EmailMessage(subject, message, to=[config.DAILY_EXECUTION_CONFIRMATION_EMAIL])
email.send()
def send_blocked_notification_email(self, employee):
subject = config.USER_BLOCKED_NOTIFICATION_SUBJECT
message = config.USER_BLOCKED_NOTIFICATION_MESSAGE % employee.username
email = EmailMessage(subject, message, to=[employee.email])
email.send()
def evaluate_block_users(self):
employees = get_list_or_404(Employee)
for employee in employees:
if employee.yesterday_given > config.MAX_STARS_GIVEN_DAY:
employee.is_blocked = True
if employee.yesterday_received > config.MAX_STARS_RECEIVED_DAY:
employee.is_blocked = True
if employee.current_month_given > config.MAX_STARS_GIVEN_MONTHLY:
employee.is_blocked = True
if employee.current_month_score > config.MAX_STARS_RECEIVED_MONTHLY:
employee.is_blocked = True
employee.save()
try:
if employee.is_blocked:
self.send_blocked_notification_email(employee)
except Exception as e:
print(e)
def handle(self, *args, **options):
today = datetime.now()
self.change_day()
self.evaluate_block_users()
self.send_daily_email()
if today.day == 1:
self.change_month()
if (today.day == 1 and today.month == 1):
self.change_year()
|
Add custom command to daily check scoresfrom constance import config
from datetime import datetime
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from django.shortcuts import get_list_or_404
from employees.models import Employee
class Command(BaseCommand):
help = "Update scores daily."
def change_day(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.yesterday_given = employee.today_given
employee.yesterday_received = employee.today_received
employee.today_given = 0
employee.today_received = 0
employee.save()
def change_month(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.last_month_given = employee.current_month_given
employee.last_month_score = employee.current_month_score
employee.current_month_given = 0
employee.current_month_score = 0
employee.save()
def change_year(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.last_year_given = employee.current_year_given
employee.last_year_score = employee.current_year_score
employee.current_year_given = 0
employee.current_year_score = 0
employee.save()
def send_daily_email(self):
subject = config.DAILY_EXECUTION_CONFIRMATION_SUBJECT
message = config.DAILY_EXECUTION_CONFIRMATION_MESSAGE
email = EmailMessage(subject, message, to=[config.DAILY_EXECUTION_CONFIRMATION_EMAIL])
email.send()
def send_blocked_notification_email(self, employee):
subject = config.USER_BLOCKED_NOTIFICATION_SUBJECT
message = config.USER_BLOCKED_NOTIFICATION_MESSAGE % employee.username
email = EmailMessage(subject, message, to=[employee.email])
email.send()
def evaluate_block_users(self):
employees = get_list_or_404(Employee)
for employee in employees:
if employee.yesterday_given > config.MAX_STARS_GIVEN_DAY:
employee.is_blocked = True
if employee.yesterday_received > config.MAX_STARS_RECEIVED_DAY:
employee.is_blocked = True
if employee.current_month_given > config.MAX_STARS_GIVEN_MONTHLY:
employee.is_blocked = True
if employee.current_month_score > config.MAX_STARS_RECEIVED_MONTHLY:
employee.is_blocked = True
employee.save()
try:
if employee.is_blocked:
self.send_blocked_notification_email(employee)
except Exception as e:
print(e)
def handle(self, *args, **options):
today = datetime.now()
self.change_day()
self.evaluate_block_users()
self.send_daily_email()
if today.day == 1:
self.change_month()
if (today.day == 1 and today.month == 1):
self.change_year()
|
<commit_before><commit_msg>Add custom command to daily check scores<commit_after>from constance import config
from datetime import datetime
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from django.shortcuts import get_list_or_404
from employees.models import Employee
class Command(BaseCommand):
help = "Update scores daily."
def change_day(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.yesterday_given = employee.today_given
employee.yesterday_received = employee.today_received
employee.today_given = 0
employee.today_received = 0
employee.save()
def change_month(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.last_month_given = employee.current_month_given
employee.last_month_score = employee.current_month_score
employee.current_month_given = 0
employee.current_month_score = 0
employee.save()
def change_year(self):
employees = get_list_or_404(Employee)
for employee in employees:
employee.last_year_given = employee.current_year_given
employee.last_year_score = employee.current_year_score
employee.current_year_given = 0
employee.current_year_score = 0
employee.save()
def send_daily_email(self):
subject = config.DAILY_EXECUTION_CONFIRMATION_SUBJECT
message = config.DAILY_EXECUTION_CONFIRMATION_MESSAGE
email = EmailMessage(subject, message, to=[config.DAILY_EXECUTION_CONFIRMATION_EMAIL])
email.send()
def send_blocked_notification_email(self, employee):
subject = config.USER_BLOCKED_NOTIFICATION_SUBJECT
message = config.USER_BLOCKED_NOTIFICATION_MESSAGE % employee.username
email = EmailMessage(subject, message, to=[employee.email])
email.send()
def evaluate_block_users(self):
employees = get_list_or_404(Employee)
for employee in employees:
if employee.yesterday_given > config.MAX_STARS_GIVEN_DAY:
employee.is_blocked = True
if employee.yesterday_received > config.MAX_STARS_RECEIVED_DAY:
employee.is_blocked = True
if employee.current_month_given > config.MAX_STARS_GIVEN_MONTHLY:
employee.is_blocked = True
if employee.current_month_score > config.MAX_STARS_RECEIVED_MONTHLY:
employee.is_blocked = True
employee.save()
try:
if employee.is_blocked:
self.send_blocked_notification_email(employee)
except Exception as e:
print(e)
def handle(self, *args, **options):
today = datetime.now()
self.change_day()
self.evaluate_block_users()
self.send_daily_email()
if today.day == 1:
self.change_month()
if (today.day == 1 and today.month == 1):
self.change_year()
|
|
8aac73fdc26fd838c3f91ffa9bc58e25777a5179
|
properties/tests/test_mach_angle.py
|
properties/tests/test_mach_angle.py
|
#!/usr/bin/env python
"""Test Mach angle functions.
Test data is obtained from http://www.grc.nasa.gov/WWW/k-12/airplane/machang.html.
"""
import nose
import nose.tools as nt
from properties.prandtl_meyer_function import mu_in_deg
@nt.raises(ValueError)
def test_mach_lesser_than_one():
m = 0.1
mu_in_deg(m)
def test_normal_mach():
m1 = 1.5
nt.assert_almost_equal(mu_in_deg(m1), 41.762, places=3)
m2 = 2.6
nt.assert_almost_equal(mu_in_deg(m2), 22.594, places=3)
if __name__ == '__main__':
nose.main()
|
Add tests for mach angle
|
Add tests for mach angle
|
Python
|
mit
|
iwarobots/TunnelDesign
|
Add tests for mach angle
|
#!/usr/bin/env python
"""Test Mach angle functions.
Test data is obtained from http://www.grc.nasa.gov/WWW/k-12/airplane/machang.html.
"""
import nose
import nose.tools as nt
from properties.prandtl_meyer_function import mu_in_deg
@nt.raises(ValueError)
def test_mach_lesser_than_one():
m = 0.1
mu_in_deg(m)
def test_normal_mach():
m1 = 1.5
nt.assert_almost_equal(mu_in_deg(m1), 41.762, places=3)
m2 = 2.6
nt.assert_almost_equal(mu_in_deg(m2), 22.594, places=3)
if __name__ == '__main__':
nose.main()
|
<commit_before><commit_msg>Add tests for mach angle<commit_after>
|
#!/usr/bin/env python
"""Test Mach angle functions.
Test data is obtained from http://www.grc.nasa.gov/WWW/k-12/airplane/machang.html.
"""
import nose
import nose.tools as nt
from properties.prandtl_meyer_function import mu_in_deg
@nt.raises(ValueError)
def test_mach_lesser_than_one():
m = 0.1
mu_in_deg(m)
def test_normal_mach():
m1 = 1.5
nt.assert_almost_equal(mu_in_deg(m1), 41.762, places=3)
m2 = 2.6
nt.assert_almost_equal(mu_in_deg(m2), 22.594, places=3)
if __name__ == '__main__':
nose.main()
|
Add tests for mach angle#!/usr/bin/env python
"""Test Mach angle functions.
Test data is obtained from http://www.grc.nasa.gov/WWW/k-12/airplane/machang.html.
"""
import nose
import nose.tools as nt
from properties.prandtl_meyer_function import mu_in_deg
@nt.raises(ValueError)
def test_mach_lesser_than_one():
m = 0.1
mu_in_deg(m)
def test_normal_mach():
m1 = 1.5
nt.assert_almost_equal(mu_in_deg(m1), 41.762, places=3)
m2 = 2.6
nt.assert_almost_equal(mu_in_deg(m2), 22.594, places=3)
if __name__ == '__main__':
nose.main()
|
<commit_before><commit_msg>Add tests for mach angle<commit_after>#!/usr/bin/env python
"""Test Mach angle functions.
Test data is obtained from http://www.grc.nasa.gov/WWW/k-12/airplane/machang.html.
"""
import nose
import nose.tools as nt
from properties.prandtl_meyer_function import mu_in_deg
@nt.raises(ValueError)
def test_mach_lesser_than_one():
m = 0.1
mu_in_deg(m)
def test_normal_mach():
m1 = 1.5
nt.assert_almost_equal(mu_in_deg(m1), 41.762, places=3)
m2 = 2.6
nt.assert_almost_equal(mu_in_deg(m2), 22.594, places=3)
if __name__ == '__main__':
nose.main()
|
|
d0c2ee2e0d848a586cc03ba5ac5da697b333ef32
|
Misc/listOfRandomNum.py
|
Misc/listOfRandomNum.py
|
#List of randoms
import random
import math
numList = []
for i in range(10):
numList.append(random.randrange(1, 20))
for i in numList:
print("Rand num = " + str(i))
|
Create list of random num
|
Create list of random num
|
Python
|
mit
|
JLJTECH/TutorialTesting
|
Create list of random num
|
#List of randoms
import random
import math
numList = []
for i in range(10):
numList.append(random.randrange(1, 20))
for i in numList:
print("Rand num = " + str(i))
|
<commit_before><commit_msg>Create list of random num<commit_after>
|
#List of randoms
import random
import math
numList = []
for i in range(10):
numList.append(random.randrange(1, 20))
for i in numList:
print("Rand num = " + str(i))
|
Create list of random num#List of randoms
import random
import math
numList = []
for i in range(10):
numList.append(random.randrange(1, 20))
for i in numList:
print("Rand num = " + str(i))
|
<commit_before><commit_msg>Create list of random num<commit_after>#List of randoms
import random
import math
numList = []
for i in range(10):
numList.append(random.randrange(1, 20))
for i in numList:
print("Rand num = " + str(i))
|
|
cd3f59026b9026d62537b38d4e9d70a740e88018
|
tests/test_java_mode.py
|
tests/test_java_mode.py
|
import editor_manager
import editor_common
import curses
import curses.ascii
import keytab
from ped_test_util import read_str,validate_screen,editor_test_suite,play_macro,screen_size,match_attr
def test_java_mode(testdir,capsys):
with capsys.disabled():
def main(stdscr):
lines_to_test = [
'// This is a simple Java program.',
'// FileName : "HelloWorld.java"',
'class HelloWorld',
'{',
' // Your program begins with a call to main()',
' // Prints "Hello, World" to the terminal window',
' public static void main(String args[])',
' {',
' System.out.println("Hello, World");',
' }',
'}'
]
args = { "java_test":"\n".join(lines_to_test)}
testfile = testdir.makefile(".java", **args)
green = curses.color_pair(1)
red = curses.color_pair(2)
cyan = curses.color_pair(3)
white = curses.color_pair(4)
ed = editor_common.Editor(stdscr,None,str(testfile))
ed.setWin(stdscr.subwin(ed.max_y,ed.max_x,0,0))
ed.main(False)
ed.main(False)
validate_screen(ed)
assert(ed.mode and ed.mode.name() == "java_mode")
match_list = [(0,0,32,red),(2,0,5,cyan),(4,4,44,red),(8,27,14,green)]
for line,pos,width,attr in match_list:
assert(match_attr(ed.scr,line+1,pos,1,width,attr))
ed.goto(7,5)
ed.endln()
ed.main(False,10)
assert(ed.getLine() == 8 and ed.getPos() == 4)
ed.insert('if (20 > 18) {')
ed.main(False,10)
ed.insert('System.out.println("20 greater than 18");')
ed.main(False,10)
ed.insert('}')
ed.main(False,10)
ed.main(False)
ed.main(False)
assert(match_attr(ed.scr,9,4,1,2,cyan))
assert(match_attr(ed.scr,10,27,1,20,green))
assert(ed.getLine() == 11 and ed.getPos() == 4)
curses.wrapper(main)
|
Add tests for java mode
|
Add tests for java mode
|
Python
|
mit
|
jpfxgood/ped
|
Add tests for java mode
|
import editor_manager
import editor_common
import curses
import curses.ascii
import keytab
from ped_test_util import read_str,validate_screen,editor_test_suite,play_macro,screen_size,match_attr
def test_java_mode(testdir,capsys):
with capsys.disabled():
def main(stdscr):
lines_to_test = [
'// This is a simple Java program.',
'// FileName : "HelloWorld.java"',
'class HelloWorld',
'{',
' // Your program begins with a call to main()',
' // Prints "Hello, World" to the terminal window',
' public static void main(String args[])',
' {',
' System.out.println("Hello, World");',
' }',
'}'
]
args = { "java_test":"\n".join(lines_to_test)}
testfile = testdir.makefile(".java", **args)
green = curses.color_pair(1)
red = curses.color_pair(2)
cyan = curses.color_pair(3)
white = curses.color_pair(4)
ed = editor_common.Editor(stdscr,None,str(testfile))
ed.setWin(stdscr.subwin(ed.max_y,ed.max_x,0,0))
ed.main(False)
ed.main(False)
validate_screen(ed)
assert(ed.mode and ed.mode.name() == "java_mode")
match_list = [(0,0,32,red),(2,0,5,cyan),(4,4,44,red),(8,27,14,green)]
for line,pos,width,attr in match_list:
assert(match_attr(ed.scr,line+1,pos,1,width,attr))
ed.goto(7,5)
ed.endln()
ed.main(False,10)
assert(ed.getLine() == 8 and ed.getPos() == 4)
ed.insert('if (20 > 18) {')
ed.main(False,10)
ed.insert('System.out.println("20 greater than 18");')
ed.main(False,10)
ed.insert('}')
ed.main(False,10)
ed.main(False)
ed.main(False)
assert(match_attr(ed.scr,9,4,1,2,cyan))
assert(match_attr(ed.scr,10,27,1,20,green))
assert(ed.getLine() == 11 and ed.getPos() == 4)
curses.wrapper(main)
|
<commit_before><commit_msg>Add tests for java mode<commit_after>
|
import editor_manager
import editor_common
import curses
import curses.ascii
import keytab
from ped_test_util import read_str,validate_screen,editor_test_suite,play_macro,screen_size,match_attr
def test_java_mode(testdir,capsys):
with capsys.disabled():
def main(stdscr):
lines_to_test = [
'// This is a simple Java program.',
'// FileName : "HelloWorld.java"',
'class HelloWorld',
'{',
' // Your program begins with a call to main()',
' // Prints "Hello, World" to the terminal window',
' public static void main(String args[])',
' {',
' System.out.println("Hello, World");',
' }',
'}'
]
args = { "java_test":"\n".join(lines_to_test)}
testfile = testdir.makefile(".java", **args)
green = curses.color_pair(1)
red = curses.color_pair(2)
cyan = curses.color_pair(3)
white = curses.color_pair(4)
ed = editor_common.Editor(stdscr,None,str(testfile))
ed.setWin(stdscr.subwin(ed.max_y,ed.max_x,0,0))
ed.main(False)
ed.main(False)
validate_screen(ed)
assert(ed.mode and ed.mode.name() == "java_mode")
match_list = [(0,0,32,red),(2,0,5,cyan),(4,4,44,red),(8,27,14,green)]
for line,pos,width,attr in match_list:
assert(match_attr(ed.scr,line+1,pos,1,width,attr))
ed.goto(7,5)
ed.endln()
ed.main(False,10)
assert(ed.getLine() == 8 and ed.getPos() == 4)
ed.insert('if (20 > 18) {')
ed.main(False,10)
ed.insert('System.out.println("20 greater than 18");')
ed.main(False,10)
ed.insert('}')
ed.main(False,10)
ed.main(False)
ed.main(False)
assert(match_attr(ed.scr,9,4,1,2,cyan))
assert(match_attr(ed.scr,10,27,1,20,green))
assert(ed.getLine() == 11 and ed.getPos() == 4)
curses.wrapper(main)
|
Add tests for java modeimport editor_manager
import editor_common
import curses
import curses.ascii
import keytab
from ped_test_util import read_str,validate_screen,editor_test_suite,play_macro,screen_size,match_attr
def test_java_mode(testdir,capsys):
with capsys.disabled():
def main(stdscr):
lines_to_test = [
'// This is a simple Java program.',
'// FileName : "HelloWorld.java"',
'class HelloWorld',
'{',
' // Your program begins with a call to main()',
' // Prints "Hello, World" to the terminal window',
' public static void main(String args[])',
' {',
' System.out.println("Hello, World");',
' }',
'}'
]
args = { "java_test":"\n".join(lines_to_test)}
testfile = testdir.makefile(".java", **args)
green = curses.color_pair(1)
red = curses.color_pair(2)
cyan = curses.color_pair(3)
white = curses.color_pair(4)
ed = editor_common.Editor(stdscr,None,str(testfile))
ed.setWin(stdscr.subwin(ed.max_y,ed.max_x,0,0))
ed.main(False)
ed.main(False)
validate_screen(ed)
assert(ed.mode and ed.mode.name() == "java_mode")
match_list = [(0,0,32,red),(2,0,5,cyan),(4,4,44,red),(8,27,14,green)]
for line,pos,width,attr in match_list:
assert(match_attr(ed.scr,line+1,pos,1,width,attr))
ed.goto(7,5)
ed.endln()
ed.main(False,10)
assert(ed.getLine() == 8 and ed.getPos() == 4)
ed.insert('if (20 > 18) {')
ed.main(False,10)
ed.insert('System.out.println("20 greater than 18");')
ed.main(False,10)
ed.insert('}')
ed.main(False,10)
ed.main(False)
ed.main(False)
assert(match_attr(ed.scr,9,4,1,2,cyan))
assert(match_attr(ed.scr,10,27,1,20,green))
assert(ed.getLine() == 11 and ed.getPos() == 4)
curses.wrapper(main)
|
<commit_before><commit_msg>Add tests for java mode<commit_after>import editor_manager
import editor_common
import curses
import curses.ascii
import keytab
from ped_test_util import read_str,validate_screen,editor_test_suite,play_macro,screen_size,match_attr
def test_java_mode(testdir,capsys):
with capsys.disabled():
def main(stdscr):
lines_to_test = [
'// This is a simple Java program.',
'// FileName : "HelloWorld.java"',
'class HelloWorld',
'{',
' // Your program begins with a call to main()',
' // Prints "Hello, World" to the terminal window',
' public static void main(String args[])',
' {',
' System.out.println("Hello, World");',
' }',
'}'
]
args = { "java_test":"\n".join(lines_to_test)}
testfile = testdir.makefile(".java", **args)
green = curses.color_pair(1)
red = curses.color_pair(2)
cyan = curses.color_pair(3)
white = curses.color_pair(4)
ed = editor_common.Editor(stdscr,None,str(testfile))
ed.setWin(stdscr.subwin(ed.max_y,ed.max_x,0,0))
ed.main(False)
ed.main(False)
validate_screen(ed)
assert(ed.mode and ed.mode.name() == "java_mode")
match_list = [(0,0,32,red),(2,0,5,cyan),(4,4,44,red),(8,27,14,green)]
for line,pos,width,attr in match_list:
assert(match_attr(ed.scr,line+1,pos,1,width,attr))
ed.goto(7,5)
ed.endln()
ed.main(False,10)
assert(ed.getLine() == 8 and ed.getPos() == 4)
ed.insert('if (20 > 18) {')
ed.main(False,10)
ed.insert('System.out.println("20 greater than 18");')
ed.main(False,10)
ed.insert('}')
ed.main(False,10)
ed.main(False)
ed.main(False)
assert(match_attr(ed.scr,9,4,1,2,cyan))
assert(match_attr(ed.scr,10,27,1,20,green))
assert(ed.getLine() == 11 and ed.getPos() == 4)
curses.wrapper(main)
|
|
f03f976696077db4146ea78e0d0b1ef5767f00ca
|
tests/unit/test_sign.py
|
tests/unit/test_sign.py
|
# Import libnacl libs
import libnacl.sign
# Import pythonlibs
import unittest
class TestSigning(unittest.TestCase):
'''
'''
def test_sign(self):
msg = ('Well, that\'s no ordinary rabbit. That\'s the most foul, '
'cruel, and bad-tempered rodent you ever set eyes on.')
signer = libnacl.sign.Signer()
signed = signer.sign(msg)
self.assertNotEqual(msg, signed)
veri = libnacl.sign.Verifier(signer.hex_vk())
verified = veri.verify(signed)
self.assertEqual(verified, msg)
|
Add high level signing capabilities
|
Add high level signing capabilities
|
Python
|
apache-2.0
|
cachedout/libnacl,saltstack/libnacl,johnttan/libnacl,mindw/libnacl,coinkite/libnacl,RaetProtocol/libnacl
|
Add high level signing capabilities
|
# Import libnacl libs
import libnacl.sign
# Import pythonlibs
import unittest
class TestSigning(unittest.TestCase):
'''
'''
def test_sign(self):
msg = ('Well, that\'s no ordinary rabbit. That\'s the most foul, '
'cruel, and bad-tempered rodent you ever set eyes on.')
signer = libnacl.sign.Signer()
signed = signer.sign(msg)
self.assertNotEqual(msg, signed)
veri = libnacl.sign.Verifier(signer.hex_vk())
verified = veri.verify(signed)
self.assertEqual(verified, msg)
|
<commit_before><commit_msg>Add high level signing capabilities<commit_after>
|
# Import libnacl libs
import libnacl.sign
# Import pythonlibs
import unittest
class TestSigning(unittest.TestCase):
'''
'''
def test_sign(self):
msg = ('Well, that\'s no ordinary rabbit. That\'s the most foul, '
'cruel, and bad-tempered rodent you ever set eyes on.')
signer = libnacl.sign.Signer()
signed = signer.sign(msg)
self.assertNotEqual(msg, signed)
veri = libnacl.sign.Verifier(signer.hex_vk())
verified = veri.verify(signed)
self.assertEqual(verified, msg)
|
Add high level signing capabilities# Import libnacl libs
import libnacl.sign
# Import pythonlibs
import unittest
class TestSigning(unittest.TestCase):
'''
'''
def test_sign(self):
msg = ('Well, that\'s no ordinary rabbit. That\'s the most foul, '
'cruel, and bad-tempered rodent you ever set eyes on.')
signer = libnacl.sign.Signer()
signed = signer.sign(msg)
self.assertNotEqual(msg, signed)
veri = libnacl.sign.Verifier(signer.hex_vk())
verified = veri.verify(signed)
self.assertEqual(verified, msg)
|
<commit_before><commit_msg>Add high level signing capabilities<commit_after># Import libnacl libs
import libnacl.sign
# Import pythonlibs
import unittest
class TestSigning(unittest.TestCase):
'''
'''
def test_sign(self):
msg = ('Well, that\'s no ordinary rabbit. That\'s the most foul, '
'cruel, and bad-tempered rodent you ever set eyes on.')
signer = libnacl.sign.Signer()
signed = signer.sign(msg)
self.assertNotEqual(msg, signed)
veri = libnacl.sign.Verifier(signer.hex_vk())
verified = veri.verify(signed)
self.assertEqual(verified, msg)
|
|
bb7031385af7931f9e12a8987375f929bcfb6b5a
|
scripts/devdeps.py
|
scripts/devdeps.py
|
from __future__ import print_function
import sys
try:
import colorama
def blue(text): return "%s%s%s" % (colorama.Fore.BLUE, text, colorama.Style.RESET_ALL)
def red(text): return "%s%s%s" % (colorama.Fore.RED, text, colorama.Style.RESET_ALL)
except ImportError:
def blue(text) : return text
def red(text) : return text
def depend_check(deps_name, *args):
"""Check for missing dependencies
"""
found = True
missing = []
for dependency in args:
try:
__import__(dependency)
except ImportError as e:
missing.append(dependency)
found = False
print('-'*80)
if not found:
print(red("You are missing the following %s dependencies:") % deps_name)
for dep in missing:
name = pkg_info_dict.get(dep, dep)
print(" * ", name)
print()
return False
else:
print(blue("All %s dependencies installed! You are good to go!\n") % deps_name)
return True
if __name__ == '__main__':
#Dictionary maps module names to package names
pkg_info_dict = {'bs4' : 'beautiful-soup',
'websocket' : 'websocket-client',
'sphinx_bootstrap_theme' : 'sphinx-bootstrap-theme',
'sphinxcontrib.httpdomain' : 'sphinxcontrib-httpdomain',
'pdiffer' : 'pdiff'
}
dev_deps = ['bs4', 'colorama', 'pdiffer', 'boto', 'nose', 'mock', 'coverage',
'websocket']
depend_check('Dev', *dev_deps)
docs_deps = ['graphviz', 'sphinx', 'pygments', 'sphinx_bootstrap_theme',
'sphinxcontrib.httpdomain']
depend_check('Docs', *docs_deps)
|
Create script that checks for dev and docs dependencies.
|
Create script that checks for dev and docs dependencies.
|
Python
|
bsd-3-clause
|
justacec/bokeh,schoolie/bokeh,aiguofer/bokeh,ChinaQuants/bokeh,lukebarnard1/bokeh,roxyboy/bokeh,jakirkham/bokeh,khkaminska/bokeh,srinathv/bokeh,msarahan/bokeh,Karel-van-de-Plassche/bokeh,CrazyGuo/bokeh,timsnyder/bokeh,tacaswell/bokeh,stonebig/bokeh,carlvlewis/bokeh,xguse/bokeh,percyfal/bokeh,stuart-knock/bokeh,bokeh/bokeh,stuart-knock/bokeh,caseyclements/bokeh,ericmjl/bokeh,ptitjano/bokeh,gpfreitas/bokeh,DuCorey/bokeh,daodaoliang/bokeh,schoolie/bokeh,dennisobrien/bokeh,schoolie/bokeh,birdsarah/bokeh,ChinaQuants/bokeh,bokeh/bokeh,abele/bokeh,stonebig/bokeh,htygithub/bokeh,rothnic/bokeh,alan-unravel/bokeh,percyfal/bokeh,aavanian/bokeh,awanke/bokeh,abele/bokeh,paultcochrane/bokeh,ericmjl/bokeh,schoolie/bokeh,PythonCharmers/bokeh,msarahan/bokeh,phobson/bokeh,phobson/bokeh,draperjames/bokeh,muku42/bokeh,philippjfr/bokeh,canavandl/bokeh,schoolie/bokeh,muku42/bokeh,akloster/bokeh,ChristosChristofidis/bokeh,htygithub/bokeh,jakirkham/bokeh,srinathv/bokeh,DuCorey/bokeh,akloster/bokeh,xguse/bokeh,roxyboy/bokeh,rs2/bokeh,clairetang6/bokeh,carlvlewis/bokeh,saifrahmed/bokeh,aavanian/bokeh,justacec/bokeh,rs2/bokeh,mutirri/bokeh,aavanian/bokeh,caseyclements/bokeh,timsnyder/bokeh,timothydmorton/bokeh,eteq/bokeh
|
Create script that checks for dev and docs dependencies.
|
from __future__ import print_function
import sys
try:
import colorama
def blue(text): return "%s%s%s" % (colorama.Fore.BLUE, text, colorama.Style.RESET_ALL)
def red(text): return "%s%s%s" % (colorama.Fore.RED, text, colorama.Style.RESET_ALL)
except ImportError:
def blue(text) : return text
def red(text) : return text
def depend_check(deps_name, *args):
"""Check for missing dependencies
"""
found = True
missing = []
for dependency in args:
try:
__import__(dependency)
except ImportError as e:
missing.append(dependency)
found = False
print('-'*80)
if not found:
print(red("You are missing the following %s dependencies:") % deps_name)
for dep in missing:
name = pkg_info_dict.get(dep, dep)
print(" * ", name)
print()
return False
else:
print(blue("All %s dependencies installed! You are good to go!\n") % deps_name)
return True
if __name__ == '__main__':
#Dictionary maps module names to package names
pkg_info_dict = {'bs4' : 'beautiful-soup',
'websocket' : 'websocket-client',
'sphinx_bootstrap_theme' : 'sphinx-bootstrap-theme',
'sphinxcontrib.httpdomain' : 'sphinxcontrib-httpdomain',
'pdiffer' : 'pdiff'
}
dev_deps = ['bs4', 'colorama', 'pdiffer', 'boto', 'nose', 'mock', 'coverage',
'websocket']
depend_check('Dev', *dev_deps)
docs_deps = ['graphviz', 'sphinx', 'pygments', 'sphinx_bootstrap_theme',
'sphinxcontrib.httpdomain']
depend_check('Docs', *docs_deps)
|
<commit_before><commit_msg>Create script that checks for dev and docs dependencies.<commit_after>
|
from __future__ import print_function
import sys
try:
import colorama
def blue(text): return "%s%s%s" % (colorama.Fore.BLUE, text, colorama.Style.RESET_ALL)
def red(text): return "%s%s%s" % (colorama.Fore.RED, text, colorama.Style.RESET_ALL)
except ImportError:
def blue(text) : return text
def red(text) : return text
def depend_check(deps_name, *args):
"""Check for missing dependencies
"""
found = True
missing = []
for dependency in args:
try:
__import__(dependency)
except ImportError as e:
missing.append(dependency)
found = False
print('-'*80)
if not found:
print(red("You are missing the following %s dependencies:") % deps_name)
for dep in missing:
name = pkg_info_dict.get(dep, dep)
print(" * ", name)
print()
return False
else:
print(blue("All %s dependencies installed! You are good to go!\n") % deps_name)
return True
if __name__ == '__main__':
#Dictionary maps module names to package names
pkg_info_dict = {'bs4' : 'beautiful-soup',
'websocket' : 'websocket-client',
'sphinx_bootstrap_theme' : 'sphinx-bootstrap-theme',
'sphinxcontrib.httpdomain' : 'sphinxcontrib-httpdomain',
'pdiffer' : 'pdiff'
}
dev_deps = ['bs4', 'colorama', 'pdiffer', 'boto', 'nose', 'mock', 'coverage',
'websocket']
depend_check('Dev', *dev_deps)
docs_deps = ['graphviz', 'sphinx', 'pygments', 'sphinx_bootstrap_theme',
'sphinxcontrib.httpdomain']
depend_check('Docs', *docs_deps)
|
Create script that checks for dev and docs dependencies.from __future__ import print_function
import sys
try:
import colorama
def blue(text): return "%s%s%s" % (colorama.Fore.BLUE, text, colorama.Style.RESET_ALL)
def red(text): return "%s%s%s" % (colorama.Fore.RED, text, colorama.Style.RESET_ALL)
except ImportError:
def blue(text) : return text
def red(text) : return text
def depend_check(deps_name, *args):
"""Check for missing dependencies
"""
found = True
missing = []
for dependency in args:
try:
__import__(dependency)
except ImportError as e:
missing.append(dependency)
found = False
print('-'*80)
if not found:
print(red("You are missing the following %s dependencies:") % deps_name)
for dep in missing:
name = pkg_info_dict.get(dep, dep)
print(" * ", name)
print()
return False
else:
print(blue("All %s dependencies installed! You are good to go!\n") % deps_name)
return True
if __name__ == '__main__':
#Dictionary maps module names to package names
pkg_info_dict = {'bs4' : 'beautiful-soup',
'websocket' : 'websocket-client',
'sphinx_bootstrap_theme' : 'sphinx-bootstrap-theme',
'sphinxcontrib.httpdomain' : 'sphinxcontrib-httpdomain',
'pdiffer' : 'pdiff'
}
dev_deps = ['bs4', 'colorama', 'pdiffer', 'boto', 'nose', 'mock', 'coverage',
'websocket']
depend_check('Dev', *dev_deps)
docs_deps = ['graphviz', 'sphinx', 'pygments', 'sphinx_bootstrap_theme',
'sphinxcontrib.httpdomain']
depend_check('Docs', *docs_deps)
|
<commit_before><commit_msg>Create script that checks for dev and docs dependencies.<commit_after>from __future__ import print_function
import sys
try:
import colorama
def blue(text): return "%s%s%s" % (colorama.Fore.BLUE, text, colorama.Style.RESET_ALL)
def red(text): return "%s%s%s" % (colorama.Fore.RED, text, colorama.Style.RESET_ALL)
except ImportError:
def blue(text) : return text
def red(text) : return text
def depend_check(deps_name, *args):
"""Check for missing dependencies
"""
found = True
missing = []
for dependency in args:
try:
__import__(dependency)
except ImportError as e:
missing.append(dependency)
found = False
print('-'*80)
if not found:
print(red("You are missing the following %s dependencies:") % deps_name)
for dep in missing:
name = pkg_info_dict.get(dep, dep)
print(" * ", name)
print()
return False
else:
print(blue("All %s dependencies installed! You are good to go!\n") % deps_name)
return True
if __name__ == '__main__':
#Dictionary maps module names to package names
pkg_info_dict = {'bs4' : 'beautiful-soup',
'websocket' : 'websocket-client',
'sphinx_bootstrap_theme' : 'sphinx-bootstrap-theme',
'sphinxcontrib.httpdomain' : 'sphinxcontrib-httpdomain',
'pdiffer' : 'pdiff'
}
dev_deps = ['bs4', 'colorama', 'pdiffer', 'boto', 'nose', 'mock', 'coverage',
'websocket']
depend_check('Dev', *dev_deps)
docs_deps = ['graphviz', 'sphinx', 'pygments', 'sphinx_bootstrap_theme',
'sphinxcontrib.httpdomain']
depend_check('Docs', *docs_deps)
|
|
f0da1774514c839b4b97fa92d2202437932dc99a
|
analysis/plot-skeleton.py
|
analysis/plot-skeleton.py
|
#!/usr/bin/env python
import climate
import database
import plots
@climate.annotate(
root='plot data rooted at this path',
pattern=('plot data from files matching this pattern', 'option'),
)
def main(root, pattern='*/*block02/*trial00*.csv.gz'):
with plots.space() as ax:
for trial in database.Experiment(root).trials_matching(pattern):
plots.skeleton(ax, trial, 100)
break
if __name__ == '__main__':
climate.call(main)
|
Add a small driver for plotting skeletons.
|
Add a small driver for plotting skeletons.
|
Python
|
mit
|
lmjohns3/cube-experiment,lmjohns3/cube-experiment,lmjohns3/cube-experiment
|
Add a small driver for plotting skeletons.
|
#!/usr/bin/env python
import climate
import database
import plots
@climate.annotate(
root='plot data rooted at this path',
pattern=('plot data from files matching this pattern', 'option'),
)
def main(root, pattern='*/*block02/*trial00*.csv.gz'):
with plots.space() as ax:
for trial in database.Experiment(root).trials_matching(pattern):
plots.skeleton(ax, trial, 100)
break
if __name__ == '__main__':
climate.call(main)
|
<commit_before><commit_msg>Add a small driver for plotting skeletons.<commit_after>
|
#!/usr/bin/env python
import climate
import database
import plots
@climate.annotate(
root='plot data rooted at this path',
pattern=('plot data from files matching this pattern', 'option'),
)
def main(root, pattern='*/*block02/*trial00*.csv.gz'):
with plots.space() as ax:
for trial in database.Experiment(root).trials_matching(pattern):
plots.skeleton(ax, trial, 100)
break
if __name__ == '__main__':
climate.call(main)
|
Add a small driver for plotting skeletons.#!/usr/bin/env python
import climate
import database
import plots
@climate.annotate(
root='plot data rooted at this path',
pattern=('plot data from files matching this pattern', 'option'),
)
def main(root, pattern='*/*block02/*trial00*.csv.gz'):
with plots.space() as ax:
for trial in database.Experiment(root).trials_matching(pattern):
plots.skeleton(ax, trial, 100)
break
if __name__ == '__main__':
climate.call(main)
|
<commit_before><commit_msg>Add a small driver for plotting skeletons.<commit_after>#!/usr/bin/env python
import climate
import database
import plots
@climate.annotate(
root='plot data rooted at this path',
pattern=('plot data from files matching this pattern', 'option'),
)
def main(root, pattern='*/*block02/*trial00*.csv.gz'):
with plots.space() as ax:
for trial in database.Experiment(root).trials_matching(pattern):
plots.skeleton(ax, trial, 100)
break
if __name__ == '__main__':
climate.call(main)
|
|
872dd45173e889db06e9b16105492c241f7badae
|
examples/rpc_dynamic.py
|
examples/rpc_dynamic.py
|
import asyncio
import aiozmq
import aiozmq.rpc
class DynamicHandler(aiozmq.rpc.AttrHandler):
def __init__(self, namespace=()):
self.namespace = namespace
def __getitem__(self, key):
try:
return getattr(self, key)
except AttributeError:
return DynamicHandler(self.namespace + (key,))
@aiozmq.rpc.method
def func(self):
return (self.namespace, 'val')
@asyncio.coroutine
def go():
server = yield from aiozmq.rpc.start_server(
DynamicHandler(), bind='tcp://*:*')
server_addr = next(iter(server.transport.bindings()))
client = yield from aiozmq.rpc.open_client(
connect=server_addr)
ret = yield from client.rpc.func()
assert ((), 'val') == ret, ret
ret = yield from client.rpc.a.func()
assert (('a',), 'val') == ret, ret
ret = yield from client.rpc.a.b.func()
assert (('a', 'b'), 'val') == ret, ret
server.close()
client.close()
def main():
asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
|
Add an example for dynamic RPC lookup.
|
Add an example for dynamic RPC lookup.
|
Python
|
bsd-2-clause
|
claws/aiozmq,aio-libs/aiozmq,asteven/aiozmq,MetaMemoryT/aiozmq
|
Add an example for dynamic RPC lookup.
|
import asyncio
import aiozmq
import aiozmq.rpc
class DynamicHandler(aiozmq.rpc.AttrHandler):
def __init__(self, namespace=()):
self.namespace = namespace
def __getitem__(self, key):
try:
return getattr(self, key)
except AttributeError:
return DynamicHandler(self.namespace + (key,))
@aiozmq.rpc.method
def func(self):
return (self.namespace, 'val')
@asyncio.coroutine
def go():
server = yield from aiozmq.rpc.start_server(
DynamicHandler(), bind='tcp://*:*')
server_addr = next(iter(server.transport.bindings()))
client = yield from aiozmq.rpc.open_client(
connect=server_addr)
ret = yield from client.rpc.func()
assert ((), 'val') == ret, ret
ret = yield from client.rpc.a.func()
assert (('a',), 'val') == ret, ret
ret = yield from client.rpc.a.b.func()
assert (('a', 'b'), 'val') == ret, ret
server.close()
client.close()
def main():
asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add an example for dynamic RPC lookup.<commit_after>
|
import asyncio
import aiozmq
import aiozmq.rpc
class DynamicHandler(aiozmq.rpc.AttrHandler):
def __init__(self, namespace=()):
self.namespace = namespace
def __getitem__(self, key):
try:
return getattr(self, key)
except AttributeError:
return DynamicHandler(self.namespace + (key,))
@aiozmq.rpc.method
def func(self):
return (self.namespace, 'val')
@asyncio.coroutine
def go():
server = yield from aiozmq.rpc.start_server(
DynamicHandler(), bind='tcp://*:*')
server_addr = next(iter(server.transport.bindings()))
client = yield from aiozmq.rpc.open_client(
connect=server_addr)
ret = yield from client.rpc.func()
assert ((), 'val') == ret, ret
ret = yield from client.rpc.a.func()
assert (('a',), 'val') == ret, ret
ret = yield from client.rpc.a.b.func()
assert (('a', 'b'), 'val') == ret, ret
server.close()
client.close()
def main():
asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
|
Add an example for dynamic RPC lookup.import asyncio
import aiozmq
import aiozmq.rpc
class DynamicHandler(aiozmq.rpc.AttrHandler):
def __init__(self, namespace=()):
self.namespace = namespace
def __getitem__(self, key):
try:
return getattr(self, key)
except AttributeError:
return DynamicHandler(self.namespace + (key,))
@aiozmq.rpc.method
def func(self):
return (self.namespace, 'val')
@asyncio.coroutine
def go():
server = yield from aiozmq.rpc.start_server(
DynamicHandler(), bind='tcp://*:*')
server_addr = next(iter(server.transport.bindings()))
client = yield from aiozmq.rpc.open_client(
connect=server_addr)
ret = yield from client.rpc.func()
assert ((), 'val') == ret, ret
ret = yield from client.rpc.a.func()
assert (('a',), 'val') == ret, ret
ret = yield from client.rpc.a.b.func()
assert (('a', 'b'), 'val') == ret, ret
server.close()
client.close()
def main():
asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add an example for dynamic RPC lookup.<commit_after>import asyncio
import aiozmq
import aiozmq.rpc
class DynamicHandler(aiozmq.rpc.AttrHandler):
def __init__(self, namespace=()):
self.namespace = namespace
def __getitem__(self, key):
try:
return getattr(self, key)
except AttributeError:
return DynamicHandler(self.namespace + (key,))
@aiozmq.rpc.method
def func(self):
return (self.namespace, 'val')
@asyncio.coroutine
def go():
server = yield from aiozmq.rpc.start_server(
DynamicHandler(), bind='tcp://*:*')
server_addr = next(iter(server.transport.bindings()))
client = yield from aiozmq.rpc.open_client(
connect=server_addr)
ret = yield from client.rpc.func()
assert ((), 'val') == ret, ret
ret = yield from client.rpc.a.func()
assert (('a',), 'val') == ret, ret
ret = yield from client.rpc.a.b.func()
assert (('a', 'b'), 'val') == ret, ret
server.close()
client.close()
def main():
asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
|
|
d41005d14239a93237fb839084f029208b94539d
|
common/profile_default/ipython_notebook_config.py
|
common/profile_default/ipython_notebook_config.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' https://*.jupyter.org https://jupyter.github.io https://*.tmpnb.org"
},
'static_url_prefix': 'https://cdn.jupyter.org/notebook/3.1.0/'
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' https://*.jupyter.org https://jupyter.github.io https://*.tmpnb.org"
},
'static_url_prefix': 'https://cdn.jupyter.org/notebook/try/'
}
|
Use the custom.js as served from the CDN for try
|
Use the custom.js as served from the CDN for try
|
Python
|
bsd-3-clause
|
dietmarw/jupyter-docker-images,iamjakob/docker-demo-images,Zsailer/docker-jupyter-teaching,odewahn/docker-demo-images,jupyter/docker-demo-images,tanyaschlusser/docker-demo-images,iamjakob/docker-demo-images,Zsailer/docker-demo-images,CognitiveScale/docker-demo-images,Zsailer/docker-jupyter-teaching,ericdill/docker-demo-images,willjharmer/docker-demo-images,Zsailer/docker-jupyter-teaching,philipz/docker-demo-images,parente/docker-demo-images,philipz/docker-demo-images,vanceb/docker-demo-images,parente/docker-demo-images,willjharmer/docker-demo-images,modulexcite/docker-demo-images,parente/docker-demo-images,willjharmer/docker-demo-images,vanceb/docker-demo-images,vanceb/docker-demo-images,CognitiveScale/docker-demo-images,Zsailer/docker-demo-images,tanyaschlusser/docker-demo-images,Zsailer/docker-jupyter-teaching,pelucid/docker-demo-images,mjbright/docker-demo-images,modulexcite/docker-demo-images,mjbright/docker-demo-images,CognitiveScale/docker-demo-images,dietmarw/jupyter-docker-images,rgbkrk/docker-demo-images,philipz/docker-demo-images,danielballan/docker-demo-images,rgbkrk/docker-demo-images,rgbkrk/docker-demo-images,dietmarw/jupyter-docker-images,mjbright/docker-demo-images,modulexcite/docker-demo-images,pelucid/docker-demo-images,jupyter/docker-demo-images,iamjakob/docker-demo-images,Zsailer/docker-demo-images,pelucid/docker-demo-images,danielballan/docker-demo-images,jupyter/docker-demo-images,danielballan/docker-demo-images,ericdill/docker-demo-images,ericdill/docker-demo-images,odewahn/docker-demo-images,odewahn/docker-demo-images,CognitiveScale/docker-demo-images,tanyaschlusser/docker-demo-images
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' https://*.jupyter.org https://jupyter.github.io https://*.tmpnb.org"
},
'static_url_prefix': 'https://cdn.jupyter.org/notebook/3.1.0/'
}
Use the custom.js as served from the CDN for try
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' https://*.jupyter.org https://jupyter.github.io https://*.tmpnb.org"
},
'static_url_prefix': 'https://cdn.jupyter.org/notebook/try/'
}
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' https://*.jupyter.org https://jupyter.github.io https://*.tmpnb.org"
},
'static_url_prefix': 'https://cdn.jupyter.org/notebook/3.1.0/'
}
<commit_msg>Use the custom.js as served from the CDN for try<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' https://*.jupyter.org https://jupyter.github.io https://*.tmpnb.org"
},
'static_url_prefix': 'https://cdn.jupyter.org/notebook/try/'
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' https://*.jupyter.org https://jupyter.github.io https://*.tmpnb.org"
},
'static_url_prefix': 'https://cdn.jupyter.org/notebook/3.1.0/'
}
Use the custom.js as served from the CDN for try#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' https://*.jupyter.org https://jupyter.github.io https://*.tmpnb.org"
},
'static_url_prefix': 'https://cdn.jupyter.org/notebook/try/'
}
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' https://*.jupyter.org https://jupyter.github.io https://*.tmpnb.org"
},
'static_url_prefix': 'https://cdn.jupyter.org/notebook/3.1.0/'
}
<commit_msg>Use the custom.js as served from the CDN for try<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' https://*.jupyter.org https://jupyter.github.io https://*.tmpnb.org"
},
'static_url_prefix': 'https://cdn.jupyter.org/notebook/try/'
}
|
2b380d501b80afad8c7c5ec27537bcc682ed2775
|
commands/handle.py
|
commands/handle.py
|
import commands.cmds as cmds
def handle(self, chat_raw):
self.logger.info("Handling command: " + chat_raw + " (for player" + self.fquid + ")")
_atmp1 = chat_raw.split(" ")
_atmp2 = list(_atmp1[0])
del _atmp2[0]
del _atmp1[0]
cmdobj = {
"base": _atmp2,
"args_raw": _atmp1,
"scope": self,
"chat_raw": chat_raw
}
commands.cmds.InvalidCommand.begin(self, cmdobj) if _atmp2 not in commands.cmds.baseList else commands.cmds.baseList[_atmp2].begin(self, cmdobj)
|
import commands.cmds as cmds
def handle(self, chat_raw):
self.logger.info("Handling command: " + chat_raw + " (for player" + self.fquid + ")")
_atmp1 = chat_raw.split(" ")
_atmp2 = list(_atmp1[0])
del _atmp2[0]
del _atmp1[0]
cmdobj = {
"base": _atmp2,
"args_raw": _atmp1,
"scope": self,
"chat_raw": chat_raw
}
cmds.InvalidCommand.begin(self, cmdobj) if _atmp2 not in cmds.baseList else cmds.baseList[_atmp2].begin(self, cmdobj)
|
Fix some scope mistakes. This fix was part of the reverted commit.
|
Fix some scope mistakes. This fix was part of the reverted commit.
|
Python
|
mit
|
TiberiumPY/puremine,Armored-Dragon/pymineserver
|
import commands.cmds as cmds
def handle(self, chat_raw):
self.logger.info("Handling command: " + chat_raw + " (for player" + self.fquid + ")")
_atmp1 = chat_raw.split(" ")
_atmp2 = list(_atmp1[0])
del _atmp2[0]
del _atmp1[0]
cmdobj = {
"base": _atmp2,
"args_raw": _atmp1,
"scope": self,
"chat_raw": chat_raw
}
commands.cmds.InvalidCommand.begin(self, cmdobj) if _atmp2 not in commands.cmds.baseList else commands.cmds.baseList[_atmp2].begin(self, cmdobj)Fix some scope mistakes. This fix was part of the reverted commit.
|
import commands.cmds as cmds
def handle(self, chat_raw):
self.logger.info("Handling command: " + chat_raw + " (for player" + self.fquid + ")")
_atmp1 = chat_raw.split(" ")
_atmp2 = list(_atmp1[0])
del _atmp2[0]
del _atmp1[0]
cmdobj = {
"base": _atmp2,
"args_raw": _atmp1,
"scope": self,
"chat_raw": chat_raw
}
cmds.InvalidCommand.begin(self, cmdobj) if _atmp2 not in cmds.baseList else cmds.baseList[_atmp2].begin(self, cmdobj)
|
<commit_before>import commands.cmds as cmds
def handle(self, chat_raw):
self.logger.info("Handling command: " + chat_raw + " (for player" + self.fquid + ")")
_atmp1 = chat_raw.split(" ")
_atmp2 = list(_atmp1[0])
del _atmp2[0]
del _atmp1[0]
cmdobj = {
"base": _atmp2,
"args_raw": _atmp1,
"scope": self,
"chat_raw": chat_raw
}
commands.cmds.InvalidCommand.begin(self, cmdobj) if _atmp2 not in commands.cmds.baseList else commands.cmds.baseList[_atmp2].begin(self, cmdobj)<commit_msg>Fix some scope mistakes. This fix was part of the reverted commit.<commit_after>
|
import commands.cmds as cmds
def handle(self, chat_raw):
self.logger.info("Handling command: " + chat_raw + " (for player" + self.fquid + ")")
_atmp1 = chat_raw.split(" ")
_atmp2 = list(_atmp1[0])
del _atmp2[0]
del _atmp1[0]
cmdobj = {
"base": _atmp2,
"args_raw": _atmp1,
"scope": self,
"chat_raw": chat_raw
}
cmds.InvalidCommand.begin(self, cmdobj) if _atmp2 not in cmds.baseList else cmds.baseList[_atmp2].begin(self, cmdobj)
|
import commands.cmds as cmds
def handle(self, chat_raw):
self.logger.info("Handling command: " + chat_raw + " (for player" + self.fquid + ")")
_atmp1 = chat_raw.split(" ")
_atmp2 = list(_atmp1[0])
del _atmp2[0]
del _atmp1[0]
cmdobj = {
"base": _atmp2,
"args_raw": _atmp1,
"scope": self,
"chat_raw": chat_raw
}
commands.cmds.InvalidCommand.begin(self, cmdobj) if _atmp2 not in commands.cmds.baseList else commands.cmds.baseList[_atmp2].begin(self, cmdobj)Fix some scope mistakes. This fix was part of the reverted commit.import commands.cmds as cmds
def handle(self, chat_raw):
self.logger.info("Handling command: " + chat_raw + " (for player" + self.fquid + ")")
_atmp1 = chat_raw.split(" ")
_atmp2 = list(_atmp1[0])
del _atmp2[0]
del _atmp1[0]
cmdobj = {
"base": _atmp2,
"args_raw": _atmp1,
"scope": self,
"chat_raw": chat_raw
}
cmds.InvalidCommand.begin(self, cmdobj) if _atmp2 not in cmds.baseList else cmds.baseList[_atmp2].begin(self, cmdobj)
|
<commit_before>import commands.cmds as cmds
def handle(self, chat_raw):
self.logger.info("Handling command: " + chat_raw + " (for player" + self.fquid + ")")
_atmp1 = chat_raw.split(" ")
_atmp2 = list(_atmp1[0])
del _atmp2[0]
del _atmp1[0]
cmdobj = {
"base": _atmp2,
"args_raw": _atmp1,
"scope": self,
"chat_raw": chat_raw
}
commands.cmds.InvalidCommand.begin(self, cmdobj) if _atmp2 not in commands.cmds.baseList else commands.cmds.baseList[_atmp2].begin(self, cmdobj)<commit_msg>Fix some scope mistakes. This fix was part of the reverted commit.<commit_after>import commands.cmds as cmds
def handle(self, chat_raw):
self.logger.info("Handling command: " + chat_raw + " (for player" + self.fquid + ")")
_atmp1 = chat_raw.split(" ")
_atmp2 = list(_atmp1[0])
del _atmp2[0]
del _atmp1[0]
cmdobj = {
"base": _atmp2,
"args_raw": _atmp1,
"scope": self,
"chat_raw": chat_raw
}
cmds.InvalidCommand.begin(self, cmdobj) if _atmp2 not in cmds.baseList else cmds.baseList[_atmp2].begin(self, cmdobj)
|
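A minimal, self-contained sketch of the dispatch pattern this commit fixes: look a parsed command base up in a command registry and fall back to an invalid-command handler. The registry and handlers below are hypothetical stand-ins for cmds.baseList and cmds.InvalidCommand, which are not shown in this record; note the recorded code keeps the base as a list of characters, while this sketch joins it back into a string for the lookup.

def hello(args):
    print("hello", *args)

def invalid(args):
    print("unknown command")

baseList = {"hello": hello}  # hypothetical registry

def handle(chat_raw):
    parts = chat_raw.split(" ")
    base = parts[0].lstrip("/")  # joined string, unlike the char list in the commit
    # Same shape as the fixed line: registry hit dispatches, miss falls back
    invalid(parts[1:]) if base not in baseList else baseList[base](parts[1:])

handle("/hello world")  # prints: hello world
handle("/nope")         # prints: unknown command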
b37f31b5adbdda3e5d40d2d8a9dde19b2e305c2c
|
ckanext/wirecloudview/tests/test_controller.py
|
ckanext/wirecloudview/tests/test_controller.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Future Internet Consulting and Development Solutions S.L.
# This file is part of CKAN WireCloud View Extension.
# CKAN WireCloud View Extension is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# CKAN WireCloud View Extension is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with CKAN WireCloud View Extension. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
from mock import DEFAULT, patch
from ckanext.wirecloudview.controller import WireCloudViewController
class WirecloudViewControllerTest(unittest.TestCase):
@patch.multiple("ckanext.wirecloudview.controller", request=DEFAULT, get_plugin=DEFAULT, toolkit=DEFAULT, OAuth2Session=DEFAULT, response=DEFAULT)
def test_get_workspaces(self, request, get_plugin, toolkit, OAuth2Session, response):
self.controller = WireCloudViewController()
self.controller.client_id = "aclientid"
request.params = {
'incomplete': 'key words',
'limit': '20',
}
get_plugin().wirecloud_url = "https://dashboards.example.org"
oauth = OAuth2Session()
OAuth2Session.reset_mock()
oauth.get().json.return_value = {
"results": [
{"owner": "user1", "name": "dashboard1"},
{"owner": "user2", "name": "other-dashboard"},
]
}
oauth.get.reset_mock()
response.headers = {}
result = self.controller.get_workspaces()
self.assertEqual(
json.loads(result.decode('utf-8')),
{
"ResultSet": {
"Result": [
{"Name": "user1/dashboard1"},
{"Name": "user2/other-dashboard"},
]
}
}
)
self.assertEqual(response.headers[b'Content-Type'], b"application/json")
OAuth2Session.assert_called_once_with(self.controller.client_id, token=toolkit.c.usertoken)
oauth.get.assert_called_once_with("https://dashboards.example.org/api/search?namespace=workspace&q=key+words&maxresults=20")
|
Add tests for the controller module
|
Add tests for the controller module
|
Python
|
agpl-3.0
|
conwetlab/ckanext-wirecloud_view,conwetlab/ckanext-wirecloud_view,conwetlab/ckanext-wirecloud_view,conwetlab/ckanext-wirecloud_view
|
Add tests for the controller module
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Future Internet Consulting and Development Solutions S.L.
# This file is part of CKAN WireCloud View Extension.
# CKAN WireCloud View Extension is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# CKAN WireCloud View Extension is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with CKAN WireCloud View Extension. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
from mock import DEFAULT, patch
from ckanext.wirecloudview.controller import WireCloudViewController
class WirecloudViewControllerTest(unittest.TestCase):
@patch.multiple("ckanext.wirecloudview.controller", request=DEFAULT, get_plugin=DEFAULT, toolkit=DEFAULT, OAuth2Session=DEFAULT, response=DEFAULT)
def test_get_workspaces(self, request, get_plugin, toolkit, OAuth2Session, response):
self.controller = WireCloudViewController()
self.controller.client_id = "aclientid"
request.params = {
'incomplete': 'key words',
'limit': '20',
}
get_plugin().wirecloud_url = "https://dashboards.example.org"
oauth = OAuth2Session()
OAuth2Session.reset_mock()
oauth.get().json.return_value = {
"results": [
{"owner": "user1", "name": "dashboard1"},
{"owner": "user2", "name": "other-dashboard"},
]
}
oauth.get.reset_mock()
response.headers = {}
result = self.controller.get_workspaces()
self.assertEqual(
json.loads(result.decode('utf-8')),
{
"ResultSet": {
"Result": [
{"Name": "user1/dashboard1"},
{"Name": "user2/other-dashboard"},
]
}
}
)
self.assertEqual(response.headers[b'Content-Type'], b"application/json")
OAuth2Session.assert_called_once_with(self.controller.client_id, token=toolkit.c.usertoken)
oauth.get.assert_called_once_with("https://dashboards.example.org/api/search?namespace=workspace&q=key+words&maxresults=20")
|
<commit_before><commit_msg>Add tests for the controller module<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Future Internet Consulting and Development Solutions S.L.
# This file is part of CKAN WireCloud View Extension.
# CKAN WireCloud View Extension is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# CKAN WireCloud View Extension is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with CKAN WireCloud View Extension. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
from mock import DEFAULT, patch
from ckanext.wirecloudview.controller import WireCloudViewController
class WirecloudViewControllerTest(unittest.TestCase):
@patch.multiple("ckanext.wirecloudview.controller", request=DEFAULT, get_plugin=DEFAULT, toolkit=DEFAULT, OAuth2Session=DEFAULT, response=DEFAULT)
def test_get_workspaces(self, request, get_plugin, toolkit, OAuth2Session, response):
self.controller = WireCloudViewController()
self.controller.client_id = "aclientid"
request.params = {
'incomplete': 'key words',
'limit': '20',
}
get_plugin().wirecloud_url = "https://dashboards.example.org"
oauth = OAuth2Session()
OAuth2Session.reset_mock()
oauth.get().json.return_value = {
"results": [
{"owner": "user1", "name": "dashboard1"},
{"owner": "user2", "name": "other-dashboard"},
]
}
oauth.get.reset_mock()
response.headers = {}
result = self.controller.get_workspaces()
self.assertEqual(
json.loads(result.decode('utf-8')),
{
"ResultSet": {
"Result": [
{"Name": "user1/dashboard1"},
{"Name": "user2/other-dashboard"},
]
}
}
)
self.assertEqual(response.headers[b'Content-Type'], b"application/json")
OAuth2Session.assert_called_once_with(self.controller.client_id, token=toolkit.c.usertoken)
oauth.get.assert_called_once_with("https://dashboards.example.org/api/search?namespace=workspace&q=key+words&maxresults=20")
|
Add tests for the controller module
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Future Internet Consulting and Development Solutions S.L.
# This file is part of CKAN WireCloud View Extension.
# CKAN WireCloud View Extension is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# CKAN WireCloud View Extension is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with CKAN WireCloud View Extension. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
from mock import DEFAULT, patch
from ckanext.wirecloudview.controller import WireCloudViewController
class WirecloudViewControllerTest(unittest.TestCase):
@patch.multiple("ckanext.wirecloudview.controller", request=DEFAULT, get_plugin=DEFAULT, toolkit=DEFAULT, OAuth2Session=DEFAULT, response=DEFAULT)
def test_get_workspaces(self, request, get_plugin, toolkit, OAuth2Session, response):
self.controller = WireCloudViewController()
self.controller.client_id = "aclientid"
request.params = {
'incomplete': 'key words',
'limit': '20',
}
get_plugin().wirecloud_url = "https://dashboards.example.org"
oauth = OAuth2Session()
OAuth2Session.reset_mock()
oauth.get().json.return_value = {
"results": [
{"owner": "user1", "name": "dashboard1"},
{"owner": "user2", "name": "other-dashboard"},
]
}
oauth.get.reset_mock()
response.headers = {}
result = self.controller.get_workspaces()
self.assertEqual(
json.loads(result.decode('utf-8')),
{
"ResultSet": {
"Result": [
{"Name": "user1/dashboard1"},
{"Name": "user2/other-dashboard"},
]
}
}
)
self.assertEqual(response.headers[b'Content-Type'], b"application/json")
OAuth2Session.assert_called_once_with(self.controller.client_id, token=toolkit.c.usertoken)
oauth.get.assert_called_once_with("https://dashboards.example.org/api/search?namespace=workspace&q=key+words&maxresults=20")
|
<commit_before><commit_msg>Add tests for the controller module<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2018 Future Internet Consulting and Development Solutions S.L.
# This file is part of CKAN WireCloud View Extension.
# CKAN WireCloud View Extension is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# CKAN WireCloud View Extension is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with CKAN WireCloud View Extension. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
from mock import DEFAULT, patch
from ckanext.wirecloudview.controller import WireCloudViewController
class WirecloudViewControllerTest(unittest.TestCase):
@patch.multiple("ckanext.wirecloudview.controller", request=DEFAULT, get_plugin=DEFAULT, toolkit=DEFAULT, OAuth2Session=DEFAULT, response=DEFAULT)
def test_get_workspaces(self, request, get_plugin, toolkit, OAuth2Session, response):
self.controller = WireCloudViewController()
self.controller.client_id = "aclientid"
request.params = {
'incomplete': 'key words',
'limit': '20',
}
get_plugin().wirecloud_url = "https://dashboards.example.org"
oauth = OAuth2Session()
OAuth2Session.reset_mock()
oauth.get().json.return_value = {
"results": [
{"owner": "user1", "name": "dashboard1"},
{"owner": "user2", "name": "other-dashboard"},
]
}
oauth.get.reset_mock()
response.headers = {}
result = self.controller.get_workspaces()
self.assertEqual(
json.loads(result.decode('utf-8')),
{
"ResultSet": {
"Result": [
{"Name": "user1/dashboard1"},
{"Name": "user2/other-dashboard"},
]
}
}
)
self.assertEqual(response.headers[b'Content-Type'], b"application/json")
OAuth2Session.assert_called_once_with(self.controller.client_id, token=toolkit.c.usertoken)
oauth.get.assert_called_once_with("https://dashboards.example.org/api/search?namespace=workspace&q=key+words&maxresults=20")
|
|
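The test above leans on mock's patch.multiple with DEFAULT placeholders to swap out several module-level collaborators at once. A minimal sketch of that idiom, using the stdlib unittest.mock equivalent of the Py2-era mock package; the module target and the fetch collaborator here are hypothetical:

import sys
import unittest
from unittest.mock import DEFAULT, patch

def fetch():  # hypothetical collaborator, replaced in the test
    raise RuntimeError("should not run in tests")

class Demo:
    def run(self):
        return fetch()

class PatchMultipleDemo(unittest.TestCase):
    # DEFAULT tells patch.multiple to substitute a MagicMock and pass it
    # into the test as a keyword argument of the same name
    @patch.multiple(sys.modules[__name__], fetch=DEFAULT)
    def test_run(self, fetch):
        fetch.return_value = 42
        self.assertEqual(Demo().run(), 42)

if __name__ == "__main__":
    unittest.main()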
545af0493cf08cb15d262f3a5333df6d1fce6848
|
brake/utils.py
|
brake/utils.py
|
from decorators import _backend
"""Access limits and increment counts without using a decorator."""
def get_limits(request, label, field, periods):
limits = []
count = 10
for period in periods:
limits.extend(_backend.limit(
label,
request,
field=field,
count=count,
period=period
))
count += 10
return limits
def inc_counts(request, label, field, periods):
for period in periods:
_backend.count(label, request, field=field, period=period)
|
Add util convenience functions for accessing data without decorators
|
Add util convenience functions for accessing data without decorators
|
Python
|
bsd-3-clause
|
SilentCircle/django-brake,SilentCircle/django-brake,skorokithakis/django-brake,skorokithakis/django-brake
|
Add util convenience functions for accessing data without decorators
|
from decorators import _backend
"""Access limits and increment counts without using a decorator."""
def get_limits(request, label, field, periods):
limits = []
count = 10
for period in periods:
limits.extend(_backend.limit(
label,
request,
field=field,
count=count,
period=period
))
count += 10
return limits
def inc_counts(request, label, field, periods):
for period in periods:
_backend.count(label, request, field=field, period=period)
|
<commit_before><commit_msg>Add util convenience functions for accessing data without decorators<commit_after>
|
from decorators import _backend
"""Access limits and increment counts without using a decorator."""
def get_limits(request, label, field, periods):
limits = []
count = 10
for period in periods:
limits.extend(_backend.limit(
label,
request,
field=field,
count=count,
period=period
))
count += 10
return limits
def inc_counts(request, label, field, periods):
for period in periods:
_backend.count(label, request, field=field, period=period)
|
Add util convenience functions for accessing data without decorators
from decorators import _backend
"""Access limits and increment counts without using a decorator."""
def get_limits(request, label, field, periods):
limits = []
count = 10
for period in periods:
limits.extend(_backend.limit(
label,
request,
field=field,
count=count,
period=period
))
count += 10
return limits
def inc_counts(request, label, field, periods):
for period in periods:
_backend.count(label, request, field=field, period=period)
|
<commit_before><commit_msg>Add util convenience functions for accessing data without decorators<commit_after>from decorators import _backend
"""Access limits and increment counts without using a decorator."""
def get_limits(request, label, field, periods):
limits = []
count = 10
for period in periods:
limits.extend(_backend.limit(
label,
request,
field=field,
count=count,
period=period
))
count += 10
return limits
def inc_counts(request, label, field, periods):
for period in periods:
_backend.count(label, request, field=field, period=period)
|
|
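A sketch of how a Django view might consume these helpers, assuming django-brake's backend is configured; the 'login' label, the 'ip' field, and the period values are illustrative, not taken from the record:

from django.http import HttpResponse
from brake.utils import get_limits, inc_counts

PERIODS = (60, 3600)  # seconds; illustrative values

def login_view(request):
    # get_limits returns the limits that were hit; any hit means throttle
    if get_limits(request, 'login', 'ip', PERIODS):
        return HttpResponse('Too many attempts', status=429)
    inc_counts(request, 'login', 'ip', PERIODS)
    return HttpResponse('ok')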
2c900f8bddc9efb40d900bf28f8c6b3188add71e
|
test/test_trix_parse.py
|
test/test_trix_parse.py
|
#!/usr/bin/env python
from rdflib.graph import ConjunctiveGraph
import unittest
class TestTrixParse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testAperture(self):
g=ConjunctiveGraph()
g.parse("test/trix/aperture.trix",format="trix")
c=list(g.contexts())
#print list(g.contexts())
t=sum(map(len, g.contexts()))
self.assertEquals(t,24)
self.assertEquals(len(c),4)
#print "Parsed %d triples"%t
def testSpec(self):
g=ConjunctiveGraph()
g.parse("test/trix/nokia_example.trix",format="trix")
#print "Parsed %d triples"%len(g)
if __name__=='__main__':
unittest.main()
|
#!/usr/bin/env python
from rdflib.graph import ConjunctiveGraph
import unittest
class TestTrixParse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testAperture(self):
g=ConjunctiveGraph()
g.parse("test/trix/aperture.trix",format="trix")
c=list(g.contexts())
#print list(g.contexts())
t=sum(map(len, g.contexts()))
self.assertEquals(t,24)
self.assertEquals(len(c),4)
#print "Parsed %d triples"%t
def testSpec(self):
g=ConjunctiveGraph()
g.parse("test/trix/nokia_example.trix",format="trix")
#print "Parsed %d triples"%len(g)
def testNG4j(self):
g=ConjunctiveGraph()
g.parse("test/trix/ng4jtest.trix",format="trix")
#print "Parsed %d triples"%len(g)
import platform
if platform.system() == 'Java':
from nose import SkipTest
raise SkipTest('Jython issues - "JavaSAXParser" object has no attribute "start_namespace_decl"')
if __name__=='__main__':
unittest.main()
|
Disable trix parser tests with Jython
|
Disable trix parser tests with Jython
|
Python
|
bsd-3-clause
|
RDFLib/rdflib,avorio/rdflib,yingerj/rdflib,ssssam/rdflib,ssssam/rdflib,marma/rdflib,armandobs14/rdflib,armandobs14/rdflib,dbs/rdflib,dbs/rdflib,RDFLib/rdflib,marma/rdflib,RDFLib/rdflib,yingerj/rdflib,avorio/rdflib,ssssam/rdflib,armandobs14/rdflib,RDFLib/rdflib,dbs/rdflib,marma/rdflib,marma/rdflib,ssssam/rdflib,armandobs14/rdflib,dbs/rdflib,avorio/rdflib,yingerj/rdflib,yingerj/rdflib,avorio/rdflib
|
#!/usr/bin/env python
from rdflib.graph import ConjunctiveGraph
import unittest
class TestTrixParse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testAperture(self):
g=ConjunctiveGraph()
g.parse("test/trix/aperture.trix",format="trix")
c=list(g.contexts())
#print list(g.contexts())
t=sum(map(len, g.contexts()))
self.assertEquals(t,24)
self.assertEquals(len(c),4)
#print "Parsed %d triples"%t
def testSpec(self):
g=ConjunctiveGraph()
g.parse("test/trix/nokia_example.trix",format="trix")
#print "Parsed %d triples"%len(g)
if __name__=='__main__':
unittest.main()
Disable trix parser tests with Jython
|
#!/usr/bin/env python
from rdflib.graph import ConjunctiveGraph
import unittest
class TestTrixParse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testAperture(self):
g=ConjunctiveGraph()
g.parse("test/trix/aperture.trix",format="trix")
c=list(g.contexts())
#print list(g.contexts())
t=sum(map(len, g.contexts()))
self.assertEquals(t,24)
self.assertEquals(len(c),4)
#print "Parsed %d triples"%t
def testSpec(self):
g=ConjunctiveGraph()
g.parse("test/trix/nokia_example.trix",format="trix")
#print "Parsed %d triples"%len(g)
def testNG4j(self):
g=ConjunctiveGraph()
g.parse("test/trix/ng4jtest.trix",format="trix")
#print "Parsed %d triples"%len(g)
import platform
if platform.system() == 'Java':
from nose import SkipTest
raise SkipTest('Jython issues - "JavaSAXParser" object has no attribute "start_namespace_decl"')
if __name__=='__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
from rdflib.graph import ConjunctiveGraph
import unittest
class TestTrixParse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testAperture(self):
g=ConjunctiveGraph()
g.parse("test/trix/aperture.trix",format="trix")
c=list(g.contexts())
#print list(g.contexts())
t=sum(map(len, g.contexts()))
self.assertEquals(t,24)
self.assertEquals(len(c),4)
#print "Parsed %d triples"%t
def testSpec(self):
g=ConjunctiveGraph()
g.parse("test/trix/nokia_example.trix",format="trix")
#print "Parsed %d triples"%len(g)
if __name__=='__main__':
unittest.main()
<commit_msg>Disable trix parser tests with Jython<commit_after>
|
#!/usr/bin/env python
from rdflib.graph import ConjunctiveGraph
import unittest
class TestTrixParse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testAperture(self):
g=ConjunctiveGraph()
g.parse("test/trix/aperture.trix",format="trix")
c=list(g.contexts())
#print list(g.contexts())
t=sum(map(len, g.contexts()))
self.assertEquals(t,24)
self.assertEquals(len(c),4)
#print "Parsed %d triples"%t
def testSpec(self):
g=ConjunctiveGraph()
g.parse("test/trix/nokia_example.trix",format="trix")
#print "Parsed %d triples"%len(g)
def testNG4j(self):
g=ConjunctiveGraph()
g.parse("test/trix/ng4jtest.trix",format="trix")
#print "Parsed %d triples"%len(g)
import platform
if platform.system() == 'Java':
from nose import SkipTest
raise SkipTest('Jython issues - "JavaSAXParser" object has no attribute "start_namespace_decl"')
if __name__=='__main__':
unittest.main()
|
#!/usr/bin/env python
from rdflib.graph import ConjunctiveGraph
import unittest
class TestTrixParse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testAperture(self):
g=ConjunctiveGraph()
g.parse("test/trix/aperture.trix",format="trix")
c=list(g.contexts())
#print list(g.contexts())
t=sum(map(len, g.contexts()))
self.assertEquals(t,24)
self.assertEquals(len(c),4)
#print "Parsed %d triples"%t
def testSpec(self):
g=ConjunctiveGraph()
g.parse("test/trix/nokia_example.trix",format="trix")
#print "Parsed %d triples"%len(g)
if __name__=='__main__':
unittest.main()
Disable trix parser tests with Jython
#!/usr/bin/env python
from rdflib.graph import ConjunctiveGraph
import unittest
class TestTrixParse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testAperture(self):
g=ConjunctiveGraph()
g.parse("test/trix/aperture.trix",format="trix")
c=list(g.contexts())
#print list(g.contexts())
t=sum(map(len, g.contexts()))
self.assertEquals(t,24)
self.assertEquals(len(c),4)
#print "Parsed %d triples"%t
def testSpec(self):
g=ConjunctiveGraph()
g.parse("test/trix/nokia_example.trix",format="trix")
#print "Parsed %d triples"%len(g)
def testNG4j(self):
g=ConjunctiveGraph()
g.parse("test/trix/ng4jtest.trix",format="trix")
#print "Parsed %d triples"%len(g)
import platform
if platform.system() == 'Java':
from nose import SkipTest
raise SkipTest('Jython issues - "JavaSAXParser" object has no attribute "start_namespace_decl"')
if __name__=='__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
from rdflib.graph import ConjunctiveGraph
import unittest
class TestTrixParse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testAperture(self):
g=ConjunctiveGraph()
g.parse("test/trix/aperture.trix",format="trix")
c=list(g.contexts())
#print list(g.contexts())
t=sum(map(len, g.contexts()))
self.assertEquals(t,24)
self.assertEquals(len(c),4)
#print "Parsed %d triples"%t
def testSpec(self):
g=ConjunctiveGraph()
g.parse("test/trix/nokia_example.trix",format="trix")
#print "Parsed %d triples"%len(g)
if __name__=='__main__':
unittest.main()
<commit_msg>Disable trix parser tests with Jython<commit_after>#!/usr/bin/env python
from rdflib.graph import ConjunctiveGraph
import unittest
class TestTrixParse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testAperture(self):
g=ConjunctiveGraph()
g.parse("test/trix/aperture.trix",format="trix")
c=list(g.contexts())
#print list(g.contexts())
t=sum(map(len, g.contexts()))
self.assertEquals(t,24)
self.assertEquals(len(c),4)
#print "Parsed %d triples"%t
def testSpec(self):
g=ConjunctiveGraph()
g.parse("test/trix/nokia_example.trix",format="trix")
#print "Parsed %d triples"%len(g)
def testNG4j(self):
g=ConjunctiveGraph()
g.parse("test/trix/ng4jtest.trix",format="trix")
#print "Parsed %d triples"%len(g)
import platform
if platform.system() == 'Java':
from nose import SkipTest
raise SkipTest('Jython issues - "JavaSAXParser" object has no attribute "start_namespace_decl"')
if __name__=='__main__':
unittest.main()
|
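The commit above skips the whole module under Jython by raising nose's SkipTest at import time. With plain unittest the same platform gate can be expressed per test case, roughly as below; the behaviour differs slightly in that only the class is skipped, not the module import. A sketch, not the project's actual approach:

import platform
import unittest

@unittest.skipIf(platform.system() == 'Java',
                 'SAX parser differences under Jython')
class TestTrixParseCPythonOnly(unittest.TestCase):
    def test_parses(self):
        self.assertTrue(True)  # placeholder body

if __name__ == '__main__':
    unittest.main()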
24b8437003269ebd10c46d0fbdaa3e432d7535d6
|
genotype-likelihoods.py
|
genotype-likelihoods.py
|
from __future__ import print_function
import sys
import cyvcf
from argparse import ArgumentParser, FileType
import toolz as tz
description = ("Create a table of probability of a non reference call for each "
"genotype for each sample. This is PL[0]. -1 is output for samples "
"with a missing PL call at a position.")
parser = ArgumentParser(description=description)
parser.add_argument("vcf", type=FileType('r'),
help="VCF file to convert, use '-' to read from stdin")
args = parser.parse_args()
vcf_reader = cyvcf.Reader(args.vcf)
records = tz.take(10, vcf_reader)
samples = vcf_reader.samples[1:5]
header = "\t".join([str(x) for x in ["CHROM", "POS", "ID", "REF", "ALT"] + samples])
print(header, file=sys.stdout)
for record in records:
line = [record.CHROM, record.POS, record.ID, record.REF, record.alleles[1]]
pls = [x.data.get("PL", None) for x in record.samples[1:5]]
pls = [x[0] if x else "-1" for x in pls]
print("\t".join([str(x) for x in line + pls]), file=sys.stdout)
|
Add VCF -> non-reference likelihood table script.
|
Add VCF -> non-reference likelihood table script.
|
Python
|
mit
|
roryk/junkdrawer,roryk/junkdrawer
|
Add VCF -> non-reference likelihood table script.
|
from __future__ import print_function
import sys
import cyvcf
from argparse import ArgumentParser, FileType
import toolz as tz
description = ("Create a table of probability of a non reference call for each "
"genotype for each sample. This is PL[0]. -1 is output for samples "
"with a missing PL call at a position.")
parser = ArgumentParser(description=description)
parser.add_argument("vcf", type=FileType('r'),
help="VCF file to convert, use '-' to read from stdin")
args = parser.parse_args()
vcf_reader = cyvcf.Reader(args.vcf)
records = tz.take(10, vcf_reader)
samples = vcf_reader.samples[1:5]
header = "\t".join([str(x) for x in ["CHROM", "POS", "ID", "REF", "ALT"] + samples])
print(header, file=sys.stdout)
for record in records:
line = [record.CHROM, record.POS, record.ID, record.REF, record.alleles[1]]
pls = [x.data.get("PL", None) for x in record.samples[1:5]]
pls = [x[0] if x else "-1" for x in pls]
print("\t".join([str(x) for x in line + pls]), file=sys.stdout)
|
<commit_before><commit_msg>Add VCF -> non-reference likelihood table script.<commit_after>
|
from __future__ import print_function
import sys
import cyvcf
from argparse import ArgumentParser, FileType
import toolz as tz
description = ("Create a table of probability of a non reference call for each "
"genotype for each sample. This is PL[0]. -1 is output for samples "
"with a missing PL call at a position.")
parser = ArgumentParser(description=description)
parser.add_argument("vcf", type=FileType('r'),
help="VCF file to convert, use '-' to read from stdin")
args = parser.parse_args()
vcf_reader = cyvcf.Reader(args.vcf)
records = tz.take(10, vcf_reader)
samples = vcf_reader.samples[1:5]
header = "\t".join([str(x) for x in ["CHROM", "POS", "ID", "REF", "ALT"] + samples])
print(header, file=sys.stdout)
for record in records:
line = [record.CHROM, record.POS, record.ID, record.REF, record.alleles[1]]
pls = [x.data.get("PL", None) for x in record.samples[1:5]]
pls = [x[0] if x else "-1" for x in pls]
print("\t".join([str(x) for x in line + pls]), file=sys.stdout)
|
Add VCF -> non-reference likelihood table script.
from __future__ import print_function
import sys
import cyvcf
from argparse import ArgumentParser, FileType
import toolz as tz
description = ("Create a table of probability of a non reference call for each "
"genotype for each sample. This is PL[0]. -1 is output for samples "
"with a missing PL call at a position.")
parser = ArgumentParser(description=description)
parser.add_argument("vcf", type=FileType('r'),
help="VCF file to convert, use '-' to read from stdin")
args = parser.parse_args()
vcf_reader = cyvcf.Reader(args.vcf)
records = tz.take(10, vcf_reader)
samples = vcf_reader.samples[1:5]
header = "\t".join([str(x) for x in ["CHROM", "POS", "ID", "REF", "ALT"] + samples])
print(header, file=sys.stdout)
for record in records:
line = [record.CHROM, record.POS, record.ID, record.REF, record.alleles[1]]
pls = [x.data.get("PL", None) for x in record.samples[1:5]]
pls = [x[0] if x else "-1" for x in pls]
print("\t".join([str(x) for x in line + pls]), file=sys.stdout)
|
<commit_before><commit_msg>Add VCF -> non-reference likelihood table script.<commit_after>from __future__ import print_function
import sys
import cyvcf
from argparse import ArgumentParser, FileType
import toolz as tz
description = ("Create a table of probability of a non reference call for each "
"genotype for each sample. This is PL[0]. -1 is output for samples "
"with a missing PL call at a position.")
parser = ArgumentParser(description=description)
parser.add_argument("vcf", type=FileType('r'),
help="VCF file to convert, use '-' to read from stdin")
args = parser.parse_args()
vcf_reader = cyvcf.Reader(args.vcf)
records = tz.take(10, vcf_reader)
samples = vcf_reader.samples[1:5]
header = "\t".join([str(x) for x in ["CHROM", "POS", "ID", "REF", "ALT"] + samples])
print(header, file=sys.stdout)
for record in records:
line = [record.CHROM, record.POS, record.ID, record.REF, record.alleles[1]]
pls = [x.data.get("PL", None) for x in record.samples[1:5]]
pls = [x[0] if x else "-1" for x in pls]
print("\t".join([str(x) for x in line + pls]), file=sys.stdout)
|
|
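The PL[0] handling is the core of the script; here it is isolated on toy dict records so the logic can be checked without cyvcf or a VCF file. The field layout below mimics, but is not, the cyvcf API:

records = [
    {"CHROM": "1", "POS": 100, "ID": ".", "REF": "A", "ALT": "G",
     "samples": [{"PL": [0, 30, 200]}, {}]},
]
for rec in records:
    pls = [s.get("PL") for s in rec["samples"]]
    pls = [pl[0] if pl else "-1" for pl in pls]  # -1 marks a missing PL call
    row = [rec["CHROM"], rec["POS"], rec["ID"], rec["REF"], rec["ALT"]] + pls
    print("\t".join(str(x) for x in row))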
0970115f9bc1bab019c23ab46e64b26d5e754313
|
led_display.py
|
led_display.py
|
import math
from gpiozero import LED
from time import sleep
g0 = LED(12)
f0 = LED(16)
a0 = LED(20)
b0 = LED(21)
e0 = LED(17)
d0 = LED(27)
c0 = LED(22)
g1 = LED(25)
f1 = LED(24)
a1 = LED(23)
b1 = LED(18)
e1 = LED(5)
d1 = LED(6)
c1 = LED(13)
PITCHES = {
'E2': ((a0, d0, e0, f0, g0), (b0, c0)),
'A2': ((a0, b0, c0, e0, f0, g0), (d0, )),
'D3': ((b0, c0, d0, e0, g0), (a0, f0,)),
'G3': ((a0, b0, c0, d0, f0, g0), (e0, )),
'B3': ((c0, d0, e0, f0, g0), (a0, b0,)),
'E4': ((a0, d0, e0, f0, g0), (b0, c0)),
}
DIRECTIONS = {
-1: ((a1, b1, f1, g1), (c1, d1, e1,)),
0: ((g1, ), (a1, b1, c1, d1, e1, f1, )),
1: ((c1, d1, e1, g1), (a1, b1, f1)),
}
def display_tuning_guidance(pitch, direction):
leds_on = PITCHES[pitch][0] + DIRECTIONS[direction][0]
leds_off = PITCHES[pitch][1] + DIRECTIONS[direction][1]
# Turn the appropriate leds on or off
for led in leds_on:
led.off()
for led in leds_off:
led.on()
|
Implement function for displaying tuning guidance on a DIY 8-segment LED display
|
Implement function for displaying tuning guidance on a DIY 8-segment LED display
|
Python
|
mit
|
Bastien-Brd/pi-tuner
|
Implement function for displaying tuning guidance on a DIY 8-segment LED display
|
import math
from gpiozero import LED
from time import sleep
g0 = LED(12)
f0 = LED(16)
a0 = LED(20)
b0 = LED(21)
e0 = LED(17)
d0 = LED(27)
c0 = LED(22)
g1 = LED(25)
f1 = LED(24)
a1 = LED(23)
b1 = LED(18)
e1 = LED(5)
d1 = LED(6)
c1 = LED(13)
PITCHES = {
'E2': ((a0, d0, e0, f0, g0), (b0, c0)),
'A2': ((a0, b0, c0, e0, f0, g0), (d0, )),
'D3': ((b0, c0, d0, e0, g0), (a0, f0,)),
'G3': ((a0, b0, c0, d0, f0, g0), (e0, )),
'B3': ((c0, d0, e0, f0, g0), (a0, b0,)),
'E4': ((a0, d0, e0, f0, g0), (b0, c0)),
}
DIRECTIONS = {
-1: ((a1, b1, f1, g1), (c1, d1, e1,)),
0: ((g1, ), (a1, b1, c1, d1, e1, f1, )),
1: ((c1, d1, e1, g1), (a1, b1, f1)),
}
def display_tuning_guidance(pitch, direction):
leds_on = PITCHES[pitch][0] + DIRECTIONS[direction][0]
leds_off = PITCHES[pitch][1] + DIRECTIONS[direction][1]
# Turn the appropriate leds on or off
for led in leds_on:
led.off()
for led in leds_off:
led.on()
|
<commit_before><commit_msg>Implement function for displaying tuning guidance on a DIY 8-segment LEDs display<commit_after>
|
import math
from gpiozero import LED
from time import sleep
g0 = LED(12)
f0 = LED(16)
a0 = LED(20)
b0 = LED(21)
e0 = LED(17)
d0 = LED(27)
c0 = LED(22)
g1 = LED(25)
f1 = LED(24)
a1 = LED(23)
b1 = LED(18)
e1 = LED(5)
d1 = LED(6)
c1 = LED(13)
PITCHES = {
'E2': ((a0, d0, e0, f0, g0), (b0, c0)),
'A2': ((a0, b0, c0, e0, f0, g0), (d0, )),
'D3': ((b0, c0, d0, e0, g0), (a0, f0,)),
'G3': ((a0, b0, c0, d0, f0, g0), (e0, )),
'B3': ((c0, d0, e0, f0, g0), (a0, b0,)),
'E4': ((a0, d0, e0, f0, g0), (b0, c0)),
}
DIRECTIONS = {
-1: ((a1, b1, f1, g1), (c1, d1, e1,)),
0: ((g1, ), (a1, b1, c1, d1, e1, f1, )),
1: ((c1, d1, e1, g1), (a1, b1, f1)),
}
def display_tuning_guidance(pitch, direction):
leds_on = PITCHES[pitch][0] + DIRECTIONS[direction][0]
leds_off = PITCHES[pitch][1] + DIRECTIONS[direction][1]
# Turn the appropriate leds on or off
for led in leds_on:
led.off()
for led in leds_off:
led.on()
|
Implement function for displaying tuning guidance on a DIY 8-segment LED display
import math
from gpiozero import LED
from time import sleep
g0 = LED(12)
f0 = LED(16)
a0 = LED(20)
b0 = LED(21)
e0 = LED(17)
d0 = LED(27)
c0 = LED(22)
g1 = LED(25)
f1 = LED(24)
a1 = LED(23)
b1 = LED(18)
e1 = LED(5)
d1 = LED(6)
c1 = LED(13)
PITCHES = {
'E2': ((a0, d0, e0, f0, g0), (b0, c0)),
'A2': ((a0, b0, c0, e0, f0, g0), (d0, )),
'D3': ((b0, c0, d0, e0, g0), (a0, f0,)),
'G3': ((a0, b0, c0, d0, f0, g0), (e0, )),
'B3': ((c0, d0, e0, f0, g0), (a0, b0,)),
'E4': ((a0, d0, e0, f0, g0), (b0, c0)),
}
DIRECTIONS = {
-1: ((a1, b1, f1, g1), (c1, d1, e1,)),
0: ((g1, ), (a1, b1, c1, d1, e1, f1, )),
1: ((c1, d1, e1, g1), (a1, b1, f1)),
}
def display_tuning_guidance(pitch, direction):
leds_on = PITCHES[pitch][0] + DIRECTIONS[direction][0]
leds_off = PITCHES[pitch][1] + DIRECTIONS[direction][1]
# Turn the appropriate leds on or off
for led in leds_on:
led.off()
for led in leds_off:
led.on()
|
<commit_before><commit_msg>Implement function for displaying tuning guidance on a DIY 8-segment LEDs display<commit_after>import math
from gpiozero import LED
from time import sleep
g0 = LED(12)
f0 = LED(16)
a0 = LED(20)
b0 = LED(21)
e0 = LED(17)
d0 = LED(27)
c0 = LED(22)
g1 = LED(25)
f1 = LED(24)
a1 = LED(23)
b1 = LED(18)
e1 = LED(5)
d1 = LED(6)
c1 = LED(13)
PITCHES = {
'E2': ((a0, d0, e0, f0, g0), (b0, c0)),
'A2': ((a0, b0, c0, e0, f0, g0), (d0, )),
'D3': ((b0, c0, d0, e0, g0), (a0, f0,)),
'G3': ((a0, b0, c0, d0, f0, g0), (e0, )),
'B3': ((c0, d0, e0, f0, g0), (a0, b0,)),
'E4': ((a0, d0, e0, f0, g0), (b0, c0)),
}
DIRECTIONS = {
-1: ((a1, b1, f1, g1), (c1, d1, e1,)),
0: ((g1, ), (a1, b1, c1, d1, e1, f1, )),
1: ((c1, d1, e1, g1), (a1, b1, f1)),
}
def display_tuning_guidance(pitch, direction):
leds_on = PITCHES[pitch][0] + DIRECTIONS[direction][0]
leds_off = PITCHES[pitch][1] + DIRECTIONS[direction][1]
# Turn the appropriate leds on or off
for led in leds_on:
led.off()
for led in leds_off:
led.on()
|
|
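display_tuning_guidance calls led.off() for segments that should light, which suggests active-low (common-anode) wiring; that is an inference from the code, not stated in the record. A stub LED class makes the lookup logic testable off-device:

class FakeLED:
    """Stand-in for gpiozero.LED so the logic runs without a Pi."""
    def __init__(self, pin):
        self.pin = pin
        self.lit = False
    def off(self):   # active-low: driving the pin low lights the segment
        self.lit = True
    def on(self):
        self.lit = False

a0, b0 = FakeLED(20), FakeLED(21)
for led in (a0,):   # pretend a0 is in the leds_on tuple for this pitch
    led.off()
print(a0.lit, b0.lit)  # True False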
550d8bcd49e5ec591286f3f42de7dd54ef853bb8
|
find_dupes.py
|
find_dupes.py
|
#!/usr/bin/env python3
import json
import os
import random
scriptpath = os.path.dirname(__file__)
data_dir = os.path.join(scriptpath, 'data')
all_json = [f for f in os.listdir(data_dir) if os.path.isfile(os.path.join(data_dir, f))]
quotes = []
for f in all_json:
filename = os.path.join(data_dir, f)
with open(filename) as json_data:
quotes += json.load(json_data)['data']
uniq_authors = { quote['author'] for quote in quotes}
uniq_quotes = { quote['quote'] for quote in quotes}
print('Unique quotes: {}, authors: {}'.format(len(uniq_quotes), len(uniq_authors)))
seen = set()
dupes = sorted([x for x in quotes if x['quote'] in seen or seen.add(x['quote'])], key=lambda x:x['quote'])
print(*dupes, sep='\n')
|
Add a utility script to print duplicates
|
Add a utility script to print duplicates
|
Python
|
mit
|
mubaris/motivate,mubaris/motivate
|
Add a utility script to print duplicates
|
#!/usr/bin/env python3
import json
import os
import random
scriptpath = os.path.dirname(__file__)
data_dir = os.path.join(scriptpath, 'data')
all_json = [f for f in os.listdir(data_dir) if os.path.isfile(os.path.join(data_dir, f))]
quotes = []
for f in all_json:
filename = os.path.join(data_dir, f)
with open(filename) as json_data:
quotes += json.load(json_data)['data']
uniq_authors = { quote['author'] for quote in quotes}
uniq_quotes = { quote['quote'] for quote in quotes}
print('Unique quotes: {}, authors: {}'.format(len(uniq_quotes), len(uniq_authors)))
seen = set()
dupes = sorted([x for x in quotes if x['quote'] in seen or seen.add(x['quote'])], key=lambda x:x['quote'])
print(*dupes, sep='\n')
|
<commit_before><commit_msg>Add a utility script to print duplicates<commit_after>
|
#!/usr/bin/env python3
import json
import os
import random
scriptpath = os.path.dirname(__file__)
data_dir = os.path.join(scriptpath, 'data')
all_json = [f for f in os.listdir(data_dir) if os.path.isfile(os.path.join(data_dir, f))]
quotes = []
for f in all_json:
filename = os.path.join(data_dir, f)
with open(filename) as json_data:
quotes += json.load(json_data)['data']
uniq_authors = { quote['author'] for quote in quotes}
uniq_quotes = { quote['quote'] for quote in quotes}
print('Unique quotes: {}, authors: {}'.format(len(uniq_quotes), len(uniq_authors)))
seen = set()
dupes = sorted([x for x in quotes if x['quote'] in seen or seen.add(x['quote'])], key=lambda x:x['quote'])
print(*dupes, sep='\n')
|
Add a utility script to print duplicates
#!/usr/bin/env python3
import json
import os
import random
scriptpath = os.path.dirname(__file__)
data_dir = os.path.join(scriptpath, 'data')
all_json = [f for f in os.listdir(data_dir) if os.path.isfile(os.path.join(data_dir, f))]
quotes = []
for f in all_json:
filename = os.path.join(data_dir, f)
with open(filename) as json_data:
quotes += json.load(json_data)['data']
uniq_authors = { quote['author'] for quote in quotes}
uniq_quotes = { quote['quote'] for quote in quotes}
print('Unique quotes: {}, authors: {}'.format(len(uniq_quotes), len(uniq_authors)))
seen = set()
dupes = sorted([x for x in quotes if x['quote'] in seen or seen.add(x['quote'])], key=lambda x:x['quote'])
print(*dupes, sep='\n')
|
<commit_before><commit_msg>Add a utility script to print duplicates<commit_after>#!/usr/bin/env python3
import json
import os
import random
scriptpath = os.path.dirname(__file__)
data_dir = os.path.join(scriptpath, 'data')
all_json = [f for f in os.listdir(data_dir) if os.path.isfile(os.path.join(data_dir, f))]
quotes = []
for f in all_json:
filename = os.path.join(data_dir, f)
with open(filename) as json_data:
quotes += json.load(json_data)['data']
uniq_authors = { quote['author'] for quote in quotes}
uniq_quotes = { quote['quote'] for quote in quotes}
print('Unique quotes: {}, authors: {}'.format(len(uniq_quotes), len(uniq_authors)))
seen = set()
dupes = sorted([x for x in quotes if x['quote'] in seen or seen.add(x['quote'])], key=lambda x:x['quote'])
print(*dupes, sep='\n')
|
|
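The seen/dupes line above packs a side effect into the filter condition, which is worth unpacking: set.add() returns None (falsy), so the or-clause records first sightings while letting repeats through into the result. The same idiom on toy data:

quotes = [{'quote': 'a'}, {'quote': 'b'}, {'quote': 'a'}]
seen = set()
# first 'a': not in seen, add() returns None -> excluded; second 'a': in seen -> kept
dupes = [q for q in quotes if q['quote'] in seen or seen.add(q['quote'])]
print(dupes)  # [{'quote': 'a'}] -- the second occurrence only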
501c38ac9e8b9fbb35b64321e103a0dfe064e718
|
QGL/BasicSequences/BlankingSweeps.py
|
QGL/BasicSequences/BlankingSweeps.py
|
"""
Sequences for optimizing gating timing.
"""
from ..PulsePrimitives import *
from ..Compiler import compile_to_hardware
def sweep_gateDelay(qubit, sweepPts):
"""
Sweep the gate delay associated with a qubit channel using a simple Id, Id, X90, X90
sequence.
Parameters
---------
qubit : logical qubit to create sequences for
sweepPts : iterable to sweep the gate delay over.
"""
generator = qubit.physChan.generator
oldDelay = generator.gateDelay
for ct, delay in enumerate(sweepPts):
seqs = [[Id(qubit, length=120e-9), Id(qubit), MEAS(qubit)],
[Id(qubit, length=120e-9), MEAS(qubit)],
[Id(qubit, length=120e-9), X90(qubit), MEAS(qubit)],
[Id(qubit, length=120e-9), X90(qubit), MEAS(qubit)]]
generator.gateDelay = delay
compile_to_hardware(seqs, 'BlankingSweeps/GateDelay', suffix='_{}'.format(ct+1))
generator.gateDelay = oldDelay
|
Add a sequence module for optimizing gating
|
Add a sequence module for optimizing gating
--CAR
|
Python
|
apache-2.0
|
calebjordan/PyQLab,Plourde-Research-Lab/PyQLab,BBN-Q/PyQLab,rmcgurrin/PyQLab
|
Add a sequence module for optimizing gating
--CAR
|
"""
Sequences for optimizing gating timing.
"""
from ..PulsePrimitives import *
from ..Compiler import compile_to_hardware
def sweep_gateDelay(qubit, sweepPts):
"""
Sweep the gate delay associated with a qubit channel using a simple Id, Id, X90, X90
sequence.
Parameters
---------
qubit : logical qubit to create sequences for
sweepPts : iterable to sweep the gate delay over.
"""
generator = qubit.physChan.generator
oldDelay = generator.gateDelay
for ct, delay in enumerate(sweepPts):
seqs = [[Id(qubit, length=120e-9), Id(qubit), MEAS(qubit)],
[Id(qubit, length=120e-9), MEAS(qubit)],
[Id(qubit, length=120e-9), X90(qubit), MEAS(qubit)],
[Id(qubit, length=120e-9), X90(qubit), MEAS(qubit)]]
generator.gateDelay = delay
compile_to_hardware(seqs, 'BlankingSweeps/GateDelay', suffix='_{}'.format(ct+1))
generator.gateDelay = oldDelay
|
<commit_before><commit_msg>Add a sequence module for optimizing gating
--CAR<commit_after>
|
"""
Sequences for optimizing gating timing.
"""
from ..PulsePrimitives import *
from ..Compiler import compile_to_hardware
def sweep_gateDelay(qubit, sweepPts):
"""
Sweep the gate delay associated with a qubit channel using a simple Id, Id, X90, X90
sequence.
Parameters
---------
qubit : logical qubit to create sequences for
sweepPts : iterable to sweep the gate delay over.
"""
generator = qubit.physChan.generator
oldDelay = generator.gateDelay
for ct, delay in enumerate(sweepPts):
seqs = [[Id(qubit, length=120e-9), Id(qubit), MEAS(qubit)],
[Id(qubit, length=120e-9), MEAS(qubit)],
[Id(qubit, length=120e-9), X90(qubit), MEAS(qubit)],
[Id(qubit, length=120e-9), X90(qubit), MEAS(qubit)]]
generator.gateDelay = delay
compile_to_hardware(seqs, 'BlankingSweeps/GateDelay', suffix='_{}'.format(ct+1))
generator.gateDelay = oldDelay
|
Add a sequence module for optimizing gating
--CAR"""
Sequences for optimizing gating timing.
"""
from ..PulsePrimitives import *
from ..Compiler import compile_to_hardware
def sweep_gateDelay(qubit, sweepPts):
"""
Sweep the gate delay associated with a qubit channel using a simple Id, Id, X90, X90
sequence.
Parameters
---------
qubit : logical qubit to create sequences for
sweepPts : iterable to sweep the gate delay over.
"""
generator = qubit.physChan.generator
oldDelay = generator.gateDelay
for ct, delay in enumerate(sweepPts):
seqs = [[Id(qubit, length=120e-9), Id(qubit), MEAS(qubit)],
[Id(qubit, length=120e-9), MEAS(qubit)],
[Id(qubit, length=120e-9), X90(qubit), MEAS(qubit)],
[Id(qubit, length=120e-9), X90(qubit), MEAS(qubit)]]
generator.gateDelay = delay
compile_to_hardware(seqs, 'BlankingSweeps/GateDelay', suffix='_{}'.format(ct+1))
generator.gateDelay = oldDelay
|
<commit_before><commit_msg>Add a sequence module for optimizing gating
--CAR<commit_after>"""
Sequences for optimizing gating timing.
"""
from ..PulsePrimitives import *
from ..Compiler import compile_to_hardware
def sweep_gateDelay(qubit, sweepPts):
"""
Sweep the gate delay associated with a qubit channel using a simple Id, Id, X90, X90
sequence.
Parameters
---------
qubit : logical qubit to create sequences for
sweepPts : iterable to sweep the gate delay over.
"""
generator = qubit.physChan.generator
oldDelay = generator.gateDelay
for ct, delay in enumerate(sweepPts):
seqs = [[Id(qubit, length=120e-9), Id(qubit), MEAS(qubit)],
[Id(qubit, length=120e-9), MEAS(qubit)],
[Id(qubit, length=120e-9), X90(qubit), MEAS(qubit)],
[Id(qubit, length=120e-9), X90(qubit), MEAS(qubit)]]
generator.gateDelay = delay
compile_to_hardware(seqs, 'BlankingSweeps/GateDelay', suffix='_{}'.format(ct+1))
generator.gateDelay = oldDelay
|
|
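The sweep saves gateDelay, mutates it per point, and restores it at the end; if compile_to_hardware raised partway through, the old delay would be lost. A context manager makes the restore exception-safe. This is a sketch of the design alternative, not part of the recorded commit; FakeGenerator stands in for the channel generator object:

from contextlib import contextmanager

@contextmanager
def temporary_gate_delay(generator, delay):
    old = generator.gateDelay
    generator.gateDelay = delay
    try:
        yield
    finally:
        generator.gateDelay = old  # restored even if compilation fails

class FakeGenerator:  # minimal stand-in with the one attribute the sweep touches
    gateDelay = 0.0

gen = FakeGenerator()
with temporary_gate_delay(gen, 1e-8):
    assert gen.gateDelay == 1e-8
assert gen.gateDelay == 0.0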
fdd2a50445d2f2cb92480f8f42c463b312411361
|
mapit/management/commands/mapit_print_areas.py
|
mapit/management/commands/mapit_print_areas.py
|
# For each generation, show every area, grouped by type
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
help = 'Show all areas by generation and area type'
def handle_noargs(self, **options):
for g in Generation.objects.all().order_by('id'):
print g
for t in Type.objects.all().order_by('code'):
qs = Area.objects.filter(type=t,
generation_high__gte=g,
generation_low__lte=g)
print " %s (number of areas: %d)" % (t, qs.count())
for a in qs:
print " ", a
|
Add a simple command to print all areas in all generations
|
Add a simple command to print all areas in all generations
|
Python
|
agpl-3.0
|
Sinar/mapit,chris48s/mapit,chris48s/mapit,Code4SA/mapit,opencorato/mapit,New-Bamboo/mapit,Sinar/mapit,opencorato/mapit,New-Bamboo/mapit,chris48s/mapit,Code4SA/mapit,opencorato/mapit,Code4SA/mapit
|
Add a simple command to print all areas in all generations
|
# For each generation, show every area, grouped by type
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
help = 'Show all areas by generation and area type'
def handle_noargs(self, **options):
for g in Generation.objects.all().order_by('id'):
print g
for t in Type.objects.all().order_by('code'):
qs = Area.objects.filter(type=t,
generation_high__gte=g,
generation_low__lte=g)
print " %s (number of areas: %d)" % (t, qs.count())
for a in qs:
print " ", a
|
<commit_before><commit_msg>Add a simple command to print all areas in all generations<commit_after>
|
# For each generation, show every area, grouped by type
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
help = 'Show all areas by generation and area type'
def handle_noargs(self, **options):
for g in Generation.objects.all().order_by('id'):
print g
for t in Type.objects.all().order_by('code'):
qs = Area.objects.filter(type=t,
generation_high__gte=g,
generation_low__lte=g)
print " %s (number of areas: %d)" % (t, qs.count())
for a in qs:
print " ", a
|
Add a simple command to print all areas in all generations
# For each generation, show every area, grouped by type
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
help = 'Show all areas by generation and area type'
def handle_noargs(self, **options):
for g in Generation.objects.all().order_by('id'):
print g
for t in Type.objects.all().order_by('code'):
qs = Area.objects.filter(type=t,
generation_high__gte=g,
generation_low__lte=g)
print " %s (number of areas: %d)" % (t, qs.count())
for a in qs:
print " ", a
|
<commit_before><commit_msg>Add a simple command to print all areas in all generations<commit_after># For each generation, show every area, grouped by type
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
help = 'Show all areas by generation and area type'
def handle_noargs(self, **options):
for g in Generation.objects.all().order_by('id'):
print g
for t in Type.objects.all().order_by('code'):
qs = Area.objects.filter(type=t,
generation_high__gte=g,
generation_low__lte=g)
print " %s (number of areas: %d)" % (t, qs.count())
for a in qs:
print " ", a
|
|
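The queryset above keeps areas whose [generation_low, generation_high] interval covers the generation g. The same predicate on plain tuples, for reference:

areas = [('Area A', 1, 3), ('Area B', 4, 5)]  # (name, generation_low, generation_high)
g = 2
print([name for name, lo, hi in areas if lo <= g <= hi])  # ['Area A']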
92f799d0584b598f368df44201446531dffd7d13
|
python/utilities/transform_mp3_filenames.py
|
python/utilities/transform_mp3_filenames.py
|
# Extract the artist name from songs with filenames in this format:
# (number) - (artist) - (title).mp3
# and add the artists name to songs with filenames in this format:
# (number)..(title).mp3
# to make filenames in this format:
# (number)..(artist)..(title).mp3
#
# eg.: 14 - 13th Floor Elevators - You're Gonna Miss Me.mp3
# + 14..You're Gonna Miss Me.mp3
# => 14..13th Floor Elevators..You're Gonna Miss Me.mp3
#
# Copyright 2017 Dave Cuthbert
# MIT License
from __future__ import print_function #Not needed with python3
import os as os
import re as re
TARGET_DIR = r"/insert/target/path"
def extract_artist(title):
artist_regex = re.compile(' - (.*?) - ')
artist = artist_regex.search(title)
return artist.group(1)
def get_song_list():
song_list = os.listdir(os.getcwd())
return song_list
def get_artists():
song_list = get_song_list()
artists = []
for song in song_list:
artists.append(extract_artist(song))
return artists
def insert_artist_name():
artist_names = get_artists()
old_filenames = os.listdir(TARGET_DIR)
new_filenames = []
for (old_filename, artist) in zip(old_filenames, artist_names):
new_filename = re.sub('\.\.', '..' + artist + '..', old_filename)
os.rename(os.path.join(TARGET_DIR, old_filename),
os.path.join(TARGET_DIR, new_filename))
if "__main__" == __name__:
#print(*get_artists(), sep='\n') #DEBUG
insert_artist_name()
|
Copy paste artist from filename1 to filename2
|
Copy paste artist from filename1 to filename2
Utility to help consolidate groups of mp3s while preserving metadata in their filenames
|
Python
|
mit
|
daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various
|
Copy paste artist from filename1 to filename2
Utility to help consolidate groups of mp3s while preserving metadata in their filenames
|
# Extract the artist name from songs with filenames in this format:
# (number) - (artist) - (title).mp3
# and add the artists name to songs with filenames in this format:
# (number)..(title).mp3
# to make filenames in this format:
# (number)..(artist)..(title).mp3
#
# eg.: 14 - 13th Floor Elevators - You're Gonna Miss Me.mp3
# + 14..You're Gonna Miss Me.mp3
# => 14..13th Floor Elevators..You're Gonna Miss Me.mp3
#
# Copyright 2017 Dave Cuthbert
# MIT License
from __future__ import print_function #Not needed with python3
import os as os
import re as re
TARGET_DIR = r"/insert/target/path"
def extract_artist(title):
artist_regex = re.compile(' - (.*?) - ')
artist = artist_regex.search(title)
return artist.group(1)
def get_song_list():
song_list = os.listdir(os.getcwd())
return song_list
def get_artists():
song_list = get_song_list()
artists = []
for song in song_list:
artists.append(extract_artist(song))
return artists
def insert_artist_name():
artist_names = get_artists()
old_filenames = os.listdir(TARGET_DIR)
new_filenames = []
for (old_filename, artist) in zip(old_filenames, artist_names):
new_filename = re.sub('\.\.', '..' + artist + '..', old_filename)
os.rename(os.path.join(TARGET_DIR, old_filename),
os.path.join(TARGET_DIR, new_filename))
if "__main__" == __name__:
#print(*get_artists(), sep='\n') #DEBUG
insert_artist_name()
|
<commit_before><commit_msg>Copy paste artist from filename1 to filename2
Utility to help consolidate groups of mp3s while preserving metadata in their filenames<commit_after>
|
# Extract the artist name from songs with filenames in this format:
# (number) - (artist) - (title).mp3
# and add the artists name to songs with filenames in this format:
# (number)..(title).mp3
# to make filenames in this format:
# (number)..(artist)..(title).mp3
#
# eg.: 14 - 13th Floor Elevators - You're Gonna Miss Me.mp3
# + 14..You're Gonna Miss Me.mp3
# => 14..13th Floor Elevators..You're Gonna Miss Me.mp3
#
# Copyright 2017 Dave Cuthbert
# MIT License
from __future__ import print_function #Not needed with python3
import os as os
import re as re
TARGET_DIR = r"/insert/target/path"
def extract_artist(title):
artist_regex = re.compile(' - (.*?) - ')
artist = artist_regex.search(title)
return artist.group(1)
def get_song_list():
song_list = os.listdir(os.getcwd())
return song_list
def get_artists():
song_list = get_song_list()
artists = []
for song in song_list:
artists.append(extract_artist(song))
return artists
def insert_artist_name():
artist_names = get_artists()
old_filenames = os.listdir(TARGET_DIR)
new_filenames = []
for (old_filename, artist) in zip(old_filenames, artist_names):
new_filename = re.sub('\.\.', '..' + artist + '..', old_filename)
os.rename(os.path.join(TARGET_DIR, old_filename),
os.path.join(TARGET_DIR, new_filename))
if "__main__" == __name__:
#print(*get_artists(), sep='\n') #DEBUG
insert_artist_name()
|
Copy paste artist from filename1 to filename2
Utility to help consolidate groups of mp3s while preserving metadata in their filenames
# Extract the artist name from songs with filenames in this format:
# (number) - (artist) - (title).mp3
# and add the artists name to songs with filenames in this format:
# (number)..(title).mp3
# to make filenames in this format:
# (number)..(artist)..(title).mp3
#
# eg.: 14 - 13th Floor Elevators - You're Gonna Miss Me.mp3
# + 14..You're Gonna Miss Me.mp3
# => 14..13th Floor Elevators..You're Gonna Miss Me.mp3
#
# Copyright 2017 Dave Cuthbert
# MIT License
from __future__ import print_function #Not needed with python3
import os as os
import re as re
TARGET_DIR = r"/insert/target/path"
def extract_artist(title):
artist_regex = re.compile(' - (.*?) - ')
artist = artist_regex.search(title)
return artist.group(1)
def get_song_list():
song_list = os.listdir(os.getcwd())
return song_list
def get_artists():
song_list = get_song_list()
artists = []
for song in song_list:
artists.append(extract_artist(song))
return artists
def insert_artist_name():
artist_names = get_artists()
old_filenames = os.listdir(TARGET_DIR)
new_filenames = []
for (old_filename, artist) in zip(old_filenames, artist_names):
new_filename = re.sub('\.\.', '..' + artist + '..', old_filename)
os.rename(os.path.join(TARGET_DIR, old_filename),
os.path.join(TARGET_DIR, new_filename))
if "__main__" == __name__:
#print(*get_artists(), sep='\n') #DEBUG
insert_artist_name()
|
<commit_before><commit_msg>Copy paste artist from filename1 to filename2
Utility to help consolidate groups of mp3s while preserving metadata in their filenames<commit_after># Extract the artist name from songs with filenames in this format:
# (number) - (artist) - (title).mp3
# and add the artists name to songs with filenames in this format:
# (number)..(title).mp3
# to make filenames in this format:
# (number)..(artist)..(title).mp3
#
# eg.: 14 - 13th Floor Elevators - You're Gonna Miss Me.mp3
# + 14..You're Gonna Miss Me.mp3
# => 14..13th Floor Elevators..You're Gonna Miss Me.mp3
#
# Copyright 2017 Dave Cuthbert
# MIT License
from __future__ import print_function #Not needed with python3
import os as os
import re as re
TARGET_DIR = r"/insert/target/path"
def extract_artist(title):
artist_regex = re.compile(' - (.*?) - ')
artist = artist_regex.search(title)
return artist.group(1)
def get_song_list():
song_list = os.listdir(os.getcwd())
return song_list
def get_artists():
song_list = get_song_list()
artists = []
for song in song_list:
artists.append(extract_artist(song))
return artists
def insert_artist_name():
artist_names = get_artists()
old_filenames = os.listdir(TARGET_DIR)
new_filenames = []
for (old_filename, artist) in zip(old_filenames, artist_names):
new_filename = re.sub('\.\.', '..' + artist + '..', old_filename)
os.rename(os.path.join(TARGET_DIR, old_filename),
os.path.join(TARGET_DIR, new_filename))
if "__main__" == __name__:
#print(*get_artists(), sep='\n') #DEBUG
insert_artist_name()
|
|
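zip pairs the two directory listings positionally, and os.listdir gives no guaranteed order, so the pairing can silently mismatch. Keying on the track number is sturdier; a sketch on toy filenames, where the regexes assume the two formats described in the header comment, and count=1 also guards against a second '..' in a title:

import re

src = ["14 - 13th Floor Elevators - You're Gonna Miss Me.mp3"]
dst = ["14..You're Gonna Miss Me.mp3"]

def track_no(name):
    return re.match(r'\d+', name).group(0)

artists = {track_no(n): re.search(' - (.*?) - ', n).group(1) for n in src}
renames = {n: re.sub(r'\.\.', '..' + artists[track_no(n)] + '..', n, count=1)
           for n in dst}
print(renames)  # pairs each target name with its artist-inserted rename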
58e0ea4b555cf89ace4f5d97c579dbba905e7eeb
|
jsk_arc2017_common/scripts/list_objects.py
|
jsk_arc2017_common/scripts/list_objects.py
|
#!/usr/bin/env python
import os.path as osp
import rospkg
PKG_PATH = rospkg.RosPack().get_path('jsk_arc2017_common')
object_names = ['__background__']
with open(osp.join(PKG_PATH, 'data/names/objects.txt')) as f:
object_names += [x.strip() for x in f]
object_names.append('__shelf__')
for obj_id, obj in enumerate(object_names):
print('%2d: %s' % (obj_id, obj))
|
Add script to list objects
|
Add script to list objects
|
Python
|
bsd-3-clause
|
pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc
|
Add script to list objects
|
#!/usr/bin/env python
import os.path as osp
import rospkg
PKG_PATH = rospkg.RosPack().get_path('jsk_arc2017_common')
object_names = ['__background__']
with open(osp.join(PKG_PATH, 'data/names/objects.txt')) as f:
object_names += [x.strip() for x in f]
object_names.append('__shelf__')
for obj_id, obj in enumerate(object_names):
print('%2d: %s' % (obj_id, obj))
|
<commit_before><commit_msg>Add script to list objects<commit_after>
|
#!/usr/bin/env python
import os.path as osp
import rospkg
PKG_PATH = rospkg.RosPack().get_path('jsk_arc2017_common')
object_names = ['__background__']
with open(osp.join(PKG_PATH, 'data/names/objects.txt')) as f:
object_names += [x.strip() for x in f]
object_names.append('__shelf__')
for obj_id, obj in enumerate(object_names):
print('%2d: %s' % (obj_id, obj))
|
Add script to list objects
#!/usr/bin/env python
import os.path as osp
import rospkg
PKG_PATH = rospkg.RosPack().get_path('jsk_arc2017_common')
object_names = ['__background__']
with open(osp.join(PKG_PATH, 'data/names/objects.txt')) as f:
object_names += [x.strip() for x in f]
object_names.append('__shelf__')
for obj_id, obj in enumerate(object_names):
print('%2d: %s' % (obj_id, obj))
|
<commit_before><commit_msg>Add script to list objects<commit_after>#!/usr/bin/env python
import os.path as osp
import rospkg
PKG_PATH = rospkg.RosPack().get_path('jsk_arc2017_common')
object_names = ['__background__']
with open(osp.join(PKG_PATH, 'data/names/objects.txt')) as f:
object_names += [x.strip() for x in f]
object_names.append('__shelf__')
for obj_id, obj in enumerate(object_names):
print('%2d: %s' % (obj_id, obj))
|
|
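Downstream nodes often need the inverse mapping from name to id; building it from the same list keeps the ids consistent with the printout. The names below are illustrative, not the real objects.txt contents:

object_names = ['__background__', 'avery_binder', 'balloons', '__shelf__']
name_to_id = {name: i for i, name in enumerate(object_names)}
print(name_to_id['balloons'])  # 2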
836845abde53ee55bca93f098ece78880ab6b5c6
|
examples/events/create_massive_dummy_events.py
|
examples/events/create_massive_dummy_events.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import tools
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events, each containing a given number of attributes.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = init(misp_url, misp_key)
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import url, key
import argparse
import tools
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events containing a given number of attributes each.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = PyMISP(url, key, True, 'json')
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
|
Use same variable names as testing environment
|
Use same variable names as testing environment
|
Python
|
bsd-2-clause
|
pombredanne/PyMISP,iglocska/PyMISP
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import tools
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events containing a given number of attributes each.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = init(misp_url, misp_key)
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
Use same variable names as testing environment
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import url, key
import argparse
import tools
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events containing a given number of attributes each.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = PyMISP(url, key, True, 'json')
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import tools
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events containing a given number of attributes each.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = init(misp_url, misp_key)
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
<commit_msg>Use same variable names as testing environment<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import url, key
import argparse
import tools
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events containing a given number of attributes each.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = PyMISP(url, key, True, 'json')
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import tools
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events containing a given number of attributes each.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = init(misp_url, misp_key)
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
Use same variable names as testing environment#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import url, key
import argparse
import tools
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events containing a given number of attributes each.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = PyMISP(url, key, True, 'json')
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import tools
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events containing a given number of attributes each.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = init(misp_url, misp_key)
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
<commit_msg>Use same variable names as testing environment<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import url, key
import argparse
import tools
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events containing a given number of attributes each.')
parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = PyMISP(url, key, True, 'json')
if args.limit is None:
args.limit = 1
if args.attribute is None:
args.attribute = 3000
for i in range(args.limit):
tools.create_massive_dummy_events(misp, args.attribute)
|
a635a8d58e46cf4ef1bc225f8824d73984971fee
|
countVowels.py
|
countVowels.py
|
""" Q6- Write a program that counts up the number of vowels contained in the string s. Valid vowels are: 'a', 'e', 'i',
'o', and 'u'. For example, if s = 'azcbobobegghakl', your program should print: Number of vowels: 5
"""
# Using the isVowel function from isVowel.py module (Answer of fifth question of Assignment 3)
def isVowel( char ):
    # Converting the letter to lowercase would let us skip checking the character's case, simplifying the problem
    # str.lower( char )
    # The call above is commented out because it is not needed for this problem, but the built-in can be useful in the general case.
# Splitting the condition: 'a' or 'e' or 'i' or 'o' or 'u' to make it more readable and easier to understand.
is_char_a = char == 'a'
is_char_e = char == 'e'
is_char_i = char == 'i'
is_char_o = char == 'o'
is_char_u = char == 'u'
is_char_vowel = is_char_a or is_char_e or is_char_i or is_char_o or is_char_u
return is_char_vowel
def countVowels( string ):
if str.islower( string ):
count = 0 # Counts the number of vowels
for letter in string:
if isVowel( letter ):
count += 1
print( "Number of vowels: " + str( count ) )
else:
if len( string ):
print( "Error: All the characters in the string should be in LOWERCASE." )
else:
print( "Error: The string is EMPTY." )
string = input( "Enter the string: " )
countVowels( string )
|
Add the answer to the sixth question of Assignment 3
|
Add the answer to the sixth question of Assignment 3
|
Python
|
mit
|
SuyashD95/python-assignments
|
Add the answer to the sixth question of Assignment 3
|
""" Q6- Write a program that counts up the number of vowels contained in the string s. Valid vowels are: 'a', 'e', 'i',
'o', and 'u'. For example, if s = 'azcbobobegghakl', your program should print: Number of vowels: 5
"""
# Using the isVowel function from isVowel.py module (Answer of fifth question of Assignment 3)
def isVowel( char ):
    # Converting the letter to lowercase would let us skip checking the character's case, simplifying the problem
    # str.lower( char )
    # The call above is commented out because it is not needed for this problem, but the built-in can be useful in the general case.
# Splitting the condition: 'a' or 'e' or 'i' or 'o' or 'u' to make it more readable and easier to understand.
is_char_a = char == 'a'
is_char_e = char == 'e'
is_char_i = char == 'i'
is_char_o = char == 'o'
is_char_u = char == 'u'
is_char_vowel = is_char_a or is_char_e or is_char_i or is_char_o or is_char_u
return is_char_vowel
def countVowels( string ):
if str.islower( string ):
count = 0 # Counts the number of vowels
for letter in string:
if isVowel( letter ):
count += 1
print( "Number of vowels: " + str( count ) )
else:
if len( string ):
print( "Error: All the characters in the string should be in LOWERCASE." )
else:
print( "Error: The string is EMPTY." )
string = input( "Enter the string: " )
countVowels( string )
|
<commit_before><commit_msg>Add the answer to the sixth question of Assignment 3<commit_after>
|
""" Q6- Write a program that counts up the number of vowels contained in the string s. Valid vowels are: 'a', 'e', 'i',
'o', and 'u'. For example, if s = 'azcbobobegghakl', your program should print: Number of vowels: 5
"""
# Using the isVowel function from isVowel.py module (Answer of fifth question of Assignment 3)
def isVowel( char ):
    # Converting the letter to lowercase would let us skip checking the character's case, simplifying the problem
    # str.lower( char )
    # The call above is commented out because it is not needed for this problem, but the built-in can be useful in the general case.
# Splitting the condition: 'a' or 'e' or 'i' or 'o' or 'u' to make it more readable and easier to understand.
is_char_a = char == 'a'
is_char_e = char == 'e'
is_char_i = char == 'i'
is_char_o = char == 'o'
is_char_u = char == 'u'
is_char_vowel = is_char_a or is_char_e or is_char_i or is_char_o or is_char_u
return is_char_vowel
def countVowels( string ):
if str.islower( string ):
count = 0 # Counts the number of vowels
for letter in string:
if isVowel( letter ):
count += 1
print( "Number of vowels: " + str( count ) )
else:
if len( string ):
print( "Error: All the characters in the string should be in LOWERCASE." )
else:
print( "Error: The string is EMPTY." )
string = input( "Enter the string: " )
countVowels( string )
|
Add the answer to the sixth question of Assignment 3""" Q6- Write a program that counts up the number of vowels contained in the string s. Valid vowels are: 'a', 'e', 'i',
'o', and 'u'. For example, if s = 'azcbobobegghakl', your program should print: Number of vowels: 5
"""
# Using the isVowel function from isVowel.py module (Answer of fifth question of Assignment 3)
def isVowel( char ):
    # Converting the letter to lowercase would let us skip checking the character's case, simplifying the problem
    # str.lower( char )
    # The call above is commented out because it is not needed for this problem, but the built-in can be useful in the general case.
# Splitting the condition: 'a' or 'e' or 'i' or 'o' or 'u' to make it more readable and easier to understand.
is_char_a = char == 'a'
is_char_e = char == 'e'
is_char_i = char == 'i'
is_char_o = char == 'o'
is_char_u = char == 'u'
is_char_vowel = is_char_a or is_char_e or is_char_i or is_char_o or is_char_u
return is_char_vowel
def countVowels( string ):
if str.islower( string ):
count = 0 # Counts the number of vowels
for letter in string:
if isVowel( letter ):
count += 1
print( "Number of vowels: " + str( count ) )
else:
if len( string ):
print( "Error: All the characters in the string should be in LOWERCASE." )
else:
print( "Error: The string is EMPTY." )
string = input( "Enter the string: " )
countVowels( string )
|
<commit_before><commit_msg>Add the answer to the sixth question of Assignment 3<commit_after>""" Q6- Write a program that counts up the number of vowels contained in the string s. Valid vowels are: 'a', 'e', 'i',
'o', and 'u'. For example, if s = 'azcbobobegghakl', your program should print: Number of vowels: 5
"""
# Using the isVowel function from isVowel.py module (Answer of fifth question of Assignment 3)
def isVowel( char ):
    # Converting the letter to lowercase would let us skip checking the character's case, simplifying the problem
    # str.lower( char )
    # The call above is commented out because it is not needed for this problem, but the built-in can be useful in the general case.
# Splitting the condition: 'a' or 'e' or 'i' or 'o' or 'u' to make it more readable and easier to understand.
is_char_a = char == 'a'
is_char_e = char == 'e'
is_char_i = char == 'i'
is_char_o = char == 'o'
is_char_u = char == 'u'
is_char_vowel = is_char_a or is_char_e or is_char_i or is_char_o or is_char_u
return is_char_vowel
def countVowels( string ):
if str.islower( string ):
count = 0 # Counts the number of vowels
for letter in string:
if isVowel( letter ):
count += 1
print( "Number of vowels: " + str( count ) )
else:
if len( string ):
print( "Error: All the characters in the string should be in LOWERCASE." )
else:
print( "Error: The string is EMPTY." )
string = input( "Enter the string: " )
countVowels( string )
|
|
a6137714c55ada55571759b851e1e4afa7818f29
|
app/utils/scripts/delete-docs.py
|
app/utils/scripts/delete-docs.py
|
#!/usr/bin/python
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Basic command line script to delete documents."""
import argparse
import sys
import models
import utils
import utils.db
COLLECTIONS = [
models.BOOT_COLLECTION,
models.DEFCONFIG_COLLECTION,
models.JOB_COLLECTION,
models.LAB_COLLECTION
]
ALL_COLLECTIONS = [
"all"
]
ALL_COLLECTIONS.extend(COLLECTIONS)
def parse_fields(fields):
for field in fields:
if "=" in field:
yield field.split("=", 1)
else:
utils.LOG.error("Field %s is not valid, not considered", field)
def _delete_with_spec(collection, spec_or_id, database):
ret_val = None
if collection == "all":
utils.LOG.info("Deleting documents in all collections")
for coll in COLLECTIONS:
utils.LOG.info("Deleting from %s...", coll)
            ret_val = utils.db.delete(database[coll], spec_or_id)
else:
ret_val = utils.db.delete(database[collection], spec_or_id)
if ret_val == 200:
utils.LOG.info("Documents identified deleted: %s", spec_or_id)
else:
utils.LOG.error(
"Error deleting documents with the provided values: %s",
spec_or_id)
sys.exit(1)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Import boots from disk",
version=0.1
)
parser.add_argument(
"--collection", "-c",
type=str,
help="The name of the job to import",
dest="collection",
required=True,
choices=ALL_COLLECTIONS
)
parser.add_argument(
"--field", "-f",
help=(
"The necessary fields to identify the elements to delete; "
"they must be defined as key=value pairs"
),
dest="fields",
action="append",
required=True
)
args = parser.parse_args()
collection = args.collection
fields = args.fields
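    # Build the query spec from the -f key=value pairs, e.g. -f job=next -f arch=arm (hypothetical example values)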
spec = {
k: v for k, v in parse_fields(fields)
}
if spec:
database = utils.db.get_db_connection({})
_delete_with_spec(collection, spec, database)
else:
utils.LOG.error("Don't know what to look for...")
sys.exit(1)
|
Add cli tool to delete documents.
|
Add cli tool to delete documents.
Change-Id: I16c99d4b625e627c693c6354aaaa191c5076344b
|
Python
|
lgpl-2.1
|
kernelci/kernelci-backend,joyxu/kernelci-backend,kernelci/kernelci-backend,joyxu/kernelci-backend,joyxu/kernelci-backend
|
Add cli tool to delete documents.
Change-Id: I16c99d4b625e627c693c6354aaaa191c5076344b
|
#!/usr/bin/python
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Basic command line script to delete documents."""
import argparse
import sys
import models
import utils
import utils.db
COLLECTIONS = [
models.BOOT_COLLECTION,
models.DEFCONFIG_COLLECTION,
models.JOB_COLLECTION,
models.LAB_COLLECTION
]
ALL_COLLECTIONS = [
"all"
]
ALL_COLLECTIONS.extend(COLLECTIONS)
def parse_fields(fields):
for field in fields:
if "=" in field:
yield field.split("=", 1)
else:
utils.LOG.error("Field %s is not valid, not considered", field)
def _delete_with_spec(collection, spec_or_id, database):
ret_val = None
if collection == "all":
utils.LOG.info("Deleting documents in all collections")
for coll in COLLECTIONS:
utils.LOG.info("Deleting from %s...", coll)
            ret_val = utils.db.delete(database[coll], spec_or_id)
else:
ret_val = utils.db.delete(database[collection], spec_or_id)
if ret_val == 200:
utils.LOG.info("Documents identified deleted: %s", spec_or_id)
else:
utils.LOG.error(
"Error deleting documents with the provided values: %s",
spec_or_id)
sys.exit(1)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Import boots from disk",
version=0.1
)
parser.add_argument(
"--collection", "-c",
type=str,
help="The name of the job to import",
dest="collection",
required=True,
choices=ALL_COLLECTIONS
)
parser.add_argument(
"--field", "-f",
help=(
"The necessary fields to identify the elements to delete; "
"they must be defined as key=value pairs"
),
dest="fields",
action="append",
required=True
)
args = parser.parse_args()
collection = args.collection
fields = args.fields
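    # Build the query spec from the -f key=value pairs, e.g. -f job=next -f arch=arm (hypothetical example values)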
spec = {
k: v for k, v in parse_fields(fields)
}
if spec:
database = utils.db.get_db_connection({})
_delete_with_spec(collection, spec, database)
else:
utils.LOG.error("Don't know what to look for...")
sys.exit(1)
|
<commit_before><commit_msg>Add cli tool to delete documents.
Change-Id: I16c99d4b625e627c693c6354aaaa191c5076344b<commit_after>
|
#!/usr/bin/python
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Basic command line script to delete documents."""
import argparse
import sys
import models
import utils
import utils.db
COLLECTIONS = [
models.BOOT_COLLECTION,
models.DEFCONFIG_COLLECTION,
models.JOB_COLLECTION,
models.LAB_COLLECTION
]
ALL_COLLECTIONS = [
"all"
]
ALL_COLLECTIONS.extend(COLLECTIONS)
def parse_fields(fields):
for field in fields:
if "=" in field:
yield field.split("=", 1)
else:
utils.LOG.error("Field %s is not valid, not considered", field)
def _delete_with_spec(collection, spec_or_id, database):
ret_val = None
if collection == "all":
utils.LOG.info("Deleting documents in all collections")
for coll in COLLECTIONS:
utils.LOG.info("Deleting from %s...", coll)
            ret_val = utils.db.delete(database[coll], spec_or_id)
else:
ret_val = utils.db.delete(database[collection], spec_or_id)
if ret_val == 200:
utils.LOG.info("Documents identified deleted: %s", spec_or_id)
else:
utils.LOG.error(
"Error deleting documents with the provided values: %s",
spec_or_id)
sys.exit(1)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Import boots from disk",
version=0.1
)
parser.add_argument(
"--collection", "-c",
type=str,
help="The name of the job to import",
dest="collection",
required=True,
choices=ALL_COLLECTIONS
)
parser.add_argument(
"--field", "-f",
help=(
"The necessary fields to identify the elements to delete; "
"they must be defined as key=value pairs"
),
dest="fields",
action="append",
required=True
)
args = parser.parse_args()
collection = args.collection
fields = args.fields
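    # Build the query spec from the -f key=value pairs, e.g. -f job=next -f arch=arm (hypothetical example values)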
spec = {
k: v for k, v in parse_fields(fields)
}
if spec:
database = utils.db.get_db_connection({})
_delete_with_spec(collection, spec, database)
else:
utils.LOG.error("Don't know what to look for...")
sys.exit(1)
|
Add cli tool to delete documents.
Change-Id: I16c99d4b625e627c693c6354aaaa191c5076344b#!/usr/bin/python
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Basic command line script to delete documents."""
import argparse
import sys
import models
import utils
import utils.db
COLLECTIONS = [
models.BOOT_COLLECTION,
models.DEFCONFIG_COLLECTION,
models.JOB_COLLECTION,
models.LAB_COLLECTION
]
ALL_COLLECTIONS = [
"all"
]
ALL_COLLECTIONS.extend(COLLECTIONS)
def parse_fields(fields):
for field in fields:
if "=" in field:
yield field.split("=", 1)
else:
utils.LOG.error("Field %s is not valid, not considered", field)
def _delete_with_spec(collection, spec_or_id, database):
ret_val = None
if collection == "all":
utils.LOG.info("Deleting documents in all collections")
for coll in COLLECTIONS:
utils.LOG.info("Deleting from %s...", coll)
            ret_val = utils.db.delete(database[coll], spec_or_id)
else:
ret_val = utils.db.delete(database[collection], spec_or_id)
if ret_val == 200:
utils.LOG.info("Documents identified deleted: %s", spec_or_id)
else:
utils.LOG.error(
"Error deleting documents with the provided values: %s",
spec_or_id)
sys.exit(1)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Import boots from disk",
version=0.1
)
parser.add_argument(
"--collection", "-c",
type=str,
help="The name of the job to import",
dest="collection",
required=True,
choices=ALL_COLLECTIONS
)
parser.add_argument(
"--field", "-f",
help=(
"The necessary fields to identify the elements to delete; "
"they must be defined as key=value pairs"
),
dest="fields",
action="append",
required=True
)
args = parser.parse_args()
collection = args.collection
fields = args.fields
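    # Build the query spec from the -f key=value pairs, e.g. -f job=next -f arch=arm (hypothetical example values)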
spec = {
k: v for k, v in parse_fields(fields)
}
if spec:
database = utils.db.get_db_connection({})
_delete_with_spec(collection, spec, database)
else:
utils.LOG.error("Don't know what to look for...")
sys.exit(1)
|
<commit_before><commit_msg>Add cli tool to delete documents.
Change-Id: I16c99d4b625e627c693c6354aaaa191c5076344b<commit_after>#!/usr/bin/python
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Basic command line script to delete documents."""
import argparse
import sys
import models
import utils
import utils.db
COLLECTIONS = [
models.BOOT_COLLECTION,
models.DEFCONFIG_COLLECTION,
models.JOB_COLLECTION,
models.LAB_COLLECTION
]
ALL_COLLECTIONS = [
"all"
]
ALL_COLLECTIONS.extend(COLLECTIONS)
def parse_fields(fields):
for field in fields:
if "=" in field:
yield field.split("=", 1)
else:
utils.LOG.error("Field %s is not valid, not considered", field)
def _delete_with_spec(collection, spec_or_id, database):
ret_val = None
if collection == "all":
utils.LOG.info("Deleting documents in all collections")
for coll in COLLECTIONS:
utils.LOG.info("Deleting from %s...", coll)
            ret_val = utils.db.delete(database[coll], spec_or_id)
else:
ret_val = utils.db.delete(database[collection], spec_or_id)
if ret_val == 200:
utils.LOG.info("Documents identified deleted: %s", spec_or_id)
else:
utils.LOG.error(
"Error deleting documents with the provided values: %s",
spec_or_id)
sys.exit(1)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Import boots from disk",
version=0.1
)
parser.add_argument(
"--collection", "-c",
type=str,
help="The name of the job to import",
dest="collection",
required=True,
choices=ALL_COLLECTIONS
)
parser.add_argument(
"--field", "-f",
help=(
"The necessary fields to identify the elements to delete; "
"they must be defined as key=value pairs"
),
dest="fields",
action="append",
required=True
)
args = parser.parse_args()
collection = args.collection
fields = args.fields
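    # Build the query spec from the -f key=value pairs, e.g. -f job=next -f arch=arm (hypothetical example values)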
spec = {
k: v for k, v in parse_fields(fields)
}
if spec:
database = utils.db.get_db_connection({})
_delete_with_spec(collection, spec, database)
else:
utils.LOG.error("Don't know what to look for...")
sys.exit(1)
|
|
163da52a48eb0d84cde47f7cfe99e1188350db47
|
mobib_basic.py
|
mobib_basic.py
|
#!/bin/env python3
import sys
from smartcard.System import readers
CALYPSO_CLA = [0x94]
SELECT_INS = [0xA4]
READ_RECORD_INS = [0xB2]
GET_RESPONSE_INS = [0xC0]
TICKETING_COUNTERS_FILE_ID = [0x20, 0x69]
def main():
local_readers = readers()
if local_readers:
if len(local_readers) == 1:
readerIndex = 0
else:
for i, reader in enumerate(local_readers):
print("[{}]: {}".format(i, reader))
readerIndex = int(input("Select a reader: "))
else:
print("No reader detected")
sys.exit(1)
calypso = local_readers[readerIndex].createConnection()
calypso.connect()
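    # Select the ticketing counters file: bytes are CLA, INS, P1, P2, Lc, the two-byte file ID, then Le (ISO 7816-4 layout)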
select_apdu = CALYPSO_CLA + SELECT_INS + [0x00, 0x00, 0x02] + TICKETING_COUNTERS_FILE_ID + [0x00]
data, sw1, sw2 = calypso.transmit(select_apdu)
if sw1 == 0x61:
get_response_apdu = [0x00] + GET_RESPONSE_INS + [0x00, 0x00, sw2]
        data, sw1, sw2 = calypso.transmit(get_response_apdu)
read_record_apdu = CALYPSO_CLA + READ_RECORD_INS + [0x01, 0x04, 0x1D]
data, sw1, sw2 = calypso.transmit(read_record_apdu)
if sw1 == 0x90:
# FIXME: each chunk of remaining trips stored on 3 bytes?
#chunks = [data[x:x+3] for x in range(0, len(data), 3)]
#total = 0
#for chunk in chunks:
# total += chunk[2]
#print("Number of remaining trips: {}".format(tot = chunks[i][2] for i in chunks))
print("Number of remaining trips: {}".format(sum(data)))
else:
print("Error getting number of remaining trips")
sys.exit(2)
if __name__ == '__main__':
main()
|
Add MOBIB Basic reader script
|
Add MOBIB Basic reader script
|
Python
|
mit
|
bparmentier/mobib-reader
|
Add MOBIB Basic reader script
|
#!/bin/env python3
import sys
from smartcard.System import readers
CALYPSO_CLA = [0x94]
SELECT_INS = [0xA4]
READ_RECORD_INS = [0xB2]
GET_RESPONSE_INS = [0xC0]
TICKETING_COUNTERS_FILE_ID = [0x20, 0x69]
def main():
local_readers = readers()
if local_readers:
if len(local_readers) == 1:
readerIndex = 0
else:
for i, reader in enumerate(local_readers):
print("[{}]: {}".format(i, reader))
readerIndex = int(input("Select a reader: "))
else:
print("No reader detected")
sys.exit(1)
calypso = local_readers[readerIndex].createConnection()
calypso.connect()
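    # Select the ticketing counters file: bytes are CLA, INS, P1, P2, Lc, the two-byte file ID, then Le (ISO 7816-4 layout)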
select_apdu = CALYPSO_CLA + SELECT_INS + [0x00, 0x00, 0x02] + TICKETING_COUNTERS_FILE_ID + [0x00]
data, sw1, sw2 = calypso.transmit(select_apdu)
if sw1 == 0x61:
get_response_apdu = [0x00] + GET_RESPONSE_INS + [0x00, 0x00, sw2]
        data, sw1, sw2 = calypso.transmit(get_response_apdu)
read_record_apdu = CALYPSO_CLA + READ_RECORD_INS + [0x01, 0x04, 0x1D]
data, sw1, sw2 = calypso.transmit(read_record_apdu)
if sw1 == 0x90:
# FIXME: each chunk of remaining trips stored on 3 bytes?
#chunks = [data[x:x+3] for x in range(0, len(data), 3)]
#total = 0
#for chunk in chunks:
# total += chunk[2]
#print("Number of remaining trips: {}".format(tot = chunks[i][2] for i in chunks))
print("Number of remaining trips: {}".format(sum(data)))
else:
print("Error getting number of remaining trips")
sys.exit(2)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add MOBIB Basic reader script<commit_after>
|
#!/bin/env python3
import sys
from smartcard.System import readers
CALYPSO_CLA = [0x94]
SELECT_INS = [0xA4]
READ_RECORD_INS = [0xB2]
GET_RESPONSE_INS = [0xC0]
TICKETING_COUNTERS_FILE_ID = [0x20, 0x69]
def main():
local_readers = readers()
if local_readers:
if len(local_readers) == 1:
readerIndex = 0
else:
for i, reader in enumerate(local_readers):
print("[{}]: {}".format(i, reader))
readerIndex = int(input("Select a reader: "))
else:
print("No reader detected")
sys.exit(1)
calypso = local_readers[readerIndex].createConnection()
calypso.connect()
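    # Select the ticketing counters file: bytes are CLA, INS, P1, P2, Lc, the two-byte file ID, then Le (ISO 7816-4 layout)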
select_apdu = CALYPSO_CLA + SELECT_INS + [0x00, 0x00, 0x02] + TICKETING_COUNTERS_FILE_ID + [0x00]
data, sw1, sw2 = calypso.transmit(select_apdu)
if sw1 == 0x61:
get_response_apdu = [0x00] + GET_RESPONSE_INS + [0x00, 0x00, sw2]
        data, sw1, sw2 = calypso.transmit(get_response_apdu)
read_record_apdu = CALYPSO_CLA + READ_RECORD_INS + [0x01, 0x04, 0x1D]
data, sw1, sw2 = calypso.transmit(read_record_apdu)
if sw1 == 0x90:
# FIXME: each chunk of remaining trips stored on 3 bytes?
#chunks = [data[x:x+3] for x in range(0, len(data), 3)]
#total = 0
#for chunk in chunks:
# total += chunk[2]
#print("Number of remaining trips: {}".format(tot = chunks[i][2] for i in chunks))
print("Number of remaining trips: {}".format(sum(data)))
else:
print("Error getting number of remaining trips")
sys.exit(2)
if __name__ == '__main__':
main()
|
Add MOBIB Basic reader script#!/bin/env python3
import sys
from smartcard.System import readers
CALYPSO_CLA = [0x94]
SELECT_INS = [0xA4]
READ_RECORD_INS = [0xB2]
GET_RESPONSE_INS = [0xC0]
TICKETING_COUNTERS_FILE_ID = [0x20, 0x69]
def main():
local_readers = readers()
if local_readers:
if len(local_readers) == 1:
readerIndex = 0
else:
for i, reader in enumerate(local_readers):
print("[{}]: {}".format(i, reader))
readerIndex = int(input("Select a reader: "))
else:
print("No reader detected")
sys.exit(1)
calypso = local_readers[readerIndex].createConnection()
calypso.connect()
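    # Select the ticketing counters file: bytes are CLA, INS, P1, P2, Lc, the two-byte file ID, then Le (ISO 7816-4 layout)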
select_apdu = CALYPSO_CLA + SELECT_INS + [0x00, 0x00, 0x02] + TICKETING_COUNTERS_FILE_ID + [0x00]
data, sw1, sw2 = calypso.transmit(select_apdu)
if sw1 == 0x61:
get_response_apdu = [0x00] + GET_RESPONSE_INS + [0x00, 0x00, sw2]
        data, sw1, sw2 = calypso.transmit(get_response_apdu)
read_record_apdu = CALYPSO_CLA + READ_RECORD_INS + [0x01, 0x04, 0x1D]
data, sw1, sw2 = calypso.transmit(read_record_apdu)
if sw1 == 0x90:
# FIXME: each chunk of remaining trips stored on 3 bytes?
#chunks = [data[x:x+3] for x in range(0, len(data), 3)]
#total = 0
#for chunk in chunks:
# total += chunk[2]
#print("Number of remaining trips: {}".format(tot = chunks[i][2] for i in chunks))
print("Number of remaining trips: {}".format(sum(data)))
else:
print("Error getting number of remaining trips")
sys.exit(2)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add MOBIB Basic reader script<commit_after>#!/bin/env python3
import sys
from smartcard.System import readers
CALYPSO_CLA = [0x94]
SELECT_INS = [0xA4]
READ_RECORD_INS = [0xB2]
GET_RESPONSE_INS = [0xC0]
TICKETING_COUNTERS_FILE_ID = [0x20, 0x69]
def main():
local_readers = readers()
if local_readers:
if len(local_readers) == 1:
readerIndex = 0
else:
for i, reader in enumerate(local_readers):
print("[{}]: {}".format(i, reader))
readerIndex = int(input("Select a reader: "))
else:
print("No reader detected")
sys.exit(1)
calypso = local_readers[readerIndex].createConnection()
calypso.connect()
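    # Select the ticketing counters file: bytes are CLA, INS, P1, P2, Lc, the two-byte file ID, then Le (ISO 7816-4 layout)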
select_apdu = CALYPSO_CLA + SELECT_INS + [0x00, 0x00, 0x02] + TICKETING_COUNTERS_FILE_ID + [0x00]
data, sw1, sw2 = calypso.transmit(select_apdu)
if sw1 == 0x61:
get_response_apdu = [0x00] + GET_RESPONSE_INS + [0x00, 0x00, sw2]
        data, sw1, sw2 = calypso.transmit(get_response_apdu)
read_record_apdu = CALYPSO_CLA + READ_RECORD_INS + [0x01, 0x04, 0x1D]
data, sw1, sw2 = calypso.transmit(read_record_apdu)
if sw1 == 0x90:
# FIXME: each chunk of remaining trips stored on 3 bytes?
#chunks = [data[x:x+3] for x in range(0, len(data), 3)]
#total = 0
#for chunk in chunks:
# total += chunk[2]
#print("Number of remaining trips: {}".format(tot = chunks[i][2] for i in chunks))
print("Number of remaining trips: {}".format(sum(data)))
else:
print("Error getting number of remaining trips")
sys.exit(2)
if __name__ == '__main__':
main()
|
|
f0392ebda49fa0222a3b317f50002d7e03659f47
|
bluebottle/funding_flutterwave/tests/test_states.py
|
bluebottle/funding_flutterwave/tests/test_states.py
|
from bluebottle.files.tests.factories import PrivateDocumentFactory
from bluebottle.funding.tests.factories import FundingFactory, PlainPayoutAccountFactory, \
BudgetLineFactory
from bluebottle.funding_flutterwave.tests.factories import FlutterwaveBankAccountFactory
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.initiatives.tests.factories import InitiativeFactory
class FlutterwavePayoutAccountTestCase(BluebottleTestCase):
def setUp(self):
self.initiative = InitiativeFactory.create(status='approved')
self.funding = FundingFactory.create(initiative=self.initiative)
self.document = PrivateDocumentFactory.create()
self.payout_account = PlainPayoutAccountFactory.create(document=self.document)
self.bank_account = FlutterwaveBankAccountFactory.create(connect_account=self.payout_account)
self.funding.bank_account = self.bank_account
self.funding.save()
BudgetLineFactory.create(activity=self.funding)
def test_approve_bank_account(self):
self.bank_account.states.verify(save=True)
self.bank_account.refresh_from_db()
self.assertEqual(self.bank_account.status, 'verified')
self.payout_account.refresh_from_db()
self.assertEqual(self.payout_account.status, 'verified')
self.funding.refresh_from_db()
self.assertEqual(self.funding.status, 'submitted')
|
Test we can approve Flutterwave bank accounts
|
Test we can approve Flutterwave bank accounts
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Test we can approve Flutterwave bank accounts
|
from bluebottle.files.tests.factories import PrivateDocumentFactory
from bluebottle.funding.tests.factories import FundingFactory, PlainPayoutAccountFactory, \
BudgetLineFactory
from bluebottle.funding_flutterwave.tests.factories import FlutterwaveBankAccountFactory
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.initiatives.tests.factories import InitiativeFactory
class FlutterwavePayoutAccountTestCase(BluebottleTestCase):
def setUp(self):
self.initiative = InitiativeFactory.create(status='approved')
self.funding = FundingFactory.create(initiative=self.initiative)
self.document = PrivateDocumentFactory.create()
self.payout_account = PlainPayoutAccountFactory.create(document=self.document)
self.bank_account = FlutterwaveBankAccountFactory.create(connect_account=self.payout_account)
self.funding.bank_account = self.bank_account
self.funding.save()
BudgetLineFactory.create(activity=self.funding)
def test_approve_bank_account(self):
self.bank_account.states.verify(save=True)
self.bank_account.refresh_from_db()
self.assertEqual(self.bank_account.status, 'verified')
self.payout_account.refresh_from_db()
self.assertEqual(self.payout_account.status, 'verified')
self.funding.refresh_from_db()
self.assertEqual(self.funding.status, 'submitted')
|
<commit_before><commit_msg>Test we can approve Flutterwave bank accounts<commit_after>
|
from bluebottle.files.tests.factories import PrivateDocumentFactory
from bluebottle.funding.tests.factories import FundingFactory, PlainPayoutAccountFactory, \
BudgetLineFactory
from bluebottle.funding_flutterwave.tests.factories import FlutterwaveBankAccountFactory
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.initiatives.tests.factories import InitiativeFactory
class FlutterwavePayoutAccountTestCase(BluebottleTestCase):
def setUp(self):
self.initiative = InitiativeFactory.create(status='approved')
self.funding = FundingFactory.create(initiative=self.initiative)
self.document = PrivateDocumentFactory.create()
self.payout_account = PlainPayoutAccountFactory.create(document=self.document)
self.bank_account = FlutterwaveBankAccountFactory.create(connect_account=self.payout_account)
self.funding.bank_account = self.bank_account
self.funding.save()
BudgetLineFactory.create(activity=self.funding)
def test_approve_bank_account(self):
self.bank_account.states.verify(save=True)
self.bank_account.refresh_from_db()
self.assertEqual(self.bank_account.status, 'verified')
self.payout_account.refresh_from_db()
self.assertEqual(self.payout_account.status, 'verified')
self.funding.refresh_from_db()
self.assertEqual(self.funding.status, 'submitted')
|
Test we can approve Flutterwave bank accountsfrom bluebottle.files.tests.factories import PrivateDocumentFactory
from bluebottle.funding.tests.factories import FundingFactory, PlainPayoutAccountFactory, \
BudgetLineFactory
from bluebottle.funding_flutterwave.tests.factories import FlutterwaveBankAccountFactory
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.initiatives.tests.factories import InitiativeFactory
class FlutterwavePayoutAccountTestCase(BluebottleTestCase):
def setUp(self):
self.initiative = InitiativeFactory.create(status='approved')
self.funding = FundingFactory.create(initiative=self.initiative)
self.document = PrivateDocumentFactory.create()
self.payout_account = PlainPayoutAccountFactory.create(document=self.document)
self.bank_account = FlutterwaveBankAccountFactory.create(connect_account=self.payout_account)
self.funding.bank_account = self.bank_account
self.funding.save()
BudgetLineFactory.create(activity=self.funding)
def test_approve_bank_account(self):
self.bank_account.states.verify(save=True)
self.bank_account.refresh_from_db()
self.assertEqual(self.bank_account.status, 'verified')
self.payout_account.refresh_from_db()
self.assertEqual(self.payout_account.status, 'verified')
self.funding.refresh_from_db()
self.assertEqual(self.funding.status, 'submitted')
|
<commit_before><commit_msg>Test we can approve Flutterwave bank accounts<commit_after>from bluebottle.files.tests.factories import PrivateDocumentFactory
from bluebottle.funding.tests.factories import FundingFactory, PlainPayoutAccountFactory, \
BudgetLineFactory
from bluebottle.funding_flutterwave.tests.factories import FlutterwaveBankAccountFactory
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.initiatives.tests.factories import InitiativeFactory
class FlutterwavePayoutAccountTestCase(BluebottleTestCase):
def setUp(self):
self.initiative = InitiativeFactory.create(status='approved')
self.funding = FundingFactory.create(initiative=self.initiative)
self.document = PrivateDocumentFactory.create()
self.payout_account = PlainPayoutAccountFactory.create(document=self.document)
self.bank_account = FlutterwaveBankAccountFactory.create(connect_account=self.payout_account)
self.funding.bank_account = self.bank_account
self.funding.save()
BudgetLineFactory.create(activity=self.funding)
def test_approve_bank_account(self):
self.bank_account.states.verify(save=True)
self.bank_account.refresh_from_db()
self.assertEqual(self.bank_account.status, 'verified')
self.payout_account.refresh_from_db()
self.assertEqual(self.payout_account.status, 'verified')
self.funding.refresh_from_db()
self.assertEqual(self.funding.status, 'submitted')
|
|
4fe4cad49367b462c2201b98cce4382bff3a0206
|
DataWrangling/CaseStudy/mapparser.py
|
DataWrangling/CaseStudy/mapparser.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Your task is to use the iterative parsing to process the map file and
find out not only what tags are there, but also how many, to get the
feeling on how much of which data you can expect to have in the map.
Fill out the count_tags function. It should return a dictionary with the
tag name as the key and number of times this tag can be encountered in
the map as value.
Note that your code will be tested with a different data file than the 'example.osm'
"""
import xml.etree.cElementTree as ET
import pprint
import os
def count_tags(filename):
# YOUR CODE HERE
tags = {}
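    # iterparse streams parse events, so large OSM files can be counted without loading the whole tree into memory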
for event, elem in ET.iterparse(filename):
if elem.tag in tags:
tags[elem.tag] += 1
else:
tags[elem.tag] = 1
return tags
def test():
os.chdir('./data')
tags = count_tags('example.osm')
pprint.pprint(tags)
assert tags == {'bounds': 1,
'member': 3,
'nd': 4,
'node': 20,
'osm': 1,
'relation': 1,
'tag': 7,
'way': 1}
if __name__ == "__main__":
test()
|
Add a script which uses the iterative parsing to process the map file and find out not only what tags are there, but also how many, to get a feeling for how much of which data you can expect to have in the map.
|
feat: Add a script which uses the iterative parsing to process the map file and find out not only what tags are there, but also how many, to get a feeling for how much of which data you can expect to have in the map.
|
Python
|
mit
|
aguijarro/DataSciencePython
|
feat: Add a script which uses the iterative parsing to process the map file and find out not only what tags are there, but also how many, to get a feeling for how much of which data you can expect to have in the map.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Your task is to use the iterative parsing to process the map file and
find out not only what tags are there, but also how many, to get the
feeling on how much of which data you can expect to have in the map.
Fill out the count_tags function. It should return a dictionary with the
tag name as the key and number of times this tag can be encountered in
the map as value.
Note that your code will be tested with a different data file than the 'example.osm'
"""
import xml.etree.cElementTree as ET
import pprint
import os
def count_tags(filename):
# YOUR CODE HERE
tags = {}
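    # iterparse streams parse events, so large OSM files can be counted without loading the whole tree into memory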
for event, elem in ET.iterparse(filename):
if elem.tag in tags:
tags[elem.tag] += 1
else:
tags[elem.tag] = 1
return tags
def test():
os.chdir('./data')
tags = count_tags('example.osm')
pprint.pprint(tags)
assert tags == {'bounds': 1,
'member': 3,
'nd': 4,
'node': 20,
'osm': 1,
'relation': 1,
'tag': 7,
'way': 1}
if __name__ == "__main__":
test()
|
<commit_before><commit_msg>feat: Add a script which use the iterative parsing to process the map file and find out not only what tags are there, but also how many, to get the feeling on how much of which data you can expect to have in the map.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Your task is to use the iterative parsing to process the map file and
find out not only what tags are there, but also how many, to get the
feeling on how much of which data you can expect to have in the map.
Fill out the count_tags function. It should return a dictionary with the
tag name as the key and number of times this tag can be encountered in
the map as value.
Note that your code will be tested with a different data file than the 'example.osm'
"""
import xml.etree.cElementTree as ET
import pprint
import os
def count_tags(filename):
# YOUR CODE HERE
tags = {}
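    # iterparse streams parse events, so large OSM files can be counted without loading the whole tree into memory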
for event, elem in ET.iterparse(filename):
if elem.tag in tags:
tags[elem.tag] += 1
else:
tags[elem.tag] = 1
return tags
def test():
os.chdir('./data')
tags = count_tags('example.osm')
pprint.pprint(tags)
assert tags == {'bounds': 1,
'member': 3,
'nd': 4,
'node': 20,
'osm': 1,
'relation': 1,
'tag': 7,
'way': 1}
if __name__ == "__main__":
test()
|
feat: Add a script which uses the iterative parsing to process the map file and find out not only what tags are there, but also how many, to get a feeling for how much of which data you can expect to have in the map.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Your task is to use the iterative parsing to process the map file and
find out not only what tags are there, but also how many, to get the
feeling on how much of which data you can expect to have in the map.
Fill out the count_tags function. It should return a dictionary with the
tag name as the key and number of times this tag can be encountered in
the map as value.
Note that your code will be tested with a different data file than the 'example.osm'
"""
import xml.etree.cElementTree as ET
import pprint
import os
def count_tags(filename):
# YOUR CODE HERE
tags = {}
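    # iterparse streams parse events, so large OSM files can be counted without loading the whole tree into memory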
for event, elem in ET.iterparse(filename):
if elem.tag in tags:
tags[elem.tag] += 1
else:
tags[elem.tag] = 1
return tags
def test():
os.chdir('./data')
tags = count_tags('example.osm')
pprint.pprint(tags)
assert tags == {'bounds': 1,
'member': 3,
'nd': 4,
'node': 20,
'osm': 1,
'relation': 1,
'tag': 7,
'way': 1}
if __name__ == "__main__":
test()
|
<commit_before><commit_msg>feat: Add a script which use the iterative parsing to process the map file and find out not only what tags are there, but also how many, to get the feeling on how much of which data you can expect to have in the map.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Your task is to use the iterative parsing to process the map file and
find out not only what tags are there, but also how many, to get the
feeling on how much of which data you can expect to have in the map.
Fill out the count_tags function. It should return a dictionary with the
tag name as the key and number of times this tag can be encountered in
the map as value.
Note that your code will be tested with a different data file than the 'example.osm'
"""
import xml.etree.cElementTree as ET
import pprint
import os
def count_tags(filename):
# YOUR CODE HERE
tags = {}
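    # iterparse streams parse events, so large OSM files can be counted without loading the whole tree into memory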
for event, elem in ET.iterparse(filename):
if elem.tag in tags:
tags[elem.tag] += 1
else:
tags[elem.tag] = 1
return tags
def test():
os.chdir('./data')
tags = count_tags('example.osm')
pprint.pprint(tags)
assert tags == {'bounds': 1,
'member': 3,
'nd': 4,
'node': 20,
'osm': 1,
'relation': 1,
'tag': 7,
'way': 1}
if __name__ == "__main__":
test()
|
|
3d18f6e3ba3519422aa30bd25f3511f62361d5ca
|
tests/chainer_tests/test_chainer_objects.py
|
tests/chainer_tests/test_chainer_objects.py
|
import importlib
import inspect
import pkgutil
import types
import six
import unittest
import chainer
from chainer import testing
def walk_modules():
root = chainer.__path__
for loader, modname, ispkg in pkgutil.walk_packages(root, 'chainer.'):
# Skip modules generated by protobuf.
if '_pb2' in modname:
continue
try:
mod = importlib.import_module(modname)
except ImportError:
continue
yield mod
def get_classes(module):
# Enumerate classes from a module
for name, o in module.__dict__.items():
if (inspect.isclass(o)
and o.__module__.startswith('chainer.')):
yield o
def get_functions(module):
# Enumerate functions from a module
# Normal functions
for k, o in module.__dict__.items():
if (isinstance(o, types.FunctionType)
and o.__module__.startswith('chainer.')):
yield o
# Methods defined in a class
for cls in get_classes(module):
if cls.__module__.startswith('chainer.'):
for k, o in cls.__dict__.items():
if inspect.isfunction(o):
yield o
def get_default_arguments(func):
# Retrieves the defaults arguments (names and values) of a function.
if six.PY2:
# Python 2
spec = inspect.getargspec(func)
if spec.defaults is not None:
n = len(spec.defaults)
for name, default_value in zip(spec.args[-n:], spec.defaults):
yield name, default_value
else:
# Python 3
signature = inspect.signature(func)
for name, param in signature.parameters.items():
if param.default is not inspect.Parameter.empty:
yield name, param.default
class TestFunctions(unittest.TestCase):
def test_no_mutable_default_args(self):
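        # Built-in mutable types flagged when used as defaults; e.g. def f(x=[]) shares one list across all calls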
type_blacklist = (list, dict)
badlist = []
# Collect mutable default arguments
for mod in walk_modules():
for func in get_functions(mod):
for arg_name, value in get_default_arguments(func):
if isinstance(value, type_blacklist):
badlist.append((func, arg_name, type(value)))
if len(badlist) > 0:
# Report the error
s = six.StringIO()
s.write(
'Some functions have mutable values as default values:\n\n')
for func, arg_name, value_type in badlist:
s.write('{}.{}: arg=\'{}\' type={}\n'.format(
func.__module__, func.__name__, arg_name, value_type))
assert False, s.getvalue()
testing.run_module(__name__, __file__)
|
Add test to ensure no mutable default arguments
|
Add test to ensure no mutable default arguments
|
Python
|
mit
|
wkentaro/chainer,niboshi/chainer,chainer/chainer,niboshi/chainer,wkentaro/chainer,pfnet/chainer,niboshi/chainer,hvy/chainer,wkentaro/chainer,chainer/chainer,okuta/chainer,wkentaro/chainer,okuta/chainer,okuta/chainer,chainer/chainer,chainer/chainer,hvy/chainer,niboshi/chainer,hvy/chainer,okuta/chainer,hvy/chainer
|
Add test to ensure no mutable default arguments
|
import importlib
import inspect
import pkgutil
import types
import six
import unittest
import chainer
from chainer import testing
def walk_modules():
root = chainer.__path__
for loader, modname, ispkg in pkgutil.walk_packages(root, 'chainer.'):
# Skip modules generated by protobuf.
if '_pb2' in modname:
continue
try:
mod = importlib.import_module(modname)
except ImportError:
continue
yield mod
def get_classes(module):
# Enumerate classes from a module
for name, o in module.__dict__.items():
if (inspect.isclass(o)
and o.__module__.startswith('chainer.')):
yield o
def get_functions(module):
# Enumerate functions from a module
# Normal functions
for k, o in module.__dict__.items():
if (isinstance(o, types.FunctionType)
and o.__module__.startswith('chainer.')):
yield o
# Methods defined in a class
for cls in get_classes(module):
if cls.__module__.startswith('chainer.'):
for k, o in cls.__dict__.items():
if inspect.isfunction(o):
yield o
def get_default_arguments(func):
# Retrieves the defaults arguments (names and values) of a function.
if six.PY2:
# Python 2
spec = inspect.getargspec(func)
if spec.defaults is not None:
n = len(spec.defaults)
for name, default_value in zip(spec.args[-n:], spec.defaults):
yield name, default_value
else:
# Python 3
signature = inspect.signature(func)
for name, param in signature.parameters.items():
if param.default is not inspect.Parameter.empty:
yield name, param.default
class TestFunctions(unittest.TestCase):
def test_no_mutable_default_args(self):
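        # Built-in mutable types flagged when used as defaults; e.g. def f(x=[]) shares one list across all calls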
type_blacklist = (list, dict)
badlist = []
# Collect mutable default arguments
for mod in walk_modules():
for func in get_functions(mod):
for arg_name, value in get_default_arguments(func):
if isinstance(value, type_blacklist):
badlist.append((func, arg_name, type(value)))
if len(badlist) > 0:
# Report the error
s = six.StringIO()
s.write(
'Some functions have mutable values as default values:\n\n')
for func, arg_name, value_type in badlist:
s.write('{}.{}: arg=\'{}\' type={}\n'.format(
func.__module__, func.__name__, arg_name, value_type))
assert False, s.getvalue()
testing.run_module(__name__, __file__)
|
<commit_before><commit_msg>Add test to ensure no mutable default arguments<commit_after>
|
import importlib
import inspect
import pkgutil
import types
import six
import unittest
import chainer
from chainer import testing
def walk_modules():
root = chainer.__path__
for loader, modname, ispkg in pkgutil.walk_packages(root, 'chainer.'):
# Skip modules generated by protobuf.
if '_pb2' in modname:
continue
try:
mod = importlib.import_module(modname)
except ImportError:
continue
yield mod
def get_classes(module):
# Enumerate classes from a module
for name, o in module.__dict__.items():
if (inspect.isclass(o)
and o.__module__.startswith('chainer.')):
yield o
def get_functions(module):
# Enumerate functions from a module
# Normal functions
for k, o in module.__dict__.items():
if (isinstance(o, types.FunctionType)
and o.__module__.startswith('chainer.')):
yield o
# Methods defined in a class
for cls in get_classes(module):
if cls.__module__.startswith('chainer.'):
for k, o in cls.__dict__.items():
if inspect.isfunction(o):
yield o
def get_default_arguments(func):
# Retrieves the defaults arguments (names and values) of a function.
if six.PY2:
# Python 2
spec = inspect.getargspec(func)
if spec.defaults is not None:
n = len(spec.defaults)
for name, default_value in zip(spec.args[-n:], spec.defaults):
yield name, default_value
else:
# Python 3
signature = inspect.signature(func)
for name, param in signature.parameters.items():
if param.default is not inspect.Parameter.empty:
yield name, param.default
class TestFunctions(unittest.TestCase):
def test_no_mutable_default_args(self):
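        # Built-in mutable types flagged when used as defaults; e.g. def f(x=[]) shares one list across all calls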
type_blacklist = (list, dict)
badlist = []
# Collect mutable default arguments
for mod in walk_modules():
for func in get_functions(mod):
for arg_name, value in get_default_arguments(func):
if isinstance(value, type_blacklist):
badlist.append((func, arg_name, type(value)))
if len(badlist) > 0:
# Report the error
s = six.StringIO()
s.write(
'Some functions have mutable values as default values:\n\n')
for func, arg_name, value_type in badlist:
s.write('{}.{}: arg=\'{}\' type={}\n'.format(
func.__module__, func.__name__, arg_name, value_type))
assert False, s.getvalue()
testing.run_module(__name__, __file__)
|
Add test to ensure no mutable default argumentsimport importlib
import inspect
import pkgutil
import types
import six
import unittest
import chainer
from chainer import testing
def walk_modules():
root = chainer.__path__
for loader, modname, ispkg in pkgutil.walk_packages(root, 'chainer.'):
# Skip modules generated by protobuf.
if '_pb2' in modname:
continue
try:
mod = importlib.import_module(modname)
except ImportError:
continue
yield mod
def get_classes(module):
# Enumerate classes from a module
for name, o in module.__dict__.items():
if (inspect.isclass(o)
and o.__module__.startswith('chainer.')):
yield o
def get_functions(module):
# Enumerate functions from a module
# Normal functions
for k, o in module.__dict__.items():
if (isinstance(o, types.FunctionType)
and o.__module__.startswith('chainer.')):
yield o
# Methods defined in a class
for cls in get_classes(module):
if cls.__module__.startswith('chainer.'):
for k, o in cls.__dict__.items():
if inspect.isfunction(o):
yield o
def get_default_arguments(func):
# Retrieves the defaults arguments (names and values) of a function.
if six.PY2:
# Python 2
spec = inspect.getargspec(func)
if spec.defaults is not None:
n = len(spec.defaults)
for name, default_value in zip(spec.args[-n:], spec.defaults):
yield name, default_value
else:
# Python 3
signature = inspect.signature(func)
for name, param in signature.parameters.items():
if param.default is not inspect.Parameter.empty:
yield name, param.default
class TestFunctions(unittest.TestCase):
def test_no_mutable_default_args(self):
type_blacklist = (list, dict)
badlist = []
# Collect mutable default arguments
for mod in walk_modules():
for func in get_functions(mod):
for arg_name, value in get_default_arguments(func):
if isinstance(value, type_blacklist):
badlist.append((func, arg_name, type(value)))
if len(badlist) > 0:
# Report the error
s = six.StringIO()
s.write(
'Some functions have mutable values as default values:\n\n')
for func, arg_name, value_type in badlist:
s.write('{}.{}: arg=\'{}\' type={}\n'.format(
func.__module__, func.__name__, arg_name, value_type))
assert False, s.getvalue()
testing.run_module(__name__, __file__)
|
<commit_before><commit_msg>Add test to ensure no mutable default arguments<commit_after>import importlib
import inspect
import pkgutil
import types
import six
import unittest
import chainer
from chainer import testing
def walk_modules():
root = chainer.__path__
for loader, modname, ispkg in pkgutil.walk_packages(root, 'chainer.'):
# Skip modules generated by protobuf.
if '_pb2' in modname:
continue
try:
mod = importlib.import_module(modname)
except ImportError:
continue
yield mod
def get_classes(module):
# Enumerate classes from a module
for name, o in module.__dict__.items():
if (inspect.isclass(o)
and o.__module__.startswith('chainer.')):
yield o
def get_functions(module):
# Enumerate functions from a module
# Normal functions
for k, o in module.__dict__.items():
if (isinstance(o, types.FunctionType)
and o.__module__.startswith('chainer.')):
yield o
# Methods defined in a class
for cls in get_classes(module):
if cls.__module__.startswith('chainer.'):
for k, o in cls.__dict__.items():
if inspect.isfunction(o):
yield o
def get_default_arguments(func):
# Retrieves the defaults arguments (names and values) of a function.
if six.PY2:
# Python 2
spec = inspect.getargspec(func)
if spec.defaults is not None:
n = len(spec.defaults)
for name, default_value in zip(spec.args[-n:], spec.defaults):
yield name, default_value
else:
# Python 3
signature = inspect.signature(func)
for name, param in signature.parameters.items():
if param.default is not inspect.Parameter.empty:
yield name, param.default
class TestFunctions(unittest.TestCase):
def test_no_mutable_default_args(self):
type_blacklist = (list, dict)
badlist = []
# Collect mutable default arguments
for mod in walk_modules():
for func in get_functions(mod):
for arg_name, value in get_default_arguments(func):
if isinstance(value, type_blacklist):
badlist.append((func, arg_name, type(value)))
if len(badlist) > 0:
# Report the error
s = six.StringIO()
s.write(
'Some functions have mutable values as default values:\n\n')
for func, arg_name, value_type in badlist:
s.write('{}.{}: arg=\'{}\' type={}\n'.format(
func.__module__, func.__name__, arg_name, value_type))
assert False, s.getvalue()
testing.run_module(__name__, __file__)
|
|
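The pitfall this commit's test guards against is easy to demonstrate. As an illustrative aside (not part of the commit), a mutable default is evaluated once at definition time, so state leaks between calls:

def append_item(item, bucket=[]):  # buggy on purpose: the default list is shared
    bucket.append(item)
    return bucket

assert append_item(1) == [1]
assert append_item(2) == [1, 2]  # surprise: the list persisted across calls

def append_item_safe(item, bucket=None):  # the idiom the test effectively enforces
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

assert append_item_safe(1) == [1]
assert append_item_safe(2) == [2]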
fcb07c7cd94f96cd533c55d18a657673f9eeac7f
|
SpicyTwitch/Log_tools.py
|
SpicyTwitch/Log_tools.py
|
# Imports-----------------------------------------------------------------------
import logging
import os
from inspect import stack, getmodulename
from . import Storage
# Base setup--------------------------------------------------------------------
log_to_stdout = True
log_to_file = True
logging_level = logging.DEBUG # TODO: Change this back to INFO!
log_format = '[%(asctime)s] [%(levelname)s] [%(module)s] (%(funcName)s): ' \
'%(message)s'
date_format = '%Y/%m/%d %I:%M:%S %p'
log_formatter = logging.Formatter(log_format, datefmt=date_format)
console_handler = logging.StreamHandler()
console_handler.setFormatter(log_formatter)
log_storage = os.path.join(Storage.primary_storage_directory, 'logs')
if not os.path.exists(log_storage):
os.mkdir(log_storage)
# Functions---------------------------------------------------------------------
def get_module_name() -> str:
return getmodulename(stack()[2][1])
def create_logger() -> logging.Logger:
python_module = get_module_name()
module_logger = logging.getLogger(python_module)
if log_to_stdout:
module_logger.addHandler(console_handler)
if log_to_file:
file_path = os.path.join(log_storage, python_module + '.log')
file_handler = logging.FileHandler(file_path)
file_handler.setFormatter(log_formatter)
module_logger.addHandler(file_handler)
module_logger.setLevel(logging_level)
return module_logger
|
Move log related functions over to this file
|
Move log related functions over to this file
Meant for global use in SpicyTwitch
|
Python
|
mit
|
NekoGamiYuki/SpicyTwitch
|
Move log related functions over to this file
Meant for global use in SpicyTwitch
|
# Imports-----------------------------------------------------------------------
import logging
import os
from inspect import stack, getmodulename
from . import Storage
# Base setup--------------------------------------------------------------------
log_to_stdout = True
log_to_file = True
logging_level = logging.DEBUG # TODO: Change this back to INFO!
log_format = '[%(asctime)s] [%(levelname)s] [%(module)s] (%(funcName)s): ' \
'%(message)s'
date_format = '%Y/%m/%d %I:%M:%S %p'
log_formatter = logging.Formatter(log_format, datefmt=date_format)
console_handler = logging.StreamHandler()
console_handler.setFormatter(log_formatter)
log_storage = os.path.join(Storage.primary_storage_directory, 'logs')
if not os.path.exists(log_storage):
os.mkdir(log_storage)
# Functions---------------------------------------------------------------------
def get_module_name() -> str:
return getmodulename(stack()[2][1])
def create_logger() -> logging.Logger:
python_module = get_module_name()
module_logger = logging.getLogger(python_module)
if log_to_stdout:
module_logger.addHandler(console_handler)
if log_to_file:
file_path = os.path.join(log_storage, python_module + '.log')
file_handler = logging.FileHandler(file_path)
file_handler.setFormatter(log_formatter)
module_logger.addHandler(file_handler)
module_logger.setLevel(logging_level)
return module_logger
|
<commit_before><commit_msg>Move log related functions over to this file
Meant for global use in SpicyTwitch<commit_after>
|
# Imports-----------------------------------------------------------------------
import logging
import os
from inspect import stack, getmodulename
from . import Storage
# Base setup--------------------------------------------------------------------
log_to_stdout = True
log_to_file = True
logging_level = logging.DEBUG # TODO: Change this back to INFO!
log_format = '[%(asctime)s] [%(levelname)s] [%(module)s] (%(funcName)s): ' \
'%(message)s'
date_format = '%Y/%m/%d %I:%M:%S %p'
log_formatter = logging.Formatter(log_format, datefmt=date_format)
console_handler = logging.StreamHandler()
console_handler.setFormatter(log_formatter)
log_storage = os.path.join(Storage.primary_storage_directory, 'logs')
if not os.path.exists(log_storage):
os.mkdir(log_storage)
# Functions---------------------------------------------------------------------
def get_module_name() -> str:
return getmodulename(stack()[2][1])
def create_logger() -> logging.Logger:
python_module = get_module_name()
module_logger = logging.getLogger(python_module)
if log_to_stdout:
module_logger.addHandler(console_handler)
if log_to_file:
file_path = os.path.join(log_storage, python_module + '.log')
file_handler = logging.FileHandler(file_path)
file_handler.setFormatter(log_formatter)
module_logger.addHandler(file_handler)
module_logger.setLevel(logging_level)
return module_logger
|
Move log related functions over to this file
Meant for global use in SpicyTwitch# Imports-----------------------------------------------------------------------
import logging
import os
from inspect import stack, getmodulename
from . import Storage
# Base setup--------------------------------------------------------------------
log_to_stdout = True
log_to_file = True
logging_level = logging.DEBUG # TODO: Change this back to INFO!
log_format = '[%(asctime)s] [%(levelname)s] [%(module)s] (%(funcName)s): ' \
'%(message)s'
date_format = '%Y/%m/%d %I:%M:%S %p'
log_formatter = logging.Formatter(log_format, datefmt=date_format)
console_handler = logging.StreamHandler()
console_handler.setFormatter(log_formatter)
log_storage = os.path.join(Storage.primary_storage_directory, 'logs')
if not os.path.exists(log_storage):
os.mkdir(log_storage)
# Functions---------------------------------------------------------------------
def get_module_name() -> str:
return getmodulename(stack()[2][1])
def create_logger() -> logging.Logger:
python_module = get_module_name()
module_logger = logging.getLogger(python_module)
if log_to_stdout:
module_logger.addHandler(console_handler)
if log_to_file:
file_path = os.path.join(log_storage, python_module + '.log')
file_handler = logging.FileHandler(file_path)
file_handler.setFormatter(log_formatter)
module_logger.addHandler(file_handler)
module_logger.setLevel(logging_level)
return module_logger
|
<commit_before><commit_msg>Move log related functions over to this file
Meant for global use in SpicyTwitch<commit_after># Imports-----------------------------------------------------------------------
import logging
import os
from inspect import stack, getmodulename
from . import Storage
# Base setup--------------------------------------------------------------------
log_to_stdout = True
log_to_file = True
logging_level = logging.DEBUG # TODO: Change this back to INFO!
log_format = '[%(asctime)s] [%(levelname)s] [%(module)s] (%(funcName)s): ' \
'%(message)s'
date_format = '%Y/%m/%d %I:%M:%S %p'
log_formatter = logging.Formatter(log_format, datefmt=date_format)
console_handler = logging.StreamHandler()
console_handler.setFormatter(log_formatter)
log_storage = os.path.join(Storage.primary_storage_directory, 'logs')
if not os.path.exists(log_storage):
os.mkdir(log_storage)
# Functions---------------------------------------------------------------------
def get_module_name() -> str:
return getmodulename(stack()[2][1])
def create_logger() -> logging.Logger:
python_module = get_module_name()
module_logger = logging.getLogger(python_module)
if log_to_stdout:
module_logger.addHandler(console_handler)
if log_to_file:
file_path = os.path.join(log_storage, python_module + '.log')
file_handler = logging.FileHandler(file_path)
file_handler.setFormatter(log_formatter)
module_logger.addHandler(file_handler)
module_logger.setLevel(logging_level)
return module_logger
|
|
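The notable trick in this module is naming each logger after its calling module via stack inspection. Below is a standalone, simplified sketch of the same idea (hypothetical names; it looks one frame up instead of two because there is no intermediate helper, and it omits the file handler):

import logging
from inspect import stack, getmodulename

def create_logger():
    # stack()[1][1] is the filename of the direct caller
    caller = getmodulename(stack()[1][1]) or '__main__'
    logger = logging.getLogger(caller)
    if not logger.handlers:  # avoid duplicate handlers on repeat calls
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter('[%(name)s] %(message)s'))
        logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    return logger

log = create_logger()
log.info('hello')  # prints: [<calling module>] hello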
4061e5db7097a680405282e371ab3bf07758648a
|
projects/DensePose/tests/test_setup.py
|
projects/DensePose/tests/test_setup.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import os
import unittest
from detectron2.config import get_cfg
from detectron2.engine import default_setup
from densepose import add_densepose_config
_CONFIG_DIR = "configs"
_QUICK_SCHEDULES_CONFIG_SUB_DIR = "quick_schedules"
_CONFIG_FILE_PREFIX = "densepose_"
_CONFIG_FILE_EXT = ".yaml"
def _get_config_dir():
return os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", _CONFIG_DIR)
def _collect_config_files(config_dir):
paths = []
for entry in os.listdir(config_dir):
_, ext = os.path.splitext(entry)
if ext != _CONFIG_FILE_EXT:
continue
if not entry.startswith(_CONFIG_FILE_PREFIX):
continue
path = os.path.join(config_dir, entry)
paths.append(path)
return paths
def _get_config_files():
config_dir = _get_config_dir()
return _collect_config_files(config_dir)
def _get_quick_schedules_config_files():
config_dir = _get_config_dir()
config_dir = os.path.join(config_dir, _QUICK_SCHEDULES_CONFIG_SUB_DIR)
return _collect_config_files(config_dir)
class TestSetup(unittest.TestCase):
def _test_setup(self, config_file):
cfg = get_cfg()
add_densepose_config(cfg)
cfg.merge_from_file(config_file)
cfg.freeze()
default_setup(cfg, {})
def test_setup_configs(self):
config_files = _get_config_files()
for config_file in config_files:
self._test_setup(config_file)
def test_setup_quick_schedules_configs(self):
config_files = _get_quick_schedules_config_files()
for config_file in config_files:
self._test_setup(config_file)
|
Add simple unit tests to validate all configs
|
Add simple unit tests to validate all configs
Summary: Add simple unit tests to validate all configs: as demonstrated by the previous diff, this can not hurt :)
Reviewed By: vkhalidov
Differential Revision: D20491383
fbshipit-source-id: 1c7b82dfbf9cde43d38ece64a5fb1692d1c03a9b
|
Python
|
apache-2.0
|
facebookresearch/detectron2,facebookresearch/detectron2,facebookresearch/detectron2
|
Add simple unit tests to validate all configs
Summary: Add simple unit tests to validate all configs: as demonstrated by the previous diff, this can not hurt :)
Reviewed By: vkhalidov
Differential Revision: D20491383
fbshipit-source-id: 1c7b82dfbf9cde43d38ece64a5fb1692d1c03a9b
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import os
import unittest
from detectron2.config import get_cfg
from detectron2.engine import default_setup
from densepose import add_densepose_config
_CONFIG_DIR = "configs"
_QUICK_SCHEDULES_CONFIG_SUB_DIR = "quick_schedules"
_CONFIG_FILE_PREFIX = "densepose_"
_CONFIG_FILE_EXT = ".yaml"
def _get_config_dir():
return os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", _CONFIG_DIR)
def _collect_config_files(config_dir):
paths = []
for entry in os.listdir(config_dir):
_, ext = os.path.splitext(entry)
if ext != _CONFIG_FILE_EXT:
continue
if not entry.startswith(_CONFIG_FILE_PREFIX):
continue
path = os.path.join(config_dir, entry)
paths.append(path)
return paths
def _get_config_files():
config_dir = _get_config_dir()
return _collect_config_files(config_dir)
def _get_quick_schedules_config_files():
config_dir = _get_config_dir()
config_dir = os.path.join(config_dir, _QUICK_SCHEDULES_CONFIG_SUB_DIR)
return _collect_config_files(config_dir)
class TestSetup(unittest.TestCase):
def _test_setup(self, config_file):
cfg = get_cfg()
add_densepose_config(cfg)
cfg.merge_from_file(config_file)
cfg.freeze()
default_setup(cfg, {})
def test_setup_configs(self):
config_files = _get_config_files()
for config_file in config_files:
self._test_setup(config_file)
def test_setup_quick_schedules_configs(self):
config_files = _get_quick_schedules_config_files()
for config_file in config_files:
self._test_setup(config_file)
|
<commit_before><commit_msg>Add simple unit tests to validate all configs
Summary: Add simple unit tests to validate all configs: as demonstrated by the previous diff, this can not hurt :)
Reviewed By: vkhalidov
Differential Revision: D20491383
fbshipit-source-id: 1c7b82dfbf9cde43d38ece64a5fb1692d1c03a9b<commit_after>
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import os
import unittest
from detectron2.config import get_cfg
from detectron2.engine import default_setup
from densepose import add_densepose_config
_CONFIG_DIR = "configs"
_QUICK_SCHEDULES_CONFIG_SUB_DIR = "quick_schedules"
_CONFIG_FILE_PREFIX = "densepose_"
_CONFIG_FILE_EXT = ".yaml"
def _get_config_dir():
return os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", _CONFIG_DIR)
def _collect_config_files(config_dir):
paths = []
for entry in os.listdir(config_dir):
_, ext = os.path.splitext(entry)
if ext != _CONFIG_FILE_EXT:
continue
if not entry.startswith(_CONFIG_FILE_PREFIX):
continue
path = os.path.join(config_dir, entry)
paths.append(path)
return paths
def _get_config_files():
config_dir = _get_config_dir()
return _collect_config_files(config_dir)
def _get_quick_schedules_config_files():
config_dir = _get_config_dir()
config_dir = os.path.join(config_dir, _QUICK_SCHEDULES_CONFIG_SUB_DIR)
return _collect_config_files(config_dir)
class TestSetup(unittest.TestCase):
def _test_setup(self, config_file):
cfg = get_cfg()
add_densepose_config(cfg)
cfg.merge_from_file(config_file)
cfg.freeze()
default_setup(cfg, {})
def test_setup_configs(self):
config_files = _get_config_files()
for config_file in config_files:
self._test_setup(config_file)
def test_setup_quick_schedules_configs(self):
config_files = _get_quick_schedules_config_files()
for config_file in config_files:
self._test_setup(config_file)
|
Add simple unit tests to validate all configs
Summary: Add simple unit tests to validate all configs: as demonstrated by the previous diff, this can not hurt :)
Reviewed By: vkhalidov
Differential Revision: D20491383
fbshipit-source-id: 1c7b82dfbf9cde43d38ece64a5fb1692d1c03a9b# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import os
import unittest
from detectron2.config import get_cfg
from detectron2.engine import default_setup
from densepose import add_densepose_config
_CONFIG_DIR = "configs"
_QUICK_SCHEDULES_CONFIG_SUB_DIR = "quick_schedules"
_CONFIG_FILE_PREFIX = "densepose_"
_CONFIG_FILE_EXT = ".yaml"
def _get_config_dir():
return os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", _CONFIG_DIR)
def _collect_config_files(config_dir):
paths = []
for entry in os.listdir(config_dir):
_, ext = os.path.splitext(entry)
if ext != _CONFIG_FILE_EXT:
continue
if not entry.startswith(_CONFIG_FILE_PREFIX):
continue
path = os.path.join(config_dir, entry)
paths.append(path)
return paths
def _get_config_files():
config_dir = _get_config_dir()
return _collect_config_files(config_dir)
def _get_quick_schedules_config_files():
config_dir = _get_config_dir()
config_dir = os.path.join(config_dir, _QUICK_SCHEDULES_CONFIG_SUB_DIR)
return _collect_config_files(config_dir)
class TestSetup(unittest.TestCase):
def _test_setup(self, config_file):
cfg = get_cfg()
add_densepose_config(cfg)
cfg.merge_from_file(config_file)
cfg.freeze()
default_setup(cfg, {})
def test_setup_configs(self):
config_files = _get_config_files()
for config_file in config_files:
self._test_setup(config_file)
def test_setup_quick_schedules_configs(self):
config_files = _get_quick_schedules_config_files()
for config_file in config_files:
self._test_setup(config_file)
|
<commit_before><commit_msg>Add simple unit tests to validate all configs
Summary: Add simple unit tests to validate all configs: as demonstrated by the previous diff, this can not hurt :)
Reviewed By: vkhalidov
Differential Revision: D20491383
fbshipit-source-id: 1c7b82dfbf9cde43d38ece64a5fb1692d1c03a9b<commit_after># Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import os
import unittest
from detectron2.config import get_cfg
from detectron2.engine import default_setup
from densepose import add_densepose_config
_CONFIG_DIR = "configs"
_QUICK_SCHEDULES_CONFIG_SUB_DIR = "quick_schedules"
_CONFIG_FILE_PREFIX = "densepose_"
_CONFIG_FILE_EXT = ".yaml"
def _get_config_dir():
return os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", _CONFIG_DIR)
def _collect_config_files(config_dir):
paths = []
for entry in os.listdir(config_dir):
_, ext = os.path.splitext(entry)
if ext != _CONFIG_FILE_EXT:
continue
if not entry.startswith(_CONFIG_FILE_PREFIX):
continue
path = os.path.join(config_dir, entry)
paths.append(path)
return paths
def _get_config_files():
config_dir = _get_config_dir()
return _collect_config_files(config_dir)
def _get_quick_schedules_config_files():
config_dir = _get_config_dir()
config_dir = os.path.join(config_dir, _QUICK_SCHEDULES_CONFIG_SUB_DIR)
return _collect_config_files(config_dir)
class TestSetup(unittest.TestCase):
def _test_setup(self, config_file):
cfg = get_cfg()
add_densepose_config(cfg)
cfg.merge_from_file(config_file)
cfg.freeze()
default_setup(cfg, {})
def test_setup_configs(self):
config_files = _get_config_files()
for config_file in config_files:
self._test_setup(config_file)
def test_setup_quick_schedules_configs(self):
config_files = _get_quick_schedules_config_files()
for config_file in config_files:
self._test_setup(config_file)
|
|
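The heart of this test is the filename filtering in _collect_config_files. A self-contained check of the same filtering logic (the directory contents below are invented for the example):

import os
import tempfile

def collect(config_dir, prefix='densepose_', ext='.yaml'):
    return sorted(
        os.path.join(config_dir, entry)
        for entry in os.listdir(config_dir)
        if entry.startswith(prefix) and os.path.splitext(entry)[1] == ext
    )

with tempfile.TemporaryDirectory() as d:
    for name in ('densepose_rcnn_R_50_FPN_s1x.yaml', 'Base-RCNN.yaml', 'notes.txt'):
        open(os.path.join(d, name), 'w').close()
    found = [os.path.basename(p) for p in collect(d)]
    assert found == ['densepose_rcnn_R_50_FPN_s1x.yaml']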
e07c699caf699852c98b3396150b343553a386c4
|
server/tests/api/test_language_api.py
|
server/tests/api/test_language_api.py
|
import json
from server.tests.helpers import FlaskTestCase, fixtures
class TestLanguageAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_languages(self):
"""Test GET /api/languages endpoint with no data"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] == 0
assert response.status_code == 200
@fixtures('single_language.json')
def test_get_one_language(self):
"""Test GET /api/languages endpoint with a single language"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] == 1
assert response.status_code == 200
@fixtures('many_languages.json')
def test_get_multiple_languages(self):
"""Test GET /api/languages endpoint with multiple languages"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] > 0
assert response.status_code == 200
@fixtures('many_languages.json')
def test_get_no_language_by_id(self):
"""Test GET /api/languages/(int:id) for missing language"""
response, data = self.api_request('get', '/api/languages/1000')
assert response.status_code == 404
@fixtures('many_languages.json')
def test_language_by_id(self):
"""Test GET /api/languages(int:id) for existing language"""
response, data = self.api_request('get', '/api/languages/1')
assert data['language'] == 'Python'
assert response.status_code == 200
@fixtures('single_user.json')
def test_post_language(self):
"""Tests POST to /api/languages for an authorized user"""
self.login()
data = {
'language': 'some_value'
}
response = self.app.post(
'/api/languages',
data=json.dumps(data)
)
assert response.status_code == 201
@fixtures('base.json')
def test_post_language_unauthorized(self):
"""Tests POST to /api/languages for an unauthorized user"""
data = {
'language': 'some_value'
}
response = self.app.post(
'/api/languages',
data=json.dumps(data)
)
assert response.status_code == 401
|
Add tests for language api
|
Add tests for language api
|
Python
|
mit
|
ganemone/ontheside,ganemone/ontheside,ganemone/ontheside
|
Add tests for language api
|
import json
from server.tests.helpers import FlaskTestCase, fixtures
class TestLanguageAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_languages(self):
"""Test GET /api/languages endpoint with no data"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] == 0
assert response.status_code == 200
@fixtures('single_language.json')
def test_get_one_language(self):
"""Test GET /api/languages endpoint with a single language"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] == 1
assert response.status_code == 200
@fixtures('many_languages.json')
def test_get_multiple_languages(self):
"""Test GET /api/languages endpoint with multiple languages"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] > 0
assert response.status_code == 200
@fixtures('many_languages.json')
def test_get_no_language_by_id(self):
"""Test GET /api/languages/(int:id) for missing language"""
response, data = self.api_request('get', '/api/languages/1000')
assert response.status_code == 404
@fixtures('many_languages.json')
def test_language_by_id(self):
"""Test GET /api/languages(int:id) for existing language"""
response, data = self.api_request('get', '/api/languages/1')
assert data['language'] == 'Python'
assert response.status_code == 200
@fixtures('single_user.json')
def test_post_language(self):
"""Tests POST to /api/languages for an authorized user"""
self.login()
data = {
'language': 'some_value'
}
response = self.app.post(
'/api/languages',
data=json.dumps(data)
)
assert response.status_code == 201
@fixtures('base.json')
def test_post_language_unauthorized(self):
"""Tests POST to /api/languages for an unauthorized user"""
data = {
'language': 'some_value'
}
response = self.app.post(
'/api/languages',
data=json.dumps(data)
)
assert response.status_code == 401
|
<commit_before><commit_msg>Add tests for language api<commit_after>
|
import json
from server.tests.helpers import FlaskTestCase, fixtures
class TestLanguageAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_languages(self):
"""Test GET /api/languages endpoint with no data"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] == 0
assert response.status_code == 200
@fixtures('single_language.json')
def test_get_one_language(self):
"""Test GET /api/languages endpoint with a single language"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] == 1
assert response.status_code == 200
@fixtures('many_languages.json')
def test_get_multiple_languages(self):
"""Test GET /api/languages endpoint with multiple languages"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] > 0
assert response.status_code == 200
@fixtures('many_languages.json')
def test_get_no_language_by_id(self):
"""Test GET /api/languages/(int:id) for missing language"""
response, data = self.api_request('get', '/api/languages/1000')
assert response.status_code == 404
@fixtures('many_languages.json')
def test_language_by_id(self):
"""Test GET /api/languages(int:id) for existing language"""
response, data = self.api_request('get', '/api/languages/1')
assert data['language'] == 'Python'
assert response.status_code == 200
@fixtures('single_user.json')
def test_post_language(self):
"""Tests POST to /api/languages for an authorized user"""
self.login()
data = {
'language': 'some_value'
}
response = self.app.post(
'/api/languages',
data=json.dumps(data)
)
assert response.status_code == 201
@fixtures('base.json')
def test_post_language_unauthorized(self):
"""Tests POST to /api/languages for an unauthorized user"""
data = {
'language': 'some_value'
}
response = self.app.post(
'/api/languages',
data=json.dumps(data)
)
assert response.status_code == 401
|
Add tests for language apiimport json
from server.tests.helpers import FlaskTestCase, fixtures
class TestLanguageAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_languages(self):
"""Test GET /api/languages endpoint with no data"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] == 0
assert response.status_code == 200
@fixtures('single_language.json')
def test_get_one_language(self):
"""Test GET /api/languages endpoint with a single language"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] == 1
assert response.status_code == 200
@fixtures('many_languages.json')
def test_get_multiple_languages(self):
"""Test GET /api/languages endpoint with multiple languages"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] > 0
assert response.status_code == 200
@fixtures('many_languages.json')
def test_get_no_language_by_id(self):
"""Test GET /api/languages/(int:id) for missing language"""
response, data = self.api_request('get', '/api/languages/1000')
assert response.status_code == 404
@fixtures('many_languages.json')
def test_language_by_id(self):
"""Test GET /api/languages(int:id) for existing language"""
response, data = self.api_request('get', '/api/languages/1')
assert data['language'] == 'Python'
assert response.status_code == 200
@fixtures('single_user.json')
def test_post_language(self):
"""Tests POST to /api/languages for an authorized user"""
self.login()
data = {
'language': 'some_value'
}
response = self.app.post(
'/api/languages',
data=json.dumps(data)
)
assert response.status_code == 201
@fixtures('base.json')
def test_post_language_unauthorized(self):
"""Tests POST to /api/languages for an unauthorized user"""
data = {
'language': 'some_value'
}
response = self.app.post(
'/api/languages',
data=json.dumps(data)
)
assert response.status_code == 401
|
<commit_before><commit_msg>Add tests for language api<commit_after>import json
from server.tests.helpers import FlaskTestCase, fixtures
class TestLanguageAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_languages(self):
"""Test GET /api/languages endpoint with no data"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] == 0
assert response.status_code == 200
@fixtures('single_language.json')
def test_get_one_language(self):
"""Test GET /api/languages endpoint with a single language"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] == 1
assert response.status_code == 200
@fixtures('many_languages.json')
def test_get_multiple_languages(self):
"""Test GET /api/languages endpoint with multiple languages"""
response, data = self.api_request('get', '/api/languages')
assert data['num_results'] > 0
assert response.status_code == 200
@fixtures('many_languages.json')
def test_get_no_language_by_id(self):
"""Test GET /api/languages/(int:id) for missing language"""
response, data = self.api_request('get', '/api/languages/1000')
assert response.status_code == 404
@fixtures('many_languages.json')
def test_language_by_id(self):
"""Test GET /api/languages(int:id) for existing language"""
response, data = self.api_request('get', '/api/languages/1')
assert data['language'] == 'Python'
assert response.status_code == 200
@fixtures('single_user.json')
def test_post_language(self):
"""Tests POST to /api/languages for an authorized user"""
self.login()
data = {
'language': 'some_value'
}
response = self.app.post(
'/api/languages',
data=json.dumps(data)
)
assert response.status_code == 201
@fixtures('base.json')
def test_post_language_unauthorized(self):
"""Tests POST to /api/languages for an unauthorized user"""
data = {
'language': 'some_value'
}
response = self.app.post(
'/api/languages',
data=json.dumps(data)
)
assert response.status_code == 401
|
|
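These tests depend on an api_request helper from FlaskTestCase that is not shown in the commit. A hedged sketch of what such a helper typically looks like (assumptions: self.app is a Flask test client and responses carry JSON bodies; the project's real helper may differ):

import json

class FlaskTestCaseSketch:
    def api_request(self, method, url, data=None):
        func = getattr(self.app, method)  # e.g. self.app.get or self.app.post
        payload = json.dumps(data) if data is not None else None
        response = func(url, data=payload)
        return response, json.loads(response.data)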
dcca93fbb66e5cd8bf0e0500aca3f187922e8806
|
scrapy_espn/scrapy_espn/spiders/team_spider.py
|
scrapy_espn/scrapy_espn/spiders/team_spider.py
|
import scrapy
class TeamSpider(scrapy.Spider):
name = "team"
start_urls = [
'http://www.espn.com/mens-college-basketball/teams',
]
def parse(self, response):
for conf in response.css('ul'):
for team in conf.css('li'):
yield {
'team':team.css('h5 a::text').extract(),
'id':team.css('h5 a::attr(href)').extract()[0].split('/')[7]
}
|
Add in team id spider
|
Add in team id spider
|
Python
|
mit
|
danmoeller/ncaa-bball-attendance,danmoeller/ncaa-bball-attendance,danmoeller/ncaa-bball-attendance
|
Add in team id spider
|
import scrapy
class TeamSpider(scrapy.Spider):
name = "team"
start_urls = [
'http://www.espn.com/mens-college-basketball/teams',
]
def parse(self, response):
for conf in response.css('ul'):
for team in conf.css('li'):
yield {
'team':team.css('h5 a::text').extract(),
'id':team.css('h5 a::attr(href)').extract()[0].split('/')[7]
}
|
<commit_before><commit_msg>Add in team id spider<commit_after>
|
import scrapy
class TeamSpider(scrapy.Spider):
name = "team"
start_urls = [
'http://www.espn.com/mens-college-basketball/teams',
]
def parse(self, response):
for conf in response.css('ul'):
for team in conf.css('li'):
yield {
'team':team.css('h5 a::text').extract(),
'id':team.css('h5 a::attr(href)').extract()[0].split('/')[7]
}
|
Add in team id spiderimport scrapy
class TeamSpider(scrapy.Spider):
name = "team"
start_urls = [
'http://www.espn.com/mens-college-basketball/teams',
]
def parse(self, response):
for conf in response.css('ul'):
for team in conf.css('li'):
yield {
'team':team.css('h5 a::text').extract(),
'id':team.css('h5 a::attr(href)').extract()[0].split('/')[7]
}
|
<commit_before><commit_msg>Add in team id spider<commit_after>import scrapy
class TeamSpider(scrapy.Spider):
name = "team"
start_urls = [
'http://www.espn.com/mens-college-basketball/teams',
]
def parse(self, response):
for conf in response.css('ul'):
for team in conf.css('li'):
yield {
'team':team.css('h5 a::text').extract(),
'id':team.css('h5 a::attr(href)').extract()[0].split('/')[7]
}
|
|
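The magic index 7 in the spider encodes ESPN's team URL layout. With a made-up but representative href, the slicing works out as follows:

href = 'http://www.espn.com/mens-college-basketball/team/_/id/150/duke-blue-devils'
parts = href.split('/')
# ['http:', '', 'www.espn.com', 'mens-college-basketball',
#  'team', '_', 'id', '150', 'duke-blue-devils']
assert parts[7] == '150'  # position 7 holds the numeric team id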
458cf526a4ebb72b4fad84e8cd2b665e0f093c1b
|
senlin/tests/functional/test_cluster_health.py
|
senlin/tests/functional/test_cluster_health.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.tests.functional import api as test_api
from senlin.tests.functional import base
from senlin.tests.functional.utils import test_utils
class TestClusterHealth(base.SenlinFunctionalTest):
def setUp(self):
super(TestClusterHealth, self).setUp()
# Create profile
self.profile = test_api.create_profile(
self.client, test_utils.random_name('profile'),
test_utils.spec_nova_server)
def tearDown(self):
# Delete profile
test_api.delete_profile(self.client, self.profile['id'])
super(TestClusterHealth, self).tearDown()
def test_cluster_check_recover(self):
# Create cluster
desired_capacity = 3
min_size = 2
max_size = 5
cluster = test_api.create_cluster(self.client,
test_utils.random_name('cluster'),
self.profile['id'], desired_capacity,
min_size, max_size)
cluster = test_utils.wait_for_status(test_api.get_cluster, self.client,
cluster['id'], 'ACTIVE')
# Check cluster health status
action_id = test_api.action_cluster(self.client, cluster['id'],
'check')
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
cluster = test_api.get_cluster(self.client, cluster['id'])
self.assertEqual('ACTIVE', cluster['status'])
# Perform cluster recovering operation
action_id = test_api.action_cluster(self.client, cluster['id'],
'recover')
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
action_id = test_api.action_cluster(self.client, cluster['id'],
'recover',
{'operation': 'REBUILD'})
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
# Delete cluster
test_api.delete_cluster(self.client, cluster['id'])
cluster = test_utils.wait_for_delete(test_api.get_cluster, self.client,
cluster['id'])
|
Add functional test for cluster check recover
|
Add functional test for cluster check recover
Change-Id: Icb4ef7f754ba3b5764cf8f6d8f5999f0e2d2f3c2
|
Python
|
apache-2.0
|
openstack/senlin,openstack/senlin,tengqm/senlin-container,openstack/senlin,stackforge/senlin,stackforge/senlin,tengqm/senlin-container
|
Add functional test for cluster check recover
Change-Id: Icb4ef7f754ba3b5764cf8f6d8f5999f0e2d2f3c2
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.tests.functional import api as test_api
from senlin.tests.functional import base
from senlin.tests.functional.utils import test_utils
class TestClusterHealth(base.SenlinFunctionalTest):
def setUp(self):
super(TestClusterHealth, self).setUp()
# Create profile
self.profile = test_api.create_profile(
self.client, test_utils.random_name('profile'),
test_utils.spec_nova_server)
def tearDown(self):
# Delete profile
test_api.delete_profile(self.client, self.profile['id'])
super(TestClusterHealth, self).tearDown()
def test_cluster_check_recover(self):
# Create cluster
desired_capacity = 3
min_size = 2
max_size = 5
cluster = test_api.create_cluster(self.client,
test_utils.random_name('cluster'),
self.profile['id'], desired_capacity,
min_size, max_size)
cluster = test_utils.wait_for_status(test_api.get_cluster, self.client,
cluster['id'], 'ACTIVE')
# Check cluster health status
action_id = test_api.action_cluster(self.client, cluster['id'],
'check')
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
cluster = test_api.get_cluster(self.client, cluster['id'])
self.assertEqual('ACTIVE', cluster['status'])
# Perform cluster recovering operation
action_id = test_api.action_cluster(self.client, cluster['id'],
'recover')
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
action_id = test_api.action_cluster(self.client, cluster['id'],
'recover',
{'operation': 'REBUILD'})
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
# Delete cluster
test_api.delete_cluster(self.client, cluster['id'])
cluster = test_utils.wait_for_delete(test_api.get_cluster, self.client,
cluster['id'])
|
<commit_before><commit_msg>Add functional test for cluster check recover
Change-Id: Icb4ef7f754ba3b5764cf8f6d8f5999f0e2d2f3c2<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.tests.functional import api as test_api
from senlin.tests.functional import base
from senlin.tests.functional.utils import test_utils
class TestClusterHealth(base.SenlinFunctionalTest):
def setUp(self):
super(TestClusterHealth, self).setUp()
# Create profile
self.profile = test_api.create_profile(
self.client, test_utils.random_name('profile'),
test_utils.spec_nova_server)
def tearDown(self):
# Delete profile
test_api.delete_profile(self.client, self.profile['id'])
super(TestClusterHealth, self).tearDown()
def test_cluster_check_recover(self):
# Create cluster
desired_capacity = 3
min_size = 2
max_size = 5
cluster = test_api.create_cluster(self.client,
test_utils.random_name('cluster'),
self.profile['id'], desired_capacity,
min_size, max_size)
cluster = test_utils.wait_for_status(test_api.get_cluster, self.client,
cluster['id'], 'ACTIVE')
# Check cluster health status
action_id = test_api.action_cluster(self.client, cluster['id'],
'check')
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
cluster = test_api.get_cluster(self.client, cluster['id'])
self.assertEqual('ACTIVE', cluster['status'])
# Perform cluster recovering operation
action_id = test_api.action_cluster(self.client, cluster['id'],
'recover')
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
action_id = test_api.action_cluster(self.client, cluster['id'],
'recover',
{'operation': 'REBUILD'})
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
# Delete cluster
test_api.delete_cluster(self.client, cluster['id'])
cluster = test_utils.wait_for_delete(test_api.get_cluster, self.client,
cluster['id'])
|
Add functional test for cluster check recover
Change-Id: Icb4ef7f754ba3b5764cf8f6d8f5999f0e2d2f3c2# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.tests.functional import api as test_api
from senlin.tests.functional import base
from senlin.tests.functional.utils import test_utils
class TestClusterHealth(base.SenlinFunctionalTest):
def setUp(self):
super(TestClusterHealth, self).setUp()
# Create profile
self.profile = test_api.create_profile(
self.client, test_utils.random_name('profile'),
test_utils.spec_nova_server)
def tearDown(self):
# Delete profile
test_api.delete_profile(self.client, self.profile['id'])
super(TestClusterHealth, self).tearDown()
def test_cluster_check_recover(self):
# Create cluster
desired_capacity = 3
min_size = 2
max_size = 5
cluster = test_api.create_cluster(self.client,
test_utils.random_name('cluster'),
self.profile['id'], desired_capacity,
min_size, max_size)
cluster = test_utils.wait_for_status(test_api.get_cluster, self.client,
cluster['id'], 'ACTIVE')
# Check cluster health status
action_id = test_api.action_cluster(self.client, cluster['id'],
'check')
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
cluster = test_api.get_cluster(self.client, cluster['id'])
self.assertEqual('ACTIVE', cluster['status'])
# Perform cluster recovering operation
action_id = test_api.action_cluster(self.client, cluster['id'],
'recover')
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
action_id = test_api.action_cluster(self.client, cluster['id'],
'recover',
{'operation': 'REBUILD'})
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
# Delete cluster
test_api.delete_cluster(self.client, cluster['id'])
cluster = test_utils.wait_for_delete(test_api.get_cluster, self.client,
cluster['id'])
|
<commit_before><commit_msg>Add functional test for cluster check recover
Change-Id: Icb4ef7f754ba3b5764cf8f6d8f5999f0e2d2f3c2<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.tests.functional import api as test_api
from senlin.tests.functional import base
from senlin.tests.functional.utils import test_utils
class TestClusterHealth(base.SenlinFunctionalTest):
def setUp(self):
super(TestClusterHealth, self).setUp()
# Create profile
self.profile = test_api.create_profile(
self.client, test_utils.random_name('profile'),
test_utils.spec_nova_server)
def tearDown(self):
# Delete profile
test_api.delete_profile(self.client, self.profile['id'])
super(TestClusterHealth, self).tearDown()
def test_cluster_check_recover(self):
# Create cluster
desired_capacity = 3
min_size = 2
max_size = 5
cluster = test_api.create_cluster(self.client,
test_utils.random_name('cluster'),
self.profile['id'], desired_capacity,
min_size, max_size)
cluster = test_utils.wait_for_status(test_api.get_cluster, self.client,
cluster['id'], 'ACTIVE')
# Check cluster health status
action_id = test_api.action_cluster(self.client, cluster['id'],
'check')
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
cluster = test_api.get_cluster(self.client, cluster['id'])
self.assertEqual('ACTIVE', cluster['status'])
# Perform cluster recovering operation
action_id = test_api.action_cluster(self.client, cluster['id'],
'recover')
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
action_id = test_api.action_cluster(self.client, cluster['id'],
'recover',
{'operation': 'REBUILD'})
test_utils.wait_for_status(test_api.get_action, self.client,
action_id, 'SUCCEEDED')
# Delete cluster
test_api.delete_cluster(self.client, cluster['id'])
cluster = test_utils.wait_for_delete(test_api.get_cluster, self.client,
cluster['id'])
|
|
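The test leans on test_utils.wait_for_status and wait_for_delete, which poll until a resource reaches the expected state. A minimal sketch of such a polling helper (the real senlin utility may differ in signature and timeouts):

import time

def wait_for_status(get_func, client, obj_id, expected, timeout=120, interval=2):
    deadline = time.time() + timeout
    while time.time() < deadline:
        obj = get_func(client, obj_id)
        if obj and obj.get('status') == expected:
            return obj
        time.sleep(interval)
    raise AssertionError('%s never reached status %s' % (obj_id, expected))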
48c008b4ac08114e30f4bee7a208d5d3fb925296
|
problem1/steiner-simplegreedy.py
|
problem1/steiner-simplegreedy.py
|
import networkx as nx
from sys import argv
def main():
# G = nx.read_gml(argv[1])
G = nx.read_gml("steiner-small.gml")
T = [] # terminals
for v,d in G.nodes_iter(data=True):
if d['T'] == 1:
T.append(v)
U = T[:] # Steiner tree vertices
F = [] # Steiner tree edges
D = [] # candidate edge set
for u in T:
u_incident = G.edges(u)
for i in u_incident:
D.append(i)
UF = nx.Graph()
UF.add_nodes_from(T)
while not nx.is_connected(UF):
if len(D) == 0:
print("Not sufficiently connected")
return None
min_f = float("inf")
for f_i in D:
f_cost = G.edge[f_i[0]][f_i[1]]['c']
if f_cost < min_f:
min_f = f_cost
f = f_i
UF_f = UF.copy()
UF_f.add_edge(f[0], f[1])
if len(nx.cycle_basis(UF_f)) == 0:  # nx has no has_no_cycles(); use cycle_basis
pass
#F.append(f)
#U.append(f[0])
#U.append(f[1])
#D.append(f.incident)
#D.remove(f)
return UF
if __name__ == '__main__':
UF = main()
print("UF nodes:",UF.nodes())
print("UF edges:",UF.edges())
|
Add partial simple greedy algorithm (baseline).
|
Add partial simple greedy algorithm (baseline).
|
Python
|
mit
|
karulont/combopt
|
Add partial simple greedy algorithm (baseline).
|
import networkx as nx
from sys import argv
def main():
# G = nx.read_gml(argv[1])
G = nx.read_gml("steiner-small.gml")
T = [] # terminals
for v,d in G.nodes_iter(data=True):
if d['T'] == 1:
T.append(v)
U = T[:] # Steiner tree vertices
F = [] # Steiner tree edges
D = [] # candidate edge set
for u in T:
u_incident = G.edges(u)
for i in u_incident:
D.append(i)
UF = nx.Graph()
UF.add_nodes_from(T)
while not nx.is_connected(UF):
if len(D) == 0:
print("Not sufficiently connected")
return None
min_f = float("inf")
for f_i in D:
f_cost = G.edge[f_i[0]][f_i[1]]['c']
if f_cost < min_f:
min_f = f_cost
f = f_i
UF_f = UF.copy()
UF_f.add_edge(f[0], f[1])
if len(nx.cycle_basis(UF_f)) == 0:  # nx has no has_no_cycles(); use cycle_basis
pass
#F.append(f)
#U.append(f[0])
#U.append(f[1])
#D.append(f.incident)
#D.remove(f)
return UF
if __name__ == '__main__':
UF = main()
print("UF nodes:",UF.nodes())
print("UF edges:",UF.edges())
|
<commit_before><commit_msg>Add partial simple greedy algorithm (baseline).<commit_after>
|
import networkx as nx
from sys import argv
def main():
# G = nx.read_gml(argv[1])
G = nx.read_gml("steiner-small.gml")
T = [] # terminals
for v,d in G.nodes_iter(data=True):
if d['T'] == 1:
T.append(v)
U = T[:] # Steiner tree vertices
F = [] # Steiner tree edges
D = [] # candidate edge set
for u in T:
u_incident = G.edges(u)
for i in u_incident:
D.append(i)
UF = nx.Graph()
UF.add_nodes_from(T)
while not nx.is_connected(UF):
if len(D) == 0:
print("Not sufficiently connected")
return None
min_f = float("inf")
for f_i in D:
f_cost = G.edge[f_i[0]][f_i[1]]['c']
if f_cost < min_f:
min_f = f_cost
f = f_i
UF_f = UF.copy()
UF_f.add_edge(f[0], f[1])
if len(nx.cycle_basis(UF_f)) == 0:  # nx has no has_no_cycles(); use cycle_basis
pass
#F.append(f)
#U.append(f[0])
#U.append(f[1])
#D.append(f.incident)
#D.remove(f)
return UF
if __name__ == '__main__':
UF = main()
print("UF nodes:",UF.nodes())
print("UF edges:",UF.edges())
|
Add partial simple greedy algorithm (baseline).import networkx as nx
from sys import argv
def main():
# G = nx.read_gml(argv[1])
G = nx.read_gml("steiner-small.gml")
T = [] # terminals
for v,d in G.nodes_iter(data=True):
if d['T'] == 1:
T.append(v)
U = T[:] # Steiner tree vertices
F = [] # Steiner tree edges
D = [] # candidate edge set
for u in T:
u_incident = G.edges(u)
for i in u_incident:
D.append(i)
UF = nx.Graph()
UF.add_nodes_from(T)
while not nx.is_connected(UF):
if len(D) == 0:
print("Not sufficiently connected")
return None
min_f = float("inf")
for f_i in D:
f_cost = G.edge[f_i[0]][f_i[1]]['c']
if f_cost < min_f:
min_f = f_cost
f = f_i
UF_f = UF.copy()
UF_f.add_edge(f[0], f[1])
if len(nx.cycle_basis(UF_f)) == 0:  # nx has no has_no_cycles(); use cycle_basis
pass
#F.append(f)
#U.append(f[0])
#U.append(f[1])
#D.append(f.incident)
#D.remove(f)
return UF
if __name__ == '__main__':
UF = main()
print("UF nodes:",UF.nodes())
print("UF edges:",UF.edges())
|
<commit_before><commit_msg>Add partial simple greedy algorithm (baseline).<commit_after>import networkx as nx
from sys import argv
def main():
# G = nx.read_gml(argv[1])
G = nx.read_gml("steiner-small.gml")
T = [] # terminals
for v,d in G.nodes_iter(data=True):
if d['T'] == 1:
T.append(v)
U = T[:] # Steiner tree vertices
F = [] # Steiner tree edges
D = [] # candidate edge set
for u in T:
u_incident = G.edges(u)
for i in u_incident:
D.append(i)
UF = nx.Graph()
UF.add_nodes_from(T)
while not nx.is_connected(UF):
if len(D) == 0:
print("Not sufficiently connected")
return None
min_f = float("inf")
for f_i in D:
f_cost = G.edge[f_i[0]][f_i[1]]['c']
if f_cost < min_f:
min_f = f_cost
f = f_i
UF_f = UF.copy()
UF_f.add_edge(f[0], f[1])
if len(nx.cycle_basis(UF_f)) == 0:  # nx has no has_no_cycles(); use cycle_basis
pass
#F.append(f)
#U.append(f[0])
#U.append(f[1])
#D.append(f.incident)
#D.remove(f)
return UF
if __name__ == '__main__':
UF = main()
print("UF nodes:",UF.nodes())
print("UF edges:",UF.edges())
|
|
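As noted inline above, networkx provides no has_no_cycles(); for an undirected graph the standard acyclicity check is cycle_basis (or nx.find_cycle inside a try/except). A minimal demonstration on a toy graph:

import networkx as nx

G = nx.Graph([(1, 2), (2, 3)])     # a path: acyclic
assert len(nx.cycle_basis(G)) == 0
G.add_edge(3, 1)                   # close the triangle
assert len(nx.cycle_basis(G)) > 0  # now one independent cycle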
084ebff19703c42c50621eb94ac070c6a471e983
|
Home/mostWantedLetter.py
|
Home/mostWantedLetter.py
|
def checkio(word):
word = word.lower()
arr = dict()
for i in range(len(word)):
char = word[i]
if not char.isalpha():
continue
if char not in arr:
arr[char] = 0
arr[char] = arr[char] + 1
result = ""
counter = 0
for k, v in arr.items():
if counter < v or (ord(k) < ord(result) and counter == v):
result = k
counter = v
return result
if __name__ == '__main__':
assert checkio("Hello World!") == "l", "First"
assert checkio("How do you do?") == "o", "Second"
assert checkio("One") == "e", "Third"
assert checkio("") == "", "Final"
print('All ok')
|
Solve the most wanted letter problem.
|
Solve the most wanted letter problem.
|
Python
|
mit
|
edwardzhu/checkio-solution
|
Solve the most wanted letter problem.
|
def checkio(word):
word = word.lower()
arr = dict()
for i in range(len(word)):
char = word[i]
if not char.isalpha():
continue
if char not in arr:
arr[char] = 0
arr[char] = arr[char] + 1
result = ""
counter = 0
for k, v in arr.items():
if counter < v or (ord(k) < ord(result) and counter == v):
result = k
counter = v
return result
if __name__ == '__main__':
assert checkio("Hello World!") == "l", "First"
assert checkio("How do you do?") == "o", "Second"
assert checkio("One") == "e", "Third"
assert checkio("") == "", "Final"
print('All ok')
|
<commit_before><commit_msg>Solve the most wanted letter problem.<commit_after>
|
def checkio(word):
word = word.lower()
arr = dict()
for i in range(len(word)):
char = word[i]
if not char.isalpha():
continue
if char not in arr:
arr[char] = 0
arr[char] = arr[char] + 1
result = ""
counter = 0
for k, v in arr.items():
if counter < v or (ord(k) < ord(result) and counter == v):
result = k
counter = v
return result
if __name__ == '__main__':
assert checkio("Hello World!") == "l", "First"
assert checkio("How do you do?") == "o", "Second"
assert checkio("One") == "e", "Third"
assert checkio("") == "", "Final"
print('All ok')
|
Solve the most wanted letter problem.def checkio(word):
word = word.lower()
arr = dict()
for i in range(len(word)):
char = word[i]
if not char.isalpha():
continue
if char not in arr:
arr[char] = 0
arr[char] = arr[char] + 1
result = ""
counter = 0
for k, v in arr.items():
if counter < v or (ord(k) < ord(result) and counter == v):
result = k
counter = v
return result
if __name__ == '__main__':
assert checkio("Hello World!") == "l", "First"
assert checkio("How do you do?") == "o", "Second"
assert checkio("One") == "e", "Third"
assert checkio("") == "", "Final"
print('All ok')
|
<commit_before><commit_msg>Solve the most wanted letter problem.<commit_after>def checkio(word):
word = word.lower()
arr = dict()
for i in range(len(word)):
char = word[i]
if not char.isalpha():
continue
if char not in arr:
arr[char] = 0
arr[char] = arr[char] + 1
result = ""
counter = 0
for k, v in arr.items():
if counter < v or (ord(k) < ord(result) and counter == v):
result = k
counter = v
return result
if __name__ == '__main__':
assert checkio("Hello World!") == "l", "First"
assert checkio("How do you do?") == "o", "Second"
assert checkio("One") == "e", "Third"
assert checkio("") == "", "Final"
print('All ok')
|
|
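The same selection rule (highest count, ties broken by alphabetical order) can be written in one pass with the standard library; an equivalent sketch:

from collections import Counter

def checkio_counter(word):
    counts = Counter(c for c in word.lower() if c.isalpha())
    if not counts:
        return ''
    return min(counts, key=lambda c: (-counts[c], c))

assert checkio_counter('Hello World!') == 'l'
assert checkio_counter('One') == 'e'
assert checkio_counter('') == ''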
8fb4df5367b5c03d2851532063f6fa781fe2f980
|
Maths/fibonacciSeries.py
|
Maths/fibonacciSeries.py
|
# Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in Fibonacci series:"))
if limit <= 0:
print("Please enter a positive integer")
else:
print("Fibonacci series:")
for i in range(limit):
print(recur_fibo(i))
|
Add Fibonacci Series Using Recursion
|
Add Fibonacci Series Using Recursion
|
Python
|
mit
|
TheAlgorithms/Python
|
Add Fibonacci Series Using Recursion
|
# Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in Fibonacci series:"))
if limit <= 0:
print("Please enter a positive integer")
else:
print("Fibonacci series:")
for i in range(limit):
print(recur_fibo(i))
|
<commit_before><commit_msg>Add Fibonacci Series Using Recursion<commit_after>
|
# Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in Fibonacci series:"))
if limit <= 0:
print("Please enter a positive integer")
else:
print("Fibonacci series:")
for i in range(limit):
print(recur_fibo(i))
|
Add Fibonacci Series Using Recursion# Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in Fibonacci series:"))
if limit <= 0:
print("Please enter a positive integer")
else:
print("Fibonacci series:")
for i in range(limit):
print(recur_fibo(i))
|
<commit_before><commit_msg>Add Fibonacci Series Using Recursion<commit_after># Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in Fibonacci series:"))
if limit <= 0:
print("Please enter a positive integer")
else:
print("Fibonacci series:")
for i in range(limit):
print(recur_fibo(i))
|
|
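The naive recursion above recomputes subproblems and runs in exponential time; memoizing gives the same interface at linear cost. A sketch:

from functools import lru_cache

@lru_cache(maxsize=None)
def fib(n):
    return n if n <= 1 else fib(n - 1) + fib(n - 2)

assert [fib(i) for i in range(7)] == [0, 1, 1, 2, 3, 5, 8]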
97ae80b08958646e0c937f65a1b396171bf61e72
|
Lib/test/test_xreload.py
|
Lib/test/test_xreload.py
|
"""Doctests for module reloading.
>>> from xreload import xreload
>>> from test.test_xreload import make_mod
>>> make_mod()
>>> import x
>>> C = x.C
>>> Cfoo = C.foo
>>> Cbar = C.bar
>>> Cstomp = C.stomp
>>> b = C()
>>> bfoo = b.foo
>>> b.foo()
42
>>> bfoo()
42
>>> Cfoo(b)
42
>>> Cbar()
42 42
>>> Cstomp()
42 42 42
>>> make_mod(repl="42", subst="24")
>>> xreload(x)
<module 'x' (built-in)>
>>> b.foo()
24
>>> bfoo()
24
>>> Cfoo(b)
24
>>> Cbar()
24 24
>>> Cstomp()
24 24 24
"""
SAMPLE_CODE = """
class C:
def foo(self):
print(42)
@classmethod
def bar(cls):
print(42, 42)
@staticmethod
def stomp():
print (42, 42, 42)
"""
import os
import sys
import shutil
import doctest
import xreload
import tempfile
from test.test_support import run_unittest
tempdir = None
save_path = None
def setUp(unused=None):
global tempdir, save_path
tempdir = tempfile.mkdtemp()
save_path = list(sys.path)
sys.path.append(tempdir)
def tearDown(unused=None):
global tempdir, save_path
if save_path is not None:
sys.path = save_path
save_path = None
if tempdir is not None:
shutil.rmtree(tempdir)
tempdir = None
def make_mod(name="x", repl=None, subst=None):
if not tempdir:
setUp()
assert tempdir
fn = os.path.join(tempdir, name + ".py")
f = open(fn, "w")
sample = SAMPLE_CODE
if repl is not None and subst is not None:
sample = sample.replace(repl, subst)
try:
f.write(sample)
finally:
f.close()
def test_suite():
return doctest.DocTestSuite(setUp=setUp, tearDown=tearDown)
def test_main():
run_unittest(test_suite())
if __name__ == "__main__":
test_main()
|
Add a proper unit test for xreload.py.
|
Add a proper unit test for xreload.py.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
Add a proper unit test for xreload.py.
|
"""Doctests for module reloading.
>>> from xreload import xreload
>>> from test.test_xreload import make_mod
>>> make_mod()
>>> import x
>>> C = x.C
>>> Cfoo = C.foo
>>> Cbar = C.bar
>>> Cstomp = C.stomp
>>> b = C()
>>> bfoo = b.foo
>>> b.foo()
42
>>> bfoo()
42
>>> Cfoo(b)
42
>>> Cbar()
42 42
>>> Cstomp()
42 42 42
>>> make_mod(repl="42", subst="24")
>>> xreload(x)
<module 'x' (built-in)>
>>> b.foo()
24
>>> bfoo()
24
>>> Cfoo(b)
24
>>> Cbar()
24 24
>>> Cstomp()
24 24 24
"""
SAMPLE_CODE = """
class C:
def foo(self):
print(42)
@classmethod
def bar(cls):
print(42, 42)
@staticmethod
def stomp():
print (42, 42, 42)
"""
import os
import sys
import shutil
import doctest
import xreload
import tempfile
from test.test_support import run_unittest
tempdir = None
save_path = None
def setUp(unused=None):
global tempdir, save_path
tempdir = tempfile.mkdtemp()
save_path = list(sys.path)
sys.path.append(tempdir)
def tearDown(unused=None):
global tempdir, save_path
if save_path is not None:
sys.path = save_path
save_path = None
if tempdir is not None:
shutil.rmtree(tempdir)
tempdir = None
def make_mod(name="x", repl=None, subst=None):
if not tempdir:
setUp()
assert tempdir
fn = os.path.join(tempdir, name + ".py")
f = open(fn, "w")
sample = SAMPLE_CODE
if repl is not None and subst is not None:
sample = sample.replace(repl, subst)
try:
f.write(sample)
finally:
f.close()
def test_suite():
return doctest.DocTestSuite(setUp=setUp, tearDown=tearDown)
def test_main():
run_unittest(test_suite())
if __name__ == "__main__":
test_main()
|
<commit_before><commit_msg>Add a proper unit test for xreload.py.<commit_after>
|
"""Doctests for module reloading.
>>> from xreload import xreload
>>> from test.test_xreload import make_mod
>>> make_mod()
>>> import x
>>> C = x.C
>>> Cfoo = C.foo
>>> Cbar = C.bar
>>> Cstomp = C.stomp
>>> b = C()
>>> bfoo = b.foo
>>> b.foo()
42
>>> bfoo()
42
>>> Cfoo(b)
42
>>> Cbar()
42 42
>>> Cstomp()
42 42 42
>>> make_mod(repl="42", subst="24")
>>> xreload(x)
<module 'x' (built-in)>
>>> b.foo()
24
>>> bfoo()
24
>>> Cfoo(b)
24
>>> Cbar()
24 24
>>> Cstomp()
24 24 24
"""
SAMPLE_CODE = """
class C:
def foo(self):
print(42)
@classmethod
def bar(cls):
print(42, 42)
@staticmethod
def stomp():
print (42, 42, 42)
"""
import os
import sys
import shutil
import doctest
import xreload
import tempfile
from test.test_support import run_unittest
tempdir = None
save_path = None
def setUp(unused=None):
global tempdir, save_path
tempdir = tempfile.mkdtemp()
save_path = list(sys.path)
sys.path.append(tempdir)
def tearDown(unused=None):
global tempdir, save_path
if save_path is not None:
sys.path = save_path
save_path = None
if tempdir is not None:
shutil.rmtree(tempdir)
tempdir = None
def make_mod(name="x", repl=None, subst=None):
if not tempdir:
setUp()
assert tempdir
fn = os.path.join(tempdir, name + ".py")
f = open(fn, "w")
sample = SAMPLE_CODE
if repl is not None and subst is not None:
sample = sample.replace(repl, subst)
try:
f.write(sample)
finally:
f.close()
def test_suite():
return doctest.DocTestSuite(setUp=setUp, tearDown=tearDown)
def test_main():
run_unittest(test_suite())
if __name__ == "__main__":
test_main()
|
Add a proper unit test for xreload.py."""Doctests for module reloading.
>>> from xreload import xreload
>>> from test.test_xreload import make_mod
>>> make_mod()
>>> import x
>>> C = x.C
>>> Cfoo = C.foo
>>> Cbar = C.bar
>>> Cstomp = C.stomp
>>> b = C()
>>> bfoo = b.foo
>>> b.foo()
42
>>> bfoo()
42
>>> Cfoo(b)
42
>>> Cbar()
42 42
>>> Cstomp()
42 42 42
>>> make_mod(repl="42", subst="24")
>>> xreload(x)
<module 'x' (built-in)>
>>> b.foo()
24
>>> bfoo()
24
>>> Cfoo(b)
24
>>> Cbar()
24 24
>>> Cstomp()
24 24 24
"""
SAMPLE_CODE = """
class C:
def foo(self):
print(42)
@classmethod
def bar(cls):
print(42, 42)
@staticmethod
def stomp():
print (42, 42, 42)
"""
import os
import sys
import shutil
import doctest
import xreload
import tempfile
from test.test_support import run_unittest
tempdir = None
save_path = None
def setUp(unused=None):
global tempdir, save_path
tempdir = tempfile.mkdtemp()
save_path = list(sys.path)
sys.path.append(tempdir)
def tearDown(unused=None):
global tempdir, save_path
if save_path is not None:
sys.path = save_path
save_path = None
if tempdir is not None:
shutil.rmtree(tempdir)
tempdir = None
def make_mod(name="x", repl=None, subst=None):
if not tempdir:
setUp()
assert tempdir
fn = os.path.join(tempdir, name + ".py")
f = open(fn, "w")
sample = SAMPLE_CODE
if repl is not None and subst is not None:
sample = sample.replace(repl, subst)
try:
f.write(sample)
finally:
f.close()
def test_suite():
return doctest.DocTestSuite(setUp=setUp, tearDown=tearDown)
def test_main():
run_unittest(test_suite())
if __name__ == "__main__":
test_main()
|
<commit_before><commit_msg>Add a proper unit test for xreload.py.<commit_after>"""Doctests for module reloading.
>>> from xreload import xreload
>>> from test.test_xreload import make_mod
>>> make_mod()
>>> import x
>>> C = x.C
>>> Cfoo = C.foo
>>> Cbar = C.bar
>>> Cstomp = C.stomp
>>> b = C()
>>> bfoo = b.foo
>>> b.foo()
42
>>> bfoo()
42
>>> Cfoo(b)
42
>>> Cbar()
42 42
>>> Cstomp()
42 42 42
>>> make_mod(repl="42", subst="24")
>>> xreload(x)
<module 'x' (built-in)>
>>> b.foo()
24
>>> bfoo()
24
>>> Cfoo(b)
24
>>> Cbar()
24 24
>>> Cstomp()
24 24 24
"""
SAMPLE_CODE = """
class C:
def foo(self):
print(42)
@classmethod
def bar(cls):
print(42, 42)
@staticmethod
def stomp():
print (42, 42, 42)
"""
import os
import sys
import shutil
import doctest
import xreload
import tempfile
from test.test_support import run_unittest
tempdir = None
save_path = None
def setUp(unused=None):
global tempdir, save_path
tempdir = tempfile.mkdtemp()
save_path = list(sys.path)
sys.path.append(tempdir)
def tearDown(unused=None):
global tempdir, save_path
if save_path is not None:
sys.path = save_path
save_path = None
if tempdir is not None:
shutil.rmtree(tempdir)
tempdir = None
def make_mod(name="x", repl=None, subst=None):
if not tempdir:
setUp()
assert tempdir
fn = os.path.join(tempdir, name + ".py")
f = open(fn, "w")
sample = SAMPLE_CODE
if repl is not None and subst is not None:
sample = sample.replace(repl, subst)
try:
f.write(sample)
finally:
f.close()
def test_suite():
return doctest.DocTestSuite(setUp=setUp, tearDown=tearDown)
def test_main():
run_unittest(test_suite())
if __name__ == "__main__":
test_main()
|
|
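
Context for the doctest above: a plain module reload rebinds names inside the module but leaves previously created objects pointing at the old class objects, which is exactly what xreload works around by patching classes and functions in place. A small sketch of the baseline behavior (assumes some hypothetical module m defining class C):

import importlib
import m                      # hypothetical module containing class C

obj = m.C()
importlib.reload(m)           # m.C is now a brand-new class object
print(type(obj) is m.C)       # False - the old instance kept the old class

That is why the test checks bfoo(), Cfoo(b) and friends after xreload(x): bound methods and pre-existing instances must observe the edited source.
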
dd1d0893823561efec203cdfbb927b8edac7a72a
|
tests/unit/beanstalk/test_exception.py
|
tests/unit/beanstalk/test_exception.py
|
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.beanstalk.exception import simple
from boto.compat import unittest
class FakeError(object):
def __init__(self, code, status, reason, body):
self.code = code
self.status = status
self.reason = reason
self.body = body
class TestExceptions(unittest.TestCase):
def test_exception_class_names(self):
# Create exception from class name
error = FakeError('TooManyApplications', 400, 'foo', 'bar')
exception = simple(error)
self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
# Create exception from class name + 'Exception' as seen from the
# live service today
error = FakeError('TooManyApplicationsException', 400, 'foo', 'bar')
exception = simple(error)
self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
# Make sure message body is present
self.assertEqual(exception.message, 'bar')
|
Add a couple of tests to create exception classes from error code names
|
Add a couple of tests to create exception classes from error code names
|
Python
|
mit
|
darjus-amzn/boto,Asana/boto,vishnugonela/boto,podhmo/boto,weebygames/boto,SaranyaKarthikeyan/boto,clouddocx/boto,bleib1dj/boto,TiVoMaker/boto,tpodowd/boto,rayluo/boto,tpodowd/boto,disruptek/boto,stevenbrichards/boto,revmischa/boto,pfhayes/boto,ekalosak/boto,ryansb/boto,shaunbrady/boto,acourtney2015/boto,alfredodeza/boto,jotes/boto,zachmullen/boto,alex/boto,jindongh/boto,nexusz99/boto,felix-d/boto,janslow/boto,kouk/boto,ocadotechnology/boto,kouk/boto,j-carl/boto,campenberger/boto,disruptek/boto,alex/boto,zzzirk/boto,vijaylbais/boto,garnaat/boto,varunarya10/boto,appneta/boto,trademob/boto,s0enke/boto,dimdung/boto,drbild/boto,khagler/boto,nikhilraog/boto,elainexmas/boto,shipci/boto,nishigori/boto,rosmo/boto,drbild/boto,bryx-inc/boto,serviceagility/boto,israelbenatar/boto,lra/boto,weka-io/boto,ddzialak/boto,ramitsurana/boto,abridgett/boto,yangchaogit/boto,awatts/boto,appneta/boto,Pretio/boto
|
Add a couple of tests to create exception classes from error code names
|
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.beanstalk.exception import simple
from boto.compat import unittest
class FakeError(object):
def __init__(self, code, status, reason, body):
self.code = code
self.status = status
self.reason = reason
self.body = body
class TestExceptions(unittest.TestCase):
def test_exception_class_names(self):
# Create exception from class name
error = FakeError('TooManyApplications', 400, 'foo', 'bar')
exception = simple(error)
self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
# Create exception from class name + 'Exception' as seen from the
# live service today
error = FakeError('TooManyApplicationsException', 400, 'foo', 'bar')
exception = simple(error)
self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
# Make sure message body is present
self.assertEqual(exception.message, 'bar')
|
<commit_before><commit_msg>Add a couple of tests to create exception classes from error code names<commit_after>
|
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.beanstalk.exception import simple
from boto.compat import unittest
class FakeError(object):
def __init__(self, code, status, reason, body):
self.code = code
self.status = status
self.reason = reason
self.body = body
class TestExceptions(unittest.TestCase):
def test_exception_class_names(self):
# Create exception from class name
error = FakeError('TooManyApplications', 400, 'foo', 'bar')
exception = simple(error)
self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
# Create exception from class name + 'Exception' as seen from the
# live service today
error = FakeError('TooManyApplicationsException', 400, 'foo', 'bar')
exception = simple(error)
self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
# Make sure message body is present
self.assertEqual(exception.message, 'bar')
|
Add a couple of tests to create exception classes from error code names# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.beanstalk.exception import simple
from boto.compat import unittest
class FakeError(object):
def __init__(self, code, status, reason, body):
self.code = code
self.status = status
self.reason = reason
self.body = body
class TestExceptions(unittest.TestCase):
def test_exception_class_names(self):
# Create exception from class name
error = FakeError('TooManyApplications', 400, 'foo', 'bar')
exception = simple(error)
self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
# Create exception from class name + 'Exception' as seen from the
# live service today
error = FakeError('TooManyApplicationsException', 400, 'foo', 'bar')
exception = simple(error)
self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
# Make sure message body is present
self.assertEqual(exception.message, 'bar')
|
<commit_before><commit_msg>Add a couple of tests to create exception classes from error code names<commit_after># Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.beanstalk.exception import simple
from boto.compat import unittest
class FakeError(object):
def __init__(self, code, status, reason, body):
self.code = code
self.status = status
self.reason = reason
self.body = body
class TestExceptions(unittest.TestCase):
def test_exception_class_names(self):
# Create exception from class name
error = FakeError('TooManyApplications', 400, 'foo', 'bar')
exception = simple(error)
self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
# Create exception from class name + 'Exception' as seen from the
# live service today
error = FakeError('TooManyApplicationsException', 400, 'foo', 'bar')
exception = simple(error)
self.assertEqual(exception.__class__.__name__, 'TooManyApplications')
# Make sure message body is present
self.assertEqual(exception.message, 'bar')
|
|
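
The simple() factory under test builds exception classes from service error codes at runtime. A rough sketch of the general pattern (not boto's actual implementation):

class SimpleError(Exception):
    def __init__(self, error):
        self.status = error.status
        self.reason = error.reason
        self.message = error.body
        Exception.__init__(self, self.message)

def simple_sketch(error):
    # 'TooManyApplicationsException' and 'TooManyApplications' should map to
    # the same class name, hence the suffix strip.
    name = error.code
    if name.endswith('Exception'):
        name = name[:-len('Exception')]
    return type(name, (SimpleError,), {})(error)

Used with the FakeError helper above, simple_sketch(FakeError('TooManyApplicationsException', 400, 'foo', 'bar')) yields an instance whose class is named TooManyApplications and whose message is 'bar', matching the test's assertions.
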
de38b3e7b3d8458920b913316b06bb10b886df9f
|
thinglang/symbols/argument_selector.py
|
thinglang/symbols/argument_selector.py
|
import collections
import copy
from thinglang.compiler.errors import NoMatchingOverload
from thinglang.lexer.values.identifier import Identifier
SymbolOption = collections.namedtuple('SymbolOption', ['symbol', 'remaining_arguments'])
class ArgumentSelector(object):
"""
Aids in disambiguating overloaded method symbols contained in MergedSymbol objects.
Managed state regarding arguments already observed, and filters out overloads and all arguments are processed.
If a matching overload exists, it is returned - otherwise, an exception is thrown.
"""
def __init__(self, symbols):
self.symbols = symbols
self.collected_arguments = []
self.options = [SymbolOption(symbol, copy.deepcopy(symbol.arguments)) for symbol in symbols]
def constraint(self, resolved):
"""
Filters out option groups that do not expect to see the resolved type as their next argument
"""
self.collected_arguments.append(resolved)
new_options = []
for option in self.options:
if option.remaining_arguments and self.type_match(resolved, option.remaining_arguments.pop(0)):
new_options.append(option)
self.options = new_options
if not self.options:
raise NoMatchingOverload(self.symbols, self.collected_arguments)
def disambiguate(self):
"""
Selects the best matching overload
"""
option_group = [option for option in self.options if not option.remaining_arguments]
if len(option_group) != 1:
raise NoMatchingOverload(self.symbols, self.collected_arguments)
return option_group[0].symbol
@staticmethod
def type_match(resolved, expected_type):
"""
Checks if two types match (TODO: take inheritance chains into account)
"""
if expected_type == Identifier('object'):
return True
return resolved.type == expected_type
|
Implement ArgumentSelector for overload disambiguation
|
Implement ArgumentSelector for overload disambiguation
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
Implement ArgumentSelector for overload disambiguation
|
import collections
import copy
from thinglang.compiler.errors import NoMatchingOverload
from thinglang.lexer.values.identifier import Identifier
SymbolOption = collections.namedtuple('SymbolOption', ['symbol', 'remaining_arguments'])
class ArgumentSelector(object):
"""
Aids in disambiguating overloaded method symbols contained in MergedSymbol objects.
    Manages state regarding arguments already observed, and filters out overloads as arguments are processed.
If a matching overload exists, it is returned - otherwise, an exception is thrown.
"""
def __init__(self, symbols):
self.symbols = symbols
self.collected_arguments = []
self.options = [SymbolOption(symbol, copy.deepcopy(symbol.arguments)) for symbol in symbols]
def constraint(self, resolved):
"""
Filters out option groups that do not expect to see the resolved type as their next argument
"""
self.collected_arguments.append(resolved)
new_options = []
for option in self.options:
if option.remaining_arguments and self.type_match(resolved, option.remaining_arguments.pop(0)):
new_options.append(option)
self.options = new_options
if not self.options:
raise NoMatchingOverload(self.symbols, self.collected_arguments)
def disambiguate(self):
"""
Selects the best matching overload
"""
option_group = [option for option in self.options if not option.remaining_arguments]
if len(option_group) != 1:
raise NoMatchingOverload(self.symbols, self.collected_arguments)
return option_group[0].symbol
@staticmethod
def type_match(resolved, expected_type):
"""
Checks if two types match (TODO: take inheritance chains into account)
"""
if expected_type == Identifier('object'):
return True
return resolved.type == expected_type
|
<commit_before><commit_msg>Implement ArgumentSelector for overload disambiguation<commit_after>
|
import collections
import copy
from thinglang.compiler.errors import NoMatchingOverload
from thinglang.lexer.values.identifier import Identifier
SymbolOption = collections.namedtuple('SymbolOption', ['symbol', 'remaining_arguments'])
class ArgumentSelector(object):
"""
Aids in disambiguating overloaded method symbols contained in MergedSymbol objects.
    Manages state regarding arguments already observed, and filters out overloads as arguments are processed.
If a matching overload exists, it is returned - otherwise, an exception is thrown.
"""
def __init__(self, symbols):
self.symbols = symbols
self.collected_arguments = []
self.options = [SymbolOption(symbol, copy.deepcopy(symbol.arguments)) for symbol in symbols]
def constraint(self, resolved):
"""
Filters out option groups that do not expect to see the resolved type as their next argument
"""
self.collected_arguments.append(resolved)
new_options = []
for option in self.options:
if option.remaining_arguments and self.type_match(resolved, option.remaining_arguments.pop(0)):
new_options.append(option)
self.options = new_options
if not self.options:
raise NoMatchingOverload(self.symbols, self.collected_arguments)
def disambiguate(self):
"""
Selects the best matching overload
"""
option_group = [option for option in self.options if not option.remaining_arguments]
if len(option_group) != 1:
raise NoMatchingOverload(self.symbols, self.collected_arguments)
return option_group[0].symbol
@staticmethod
def type_match(resolved, expected_type):
"""
Checks if two types match (TODO: take inheritance chains into account)
"""
if expected_type == Identifier('object'):
return True
return resolved.type == expected_type
|
Implement ArgumentSelector for overload disambiguationimport collections
import copy
from thinglang.compiler.errors import NoMatchingOverload
from thinglang.lexer.values.identifier import Identifier
SymbolOption = collections.namedtuple('SymbolOption', ['symbol', 'remaining_arguments'])
class ArgumentSelector(object):
"""
Aids in disambiguating overloaded method symbols contained in MergedSymbol objects.
    Manages state regarding arguments already observed, and filters out overloads as arguments are processed.
If a matching overload exists, it is returned - otherwise, an exception is thrown.
"""
def __init__(self, symbols):
self.symbols = symbols
self.collected_arguments = []
self.options = [SymbolOption(symbol, copy.deepcopy(symbol.arguments)) for symbol in symbols]
def constraint(self, resolved):
"""
Filters out option groups that do not expect to see the resolved type as their next argument
"""
self.collected_arguments.append(resolved)
new_options = []
for option in self.options:
if option.remaining_arguments and self.type_match(resolved, option.remaining_arguments.pop(0)):
new_options.append(option)
self.options = new_options
if not self.options:
raise NoMatchingOverload(self.symbols, self.collected_arguments)
def disambiguate(self):
"""
Selects the best matching overload
"""
option_group = [option for option in self.options if not option.remaining_arguments]
if len(option_group) != 1:
raise NoMatchingOverload(self.symbols, self.collected_arguments)
return option_group[0].symbol
@staticmethod
def type_match(resolved, expected_type):
"""
Checks if two types match (TODO: take inheritance chains into account)
"""
if expected_type == Identifier('object'):
return True
return resolved.type == expected_type
|
<commit_before><commit_msg>Implement ArgumentSelector for overload disambiguation<commit_after>import collections
import copy
from thinglang.compiler.errors import NoMatchingOverload
from thinglang.lexer.values.identifier import Identifier
SymbolOption = collections.namedtuple('SymbolOption', ['symbol', 'remaining_arguments'])
class ArgumentSelector(object):
"""
Aids in disambiguating overloaded method symbols contained in MergedSymbol objects.
    Manages state regarding arguments already observed, and filters out overloads as arguments are processed.
If a matching overload exists, it is returned - otherwise, an exception is thrown.
"""
def __init__(self, symbols):
self.symbols = symbols
self.collected_arguments = []
self.options = [SymbolOption(symbol, copy.deepcopy(symbol.arguments)) for symbol in symbols]
def constraint(self, resolved):
"""
Filters out option groups that do not expect to see the resolved type as their next argument
"""
self.collected_arguments.append(resolved)
new_options = []
for option in self.options:
if option.remaining_arguments and self.type_match(resolved, option.remaining_arguments.pop(0)):
new_options.append(option)
self.options = new_options
if not self.options:
raise NoMatchingOverload(self.symbols, self.collected_arguments)
def disambiguate(self):
"""
Selects the best matching overload
"""
option_group = [option for option in self.options if not option.remaining_arguments]
if len(option_group) != 1:
raise NoMatchingOverload(self.symbols, self.collected_arguments)
return option_group[0].symbol
@staticmethod
def type_match(resolved, expected_type):
"""
Checks if two types match (TODO: take inheritance chains into account)
"""
if expected_type == Identifier('object'):
return True
return resolved.type == expected_type
|
|
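
Hypothetical usage sketch with stand-in objects (real symbols come from thinglang's symbol tables; only a .arguments list and a .type attribute are exercised here):

from collections import namedtuple

Sym = namedtuple('Sym', ['arguments'])
Arg = namedtuple('Arg', ['type'])

number, text = Identifier('number'), Identifier('text')
overloads = [Sym([number]), Sym([number, text])]

selector = ArgumentSelector(overloads)
selector.constraint(Arg(number))   # both overloads still viable
selector.constraint(Arg(text))     # one-argument overload is pruned
assert selector.disambiguate() is overloads[1]

A third constraint, or an argument type matching neither overload, would raise NoMatchingOverload instead.
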
4d16ae6d1ad8b308c14c23e802349001b81ae461
|
thinglang/compiler/opcodes.py
|
thinglang/compiler/opcodes.py
|
import os
import re
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ENUM_PARSER = re.compile(r'(.*)\s*?=\s*?(\d+)')
def read_opcodes():
with open(os.path.join(BASE_DIR, '..', '..', 'thingc', 'execution', 'Opcode.h')) as f:
for line in f:
if 'enum class Opcode' in line:
break
for decl in f:
decl = decl.strip()
if not decl:
continue
if '}' in decl:
break
groups = ENUM_PARSER.search(decl).groups()
yield (groups[0].strip(), int(groups[1]))
OPCODES = dict(read_opcodes())
assert set(range(len(OPCODES))) == set(OPCODES.values())
|
Add Python-based opcode enum parser
|
Add Python-based opcode enum parser
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
Add Python-based opcode enum parser
|
import os
import re
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ENUM_PARSER = re.compile(r'(.*)\s*?=\s*?(\d+)')
def read_opcodes():
with open(os.path.join(BASE_DIR, '..', '..', 'thingc', 'execution', 'Opcode.h')) as f:
for line in f:
if 'enum class Opcode' in line:
break
for decl in f:
decl = decl.strip()
if not decl:
continue
if '}' in decl:
break
groups = ENUM_PARSER.search(decl).groups()
yield (groups[0].strip(), int(groups[1]))
OPCODES = dict(read_opcodes())
assert set(range(len(OPCODES))) == set(OPCODES.values())
|
<commit_before><commit_msg>Add Python-based opcode enum parser<commit_after>
|
import os
import re
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ENUM_PARSER = re.compile(r'(.*)\s*?=\s*?(\d+)')
def read_opcodes():
with open(os.path.join(BASE_DIR, '..', '..', 'thingc', 'execution', 'Opcode.h')) as f:
for line in f:
if 'enum class Opcode' in line:
break
for decl in f:
decl = decl.strip()
if not decl:
continue
if '}' in decl:
break
groups = ENUM_PARSER.search(decl).groups()
yield (groups[0].strip(), int(groups[1]))
OPCODES = dict(read_opcodes())
assert set(range(len(OPCODES))) == set(OPCODES.values())
|
Add Python-based opcode enum parserimport os
import re
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ENUM_PARSER = re.compile(r'(.*)\s*?=\s*?(\d+)')
def read_opcodes():
with open(os.path.join(BASE_DIR, '..', '..', 'thingc', 'execution', 'Opcode.h')) as f:
for line in f:
if 'enum class Opcode' in line:
break
for decl in f:
decl = decl.strip()
if not decl:
continue
if '}' in decl:
break
groups = ENUM_PARSER.search(decl).groups()
yield (groups[0].strip(), int(groups[1]))
OPCODES = dict(read_opcodes())
assert set(range(len(OPCODES))) == set(OPCODES.values())
|
<commit_before><commit_msg>Add Python-based opcode enum parser<commit_after>import os
import re
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ENUM_PARSER = re.compile(r'(.*)\s*?=\s*?(\d+)')
def read_opcodes():
with open(os.path.join(BASE_DIR, '..', '..', 'thingc', 'execution', 'Opcode.h')) as f:
for line in f:
if 'enum class Opcode' in line:
break
for decl in f:
decl = decl.strip()
if not decl:
continue
if '}' in decl:
break
groups = ENUM_PARSER.search(decl).groups()
yield (groups[0].strip(), int(groups[1]))
OPCODES = dict(read_opcodes())
assert set(range(len(OPCODES))) == set(OPCODES.values())
|
|
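
The parser assumes Opcode.h declares a plain enum body, one 'NAME = value,' per line, e.g. (a sketch of the expected input, not the real header):

    enum class Opcode {
        PASS = 0,
        INVALID = 1,
    };

ENUM_PARSER captures the name and the number; the name comes back with trailing whitespace, which is why read_opcodes() strips it:

import re
print(re.search(r'(.*)\s*?=\s*?(\d+)', 'PASS = 0,').groups())  # ('PASS ', '0')

The closing assert then guarantees the opcode values form a gap-free range starting at zero, so they can safely index a dispatch table.
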
ac823e61fd214f9818bb7a893a8ed52a3bfa3af4
|
neurokernel/conn_utils.py
|
neurokernel/conn_utils.py
|
#!/usr/bin/env python
import itertools
import os
import tempfile
import conn
import matplotlib.pyplot as plt
import networkx as nx
def imdisp(f):
"""
Display the specified image file using matplotlib.
"""
im = plt.imread(f)
plt.imshow(im)
plt.axis('off')
plt.draw()
return im
def show_pydot(g):
"""
Display a networkx graph using pydot.
"""
fd = tempfile.NamedTemporaryFile()
fd.close()
p = nx.to_pydot(g)
p.write_jpg(fd.name)
imdisp(fd.name)
os.remove(fd.name)
def show_pygraphviz(g, prog='dot', graph_attr={}, node_attr={}, edge_attr={}):
"""
Display a networkx graph using pygraphviz.
"""
fd = tempfile.NamedTemporaryFile(suffix='.jpg')
fd.close()
p = nx.to_agraph(g)
p.graph_attr.update(graph_attr)
p.node_attr.update(node_attr)
p.edge_attr.update(edge_attr)
p.draw(fd.name, prog=prog)
imdisp(fd.name)
os.remove(fd.name)
def conn_to_bipartite(c):
"""
Convert a Connectivity object into a bipartite NetworkX multigraph.
"""
g = nx.MultiDiGraph()
src_nodes = ['src_%i' % i for i in xrange(c.N_src)]
dest_nodes = ['dest_%i' % i for i in xrange(c.N_dest)]
g.add_nodes_from(src_nodes)
g.add_nodes_from(dest_nodes)
for key in c._data.keys():
syn, dir, name = key.split('/')
syn = int(syn)
if name == 'conn':
if dir == '+':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('src_%i' % src, 'dest_%i' % dest)
elif dir == '-':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('dest_%i' % dest, 'src_%i' % src)
else:
raise ValueError('invalid direction')
return g
|
Add utils for graph visualization.
|
Add utils for graph visualization.
|
Python
|
bsd-3-clause
|
cerrno/neurokernel
|
Add utils for graph visualization.
|
#!/usr/bin/env python
import itertools
import os
import tempfile
import conn
import matplotlib.pyplot as plt
import networkx as nx
def imdisp(f):
"""
Display the specified image file using matplotlib.
"""
im = plt.imread(f)
plt.imshow(im)
plt.axis('off')
plt.draw()
return im
def show_pydot(g):
"""
Display a networkx graph using pydot.
"""
fd = tempfile.NamedTemporaryFile()
fd.close()
p = nx.to_pydot(g)
p.write_jpg(fd.name)
imdisp(fd.name)
os.remove(fd.name)
def show_pygraphviz(g, prog='dot', graph_attr={}, node_attr={}, edge_attr={}):
"""
Display a networkx graph using pygraphviz.
"""
fd = tempfile.NamedTemporaryFile(suffix='.jpg')
fd.close()
p = nx.to_agraph(g)
p.graph_attr.update(graph_attr)
p.node_attr.update(node_attr)
p.edge_attr.update(edge_attr)
p.draw(fd.name, prog=prog)
imdisp(fd.name)
os.remove(fd.name)
def conn_to_bipartite(c):
"""
Convert a Connectivity object into a bipartite NetworkX multigraph.
"""
g = nx.MultiDiGraph()
src_nodes = ['src_%i' % i for i in xrange(c.N_src)]
dest_nodes = ['dest_%i' % i for i in xrange(c.N_dest)]
g.add_nodes_from(src_nodes)
g.add_nodes_from(dest_nodes)
for key in c._data.keys():
syn, dir, name = key.split('/')
syn = int(syn)
if name == 'conn':
if dir == '+':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('src_%i' % src, 'dest_%i' % dest)
elif dir == '-':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('dest_%i' % dest, 'src_%i' % src)
else:
raise ValueError('invalid direction')
return g
|
<commit_before><commit_msg>Add utils for graph visualization.<commit_after>
|
#!/usr/bin/env python
import itertools
import os
import tempfile
import conn
import matplotlib.pyplot as plt
import networkx as nx
def imdisp(f):
"""
Display the specified image file using matplotlib.
"""
im = plt.imread(f)
plt.imshow(im)
plt.axis('off')
plt.draw()
return im
def show_pydot(g):
"""
Display a networkx graph using pydot.
"""
fd = tempfile.NamedTemporaryFile()
fd.close()
p = nx.to_pydot(g)
p.write_jpg(fd.name)
imdisp(fd.name)
os.remove(fd.name)
def show_pygraphviz(g, prog='dot', graph_attr={}, node_attr={}, edge_attr={}):
"""
Display a networkx graph using pygraphviz.
"""
fd = tempfile.NamedTemporaryFile(suffix='.jpg')
fd.close()
p = nx.to_agraph(g)
p.graph_attr.update(graph_attr)
p.node_attr.update(node_attr)
p.edge_attr.update(edge_attr)
p.draw(fd.name, prog=prog)
imdisp(fd.name)
os.remove(fd.name)
def conn_to_bipartite(c):
"""
Convert a Connectivity object into a bipartite NetworkX multigraph.
"""
g = nx.MultiDiGraph()
src_nodes = ['src_%i' % i for i in xrange(c.N_src)]
dest_nodes = ['dest_%i' % i for i in xrange(c.N_dest)]
g.add_nodes_from(src_nodes)
g.add_nodes_from(dest_nodes)
for key in c._data.keys():
syn, dir, name = key.split('/')
syn = int(syn)
if name == 'conn':
if dir == '+':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('src_%i' % src, 'dest_%i' % dest)
elif dir == '-':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('dest_%i' % dest, 'src_%i' % src)
else:
raise ValueError('invalid direction')
return g
|
Add utils for graph visualization.#!/usr/bin/env python
import itertools
import os
import tempfile
import conn
import matplotlib.pyplot as plt
import networkx as nx
def imdisp(f):
"""
Display the specified image file using matplotlib.
"""
im = plt.imread(f)
plt.imshow(im)
plt.axis('off')
plt.draw()
return im
def show_pydot(g):
"""
Display a networkx graph using pydot.
"""
fd = tempfile.NamedTemporaryFile()
fd.close()
p = nx.to_pydot(g)
p.write_jpg(fd.name)
imdisp(fd.name)
os.remove(fd.name)
def show_pygraphviz(g, prog='dot', graph_attr={}, node_attr={}, edge_attr={}):
"""
Display a networkx graph using pygraphviz.
"""
fd = tempfile.NamedTemporaryFile(suffix='.jpg')
fd.close()
p = nx.to_agraph(g)
p.graph_attr.update(graph_attr)
p.node_attr.update(node_attr)
p.edge_attr.update(edge_attr)
p.draw(fd.name, prog=prog)
imdisp(fd.name)
os.remove(fd.name)
def conn_to_bipartite(c):
"""
Convert a Connectivity object into a bipartite NetworkX multigraph.
"""
g = nx.MultiDiGraph()
src_nodes = ['src_%i' % i for i in xrange(c.N_src)]
dest_nodes = ['dest_%i' % i for i in xrange(c.N_dest)]
g.add_nodes_from(src_nodes)
g.add_nodes_from(dest_nodes)
for key in c._data.keys():
syn, dir, name = key.split('/')
syn = int(syn)
if name == 'conn':
if dir == '+':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('src_%i' % src, 'dest_%i' % dest)
elif dir == '-':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('dest_%i' % dest, 'src_%i' % src)
else:
raise ValueError('invalid direction')
return g
|
<commit_before><commit_msg>Add utils for graph visualization.<commit_after>#!/usr/bin/env python
import itertools
import os
import tempfile
import conn
import matplotlib.pyplot as plt
import networkx as nx
def imdisp(f):
"""
Display the specified image file using matplotlib.
"""
im = plt.imread(f)
plt.imshow(im)
plt.axis('off')
plt.draw()
return im
def show_pydot(g):
"""
Display a networkx graph using pydot.
"""
fd = tempfile.NamedTemporaryFile()
fd.close()
p = nx.to_pydot(g)
p.write_jpg(fd.name)
imdisp(fd.name)
os.remove(fd.name)
def show_pygraphviz(g, prog='dot', graph_attr={}, node_attr={}, edge_attr={}):
"""
Display a networkx graph using pygraphviz.
"""
fd = tempfile.NamedTemporaryFile(suffix='.jpg')
fd.close()
p = nx.to_agraph(g)
p.graph_attr.update(graph_attr)
p.node_attr.update(node_attr)
p.edge_attr.update(edge_attr)
p.draw(fd.name, prog=prog)
imdisp(fd.name)
os.remove(fd.name)
def conn_to_bipartite(c):
"""
Convert a Connectivity object into a bipartite NetworkX multigraph.
"""
g = nx.MultiDiGraph()
src_nodes = ['src_%i' % i for i in xrange(c.N_src)]
dest_nodes = ['dest_%i' % i for i in xrange(c.N_dest)]
g.add_nodes_from(src_nodes)
g.add_nodes_from(dest_nodes)
for key in c._data.keys():
syn, dir, name = key.split('/')
syn = int(syn)
if name == 'conn':
if dir == '+':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('src_%i' % src, 'dest_%i' % dest)
elif dir == '-':
for src, dest in itertools.product(xrange(c.N_src), xrange(c.N_dest)):
if c[src, dest, syn, dir, name] == 1:
g.add_edge('dest_%i' % dest, 'src_%i' % src)
else:
raise ValueError('invalid direction')
return g
|
|
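
Portability note: this module targets the Python 2 / networkx 1.x era (xrange, nx.to_pydot, nx.to_agraph). Under networkx >= 2.0 the same conversions live in the drawing subpackages, so a modern port would use (a sketch):

from networkx.drawing.nx_pydot import to_pydot    # was nx.to_pydot
from networkx.drawing.nx_agraph import to_agraph  # was nx.to_agraph

p = to_pydot(g)       # inside show_pydot
p = to_agraph(g)      # inside show_pygraphviz

with range replacing xrange in conn_to_bipartite.
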
525a8438bd601592c4f878ca5d42d3dab8943be0
|
ooni/tests/test_errors.py
|
ooni/tests/test_errors.py
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
        Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
Test that specific Failures are caught before parent Failures
|
Test that specific Failures are caught before parent Failures
|
Python
|
bsd-2-clause
|
0xPoly/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe
|
Test that specific Failures are caught before parent Failures
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
        Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
<commit_before><commit_msg>Test that specific Failures are caught before parent Failures<commit_after>
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
        Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
Test that specific Failures are caught before parent Failuresfrom twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
        Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
<commit_before><commit_msg>Test that specific Failures are caught before parent Failures<commit_after>from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after it's parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
|
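
Why the ordering matters (illustrative names, not ooni's actual failure list): handlers typically walk the known-failures list and stop at the first isinstance() hit, so a parent class listed before its subclass would swallow the more specific match:

class ConnectionLost(Exception): pass
class TCPTimedOut(ConnectionLost): pass

known_failures = [(TCPTimedOut, 'tcp_timeout'), (ConnectionLost, 'connection_lost')]

def failure_to_string(exc):
    for cls, name in known_failures:
        if isinstance(exc, cls):
            return name     # first match wins, so subclasses must come first
    return 'unknown'

print(failure_to_string(TCPTimedOut()))  # 'tcp_timeout', not 'connection_lost'

The test enforces exactly this invariant over ooni.errors.known_failures.
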
90d079928eaf48e370d21417e4d6e649ec0f5f6f
|
taskwiki/taskwiki.py
|
taskwiki/taskwiki.py
|
import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Tasks are identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.save_tasks()
cache.update_buffer()
if __name__ == '__main__':
update_from_tw()
|
import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Tasks are identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.update_tasks()
cache.save_tasks()
cache.update_buffer()
cache.evaluate_viewports()
if __name__ == '__main__':
update_from_tw()
|
Update tasks and evaluate viewports on saving
|
Taskwiki: Update tasks and evaluate viewports on saving
|
Python
|
mit
|
phha/taskwiki,Spirotot/taskwiki
|
import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Tasks are identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.save_tasks()
cache.update_buffer()
if __name__ == '__main__':
update_from_tw()
Taskwiki: Update tasks and evaluate viewports on saving
|
import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Tasks are identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.update_tasks()
cache.save_tasks()
cache.update_buffer()
cache.evaluate_viewports()
if __name__ == '__main__':
update_from_tw()
|
<commit_before>import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Tasks are identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.save_tasks()
cache.update_buffer()
if __name__ == '__main__':
update_from_tw()
<commit_msg>Taskwiki: Update tasks and evaluate viewports on saving<commit_after>
|
import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Tasks are identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.update_tasks()
cache.save_tasks()
cache.update_buffer()
cache.evaluate_viewports()
if __name__ == '__main__':
update_from_tw()
|
import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Tasks are identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.save_tasks()
cache.update_buffer()
if __name__ == '__main__':
update_from_tw()
Taskwiki: Update tasks and evaluate viewports on savingimport sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Tasks are identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.update_tasks()
cache.save_tasks()
cache.update_buffer()
cache.evaluate_viewports()
if __name__ == '__main__':
update_from_tw()
|
<commit_before>import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Tasks are identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.save_tasks()
cache.update_buffer()
if __name__ == '__main__':
update_from_tw()
<commit_msg>Taskwiki: Update tasks and evaluate viewports on saving<commit_after>import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Tasks are identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.update_tasks()
cache.save_tasks()
cache.update_buffer()
cache.evaluate_viewports()
if __name__ == '__main__':
update_from_tw()
|
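
Design note on the diff above: update_to_tw() now refreshes from TaskWarrior (update_tasks()) before pushing buffer edits (save_tasks()), so saving a wiki file no longer clobbers changes made outside vim, and evaluate_viewports() re-expands saved-query sections after the write. A rough sequence of the save hook:

cache.reset()               # drop per-buffer state from the last run
cache.load_buffer()         # parse vimwiki task lines
cache.update_tasks()        # TW -> buffer: pull external changes first
cache.save_tasks()          # buffer -> TW: push the edits
cache.update_buffer()       # rewrite lines (uuids, statuses, ...)
cache.evaluate_viewports()  # regenerate viewport-backed task lists
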
eb71a3d3319480b3f99cb44f934a51bfb1b5bd67
|
pyatv/auth/hap_channel.py
|
pyatv/auth/hap_channel.py
|
"""Base class for HAP based channels (connections)."""
from abc import ABC, abstractmethod
import asyncio
import logging
from typing import Callable, Tuple, cast
from pyatv.auth.hap_pairing import PairVerifyProcedure
from pyatv.auth.hap_session import HAPSession
from pyatv.support import log_binary
_LOGGER = logging.getLogger(__name__)
class AbstractHAPChannel(ABC, asyncio.Protocol):
"""Abstract base class for connections using HAP encryption and segmenting."""
def __init__(self, output_key: bytes, input_key: bytes) -> None:
"""Initialize a new AbstractHAPChannel instance."""
self.buffer = b""
self.transport = None
self.session: HAPSession = HAPSession()
self.session.enable(output_key, input_key)
def connection_made(self, transport) -> None:
"""Device connection was made."""
sock = transport.get_extra_info("socket")
dstaddr, dstport = sock.getpeername()
_LOGGER.debug("Connected to %s:%d", dstaddr, dstport)
self.transport = transport
def data_received(self, data: bytes) -> None:
"""Message was received from device."""
assert self.transport is not None
decrypt = self.session.decrypt(data)
log_binary(_LOGGER, "Received data", Data=data)
self.buffer += decrypt
self.handle_received()
@abstractmethod
def handle_received(self) -> None:
"""Handle received data that was put in buffer."""
def send(self, data: bytes) -> None:
"""Send message to device."""
assert self.transport is not None
encrypted = self.session.encrypt(data)
log_binary(_LOGGER, "Sending data", Encrypted=encrypted)
self.transport.write(encrypted)
def connection_lost(self, exc) -> None:
"""Device connection was dropped."""
_LOGGER.debug("Connection was lost to remote")
async def setup_channel(
factory: Callable[[bytes, bytes], AbstractHAPChannel],
verifier: PairVerifyProcedure,
address: str,
port: int,
salt: str,
output_info: str,
input_info: str,
) -> Tuple[asyncio.BaseTransport, AbstractHAPChannel]:
"""Set up a new HAP channel and enable encryption."""
out_key, in_key = verifier.encryption_keys(salt, output_info, input_info)
loop = asyncio.get_event_loop()
transport, protocol = await loop.create_connection(
lambda: factory(out_key, in_key),
address,
port,
)
return transport, cast(AbstractHAPChannel, protocol)
|
Add abstract class for HAP channels
|
auth: Add abstract class for HAP channels
Relates to #1255
|
Python
|
mit
|
postlund/pyatv,postlund/pyatv
|
auth: Add abstract class for HAP channels
Relates to #1255
|
"""Base class for HAP based channels (connections)."""
from abc import ABC, abstractmethod
import asyncio
import logging
from typing import Callable, Tuple, cast
from pyatv.auth.hap_pairing import PairVerifyProcedure
from pyatv.auth.hap_session import HAPSession
from pyatv.support import log_binary
_LOGGER = logging.getLogger(__name__)
class AbstractHAPChannel(ABC, asyncio.Protocol):
"""Abstract base class for connections using HAP encryption and segmenting."""
def __init__(self, output_key: bytes, input_key: bytes) -> None:
"""Initialize a new AbstractHAPChannel instance."""
self.buffer = b""
self.transport = None
self.session: HAPSession = HAPSession()
self.session.enable(output_key, input_key)
def connection_made(self, transport) -> None:
"""Device connection was made."""
sock = transport.get_extra_info("socket")
dstaddr, dstport = sock.getpeername()
_LOGGER.debug("Connected to %s:%d", dstaddr, dstport)
self.transport = transport
def data_received(self, data: bytes) -> None:
"""Message was received from device."""
assert self.transport is not None
decrypt = self.session.decrypt(data)
log_binary(_LOGGER, "Received data", Data=data)
self.buffer += decrypt
self.handle_received()
@abstractmethod
def handle_received(self) -> None:
"""Handle received data that was put in buffer."""
def send(self, data: bytes) -> None:
"""Send message to device."""
assert self.transport is not None
encrypted = self.session.encrypt(data)
log_binary(_LOGGER, "Sending data", Encrypted=encrypted)
self.transport.write(encrypted)
def connection_lost(self, exc) -> None:
"""Device connection was dropped."""
_LOGGER.debug("Connection was lost to remote")
async def setup_channel(
factory: Callable[[bytes, bytes], AbstractHAPChannel],
verifier: PairVerifyProcedure,
address: str,
port: int,
salt: str,
output_info: str,
input_info: str,
) -> Tuple[asyncio.BaseTransport, AbstractHAPChannel]:
"""Set up a new HAP channel and enable encryption."""
out_key, in_key = verifier.encryption_keys(salt, output_info, input_info)
loop = asyncio.get_event_loop()
transport, protocol = await loop.create_connection(
lambda: factory(out_key, in_key),
address,
port,
)
return transport, cast(AbstractHAPChannel, protocol)
|
<commit_before><commit_msg>auth: Add abstract class for HAP channels
Relates to #1255<commit_after>
|
"""Base class for HAP based channels (connections)."""
from abc import ABC, abstractmethod
import asyncio
import logging
from typing import Callable, Tuple, cast
from pyatv.auth.hap_pairing import PairVerifyProcedure
from pyatv.auth.hap_session import HAPSession
from pyatv.support import log_binary
_LOGGER = logging.getLogger(__name__)
class AbstractHAPChannel(ABC, asyncio.Protocol):
"""Abstract base class for connections using HAP encryption and segmenting."""
def __init__(self, output_key: bytes, input_key: bytes) -> None:
"""Initialize a new AbstractHAPChannel instance."""
self.buffer = b""
self.transport = None
self.session: HAPSession = HAPSession()
self.session.enable(output_key, input_key)
def connection_made(self, transport) -> None:
"""Device connection was made."""
sock = transport.get_extra_info("socket")
dstaddr, dstport = sock.getpeername()
_LOGGER.debug("Connected to %s:%d", dstaddr, dstport)
self.transport = transport
def data_received(self, data: bytes) -> None:
"""Message was received from device."""
assert self.transport is not None
decrypt = self.session.decrypt(data)
log_binary(_LOGGER, "Received data", Data=data)
self.buffer += decrypt
self.handle_received()
@abstractmethod
def handle_received(self) -> None:
"""Handle received data that was put in buffer."""
def send(self, data: bytes) -> None:
"""Send message to device."""
assert self.transport is not None
encrypted = self.session.encrypt(data)
log_binary(_LOGGER, "Sending data", Encrypted=encrypted)
self.transport.write(encrypted)
def connection_lost(self, exc) -> None:
"""Device connection was dropped."""
_LOGGER.debug("Connection was lost to remote")
async def setup_channel(
factory: Callable[[bytes, bytes], AbstractHAPChannel],
verifier: PairVerifyProcedure,
address: str,
port: int,
salt: str,
output_info: str,
input_info: str,
) -> Tuple[asyncio.BaseTransport, AbstractHAPChannel]:
"""Set up a new HAP channel and enable encryption."""
out_key, in_key = verifier.encryption_keys(salt, output_info, input_info)
loop = asyncio.get_event_loop()
transport, protocol = await loop.create_connection(
lambda: factory(out_key, in_key),
address,
port,
)
return transport, cast(AbstractHAPChannel, protocol)
|
auth: Add abstract class for HAP channels
Relates to #1255"""Base class for HAP based channels (connections)."""
from abc import ABC, abstractmethod
import asyncio
import logging
from typing import Callable, Tuple, cast
from pyatv.auth.hap_pairing import PairVerifyProcedure
from pyatv.auth.hap_session import HAPSession
from pyatv.support import log_binary
_LOGGER = logging.getLogger(__name__)
class AbstractHAPChannel(ABC, asyncio.Protocol):
"""Abstract base class for connections using HAP encryption and segmenting."""
def __init__(self, output_key: bytes, input_key: bytes) -> None:
"""Initialize a new AbstractHAPChannel instance."""
self.buffer = b""
self.transport = None
self.session: HAPSession = HAPSession()
self.session.enable(output_key, input_key)
def connection_made(self, transport) -> None:
"""Device connection was made."""
sock = transport.get_extra_info("socket")
dstaddr, dstport = sock.getpeername()
_LOGGER.debug("Connected to %s:%d", dstaddr, dstport)
self.transport = transport
def data_received(self, data: bytes) -> None:
"""Message was received from device."""
assert self.transport is not None
decrypt = self.session.decrypt(data)
log_binary(_LOGGER, "Received data", Data=data)
self.buffer += decrypt
self.handle_received()
@abstractmethod
def handle_received(self) -> None:
"""Handle received data that was put in buffer."""
def send(self, data: bytes) -> None:
"""Send message to device."""
assert self.transport is not None
encrypted = self.session.encrypt(data)
log_binary(_LOGGER, "Sending data", Encrypted=encrypted)
self.transport.write(encrypted)
def connection_lost(self, exc) -> None:
"""Device connection was dropped."""
_LOGGER.debug("Connection was lost to remote")
async def setup_channel(
factory: Callable[[bytes, bytes], AbstractHAPChannel],
verifier: PairVerifyProcedure,
address: str,
port: int,
salt: str,
output_info: str,
input_info: str,
) -> Tuple[asyncio.BaseTransport, AbstractHAPChannel]:
"""Set up a new HAP channel and enable encryption."""
out_key, in_key = verifier.encryption_keys(salt, output_info, input_info)
loop = asyncio.get_event_loop()
transport, protocol = await loop.create_connection(
lambda: factory(out_key, in_key),
address,
port,
)
return transport, cast(AbstractHAPChannel, protocol)
|
<commit_before><commit_msg>auth: Add abstract class for HAP channels
Relates to #1255<commit_after>"""Base class for HAP based channels (connections)."""
from abc import ABC, abstractmethod
import asyncio
import logging
from typing import Callable, Tuple, cast
from pyatv.auth.hap_pairing import PairVerifyProcedure
from pyatv.auth.hap_session import HAPSession
from pyatv.support import log_binary
_LOGGER = logging.getLogger(__name__)
class AbstractHAPChannel(ABC, asyncio.Protocol):
"""Abstract base class for connections using HAP encryption and segmenting."""
def __init__(self, output_key: bytes, input_key: bytes) -> None:
"""Initialize a new AbstractHAPChannel instance."""
self.buffer = b""
self.transport = None
self.session: HAPSession = HAPSession()
self.session.enable(output_key, input_key)
def connection_made(self, transport) -> None:
"""Device connection was made."""
sock = transport.get_extra_info("socket")
dstaddr, dstport = sock.getpeername()
_LOGGER.debug("Connected to %s:%d", dstaddr, dstport)
self.transport = transport
def data_received(self, data: bytes) -> None:
"""Message was received from device."""
assert self.transport is not None
decrypt = self.session.decrypt(data)
log_binary(_LOGGER, "Received data", Data=data)
self.buffer += decrypt
self.handle_received()
@abstractmethod
def handle_received(self) -> None:
"""Handle received data that was put in buffer."""
def send(self, data: bytes) -> None:
"""Send message to device."""
assert self.transport is not None
encrypted = self.session.encrypt(data)
log_binary(_LOGGER, "Sending data", Encrypted=encrypted)
self.transport.write(encrypted)
def connection_lost(self, exc) -> None:
"""Device connection was dropped."""
_LOGGER.debug("Connection was lost to remote")
async def setup_channel(
factory: Callable[[bytes, bytes], AbstractHAPChannel],
verifier: PairVerifyProcedure,
address: str,
port: int,
salt: str,
output_info: str,
input_info: str,
) -> Tuple[asyncio.BaseTransport, AbstractHAPChannel]:
"""Set up a new HAP channel and enable encryption."""
out_key, in_key = verifier.encryption_keys(salt, output_info, input_info)
loop = asyncio.get_event_loop()
transport, protocol = await loop.create_connection(
lambda: factory(out_key, in_key),
address,
port,
)
return transport, cast(AbstractHAPChannel, protocol)
|
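The contract above leaves a single hook for subclasses: handle_received(), which drains self.buffer as decrypted bytes accumulate. A minimal sketch of a concrete channel under that contract (the echo behavior is hypothetical, not pyatv's actual wire protocol):
class EchoHAPChannel(AbstractHAPChannel):
    """Illustrative subclass only; real channels parse protocol-specific frames."""
    def handle_received(self) -> None:
        # Drain everything decrypted so far and send it straight back;
        # send() re-encrypts through the shared HAPSession.
        data, self.buffer = self.buffer, b""
        if data:
            self.send(data)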
|
bd01797f18012927202b87872dc33caf685306c0
|
gdb.py
|
gdb.py
|
deadbeef = 0xdeadbeefdeadbeef
abc_any = gdb.lookup_type("union any")
def color(s, c):
return "\x1b[" + str(c) + "m" + s + "\x1b[0m"
def gray(s):
return color(s, 90)
def red(s):
return color(s, "1;31")
def p(indent, tag, value):
print(" " * indent + tag + ": " + str(value))
def print_abc(i, v):
v = v.cast(abc_any)
vt = v['as_tagged']
if vt == 0xdeadf00ddeadf00d:
p(i, "Unit", "Unit")
elif vt == deadbeef:
p(i, "Dead", "Beef")
elif vt == 0:
p(i, red("!!!NULL POINTER!!!"), "This should never happen")
elif (vt & 0xfff0000000000000) != 0:
p(i, "Number", (~vt).cast(abc_any)['as_num'])
elif vt < 0x00007f0000000000: # FIXME should get actual mappings -- don't know how to.
block = gdb.block_for_pc(int(vt))
if block == None:
name = str(v['as_indirect'])
else:
name = str(block.function)
p(i, "Block", name)
else:
tag = vt & 0x3
ptr = vt & ~0x3
hexptr = gray(hex(int(ptr)))
v = ptr.cast(abc_any)
try:
if tag == 0:
pair = v['as_pair'].dereference()
if pair['snd']['as_tagged'] == deadbeef:
p(i, "Left", hexptr)
print_abc(i+4, pair['fst'])
else:
p(i, "Pair", hexptr)
print_abc(i+4, pair['fst'])
print_abc(i+4, pair['snd'])
elif tag == 1:
pair = v['as_comp_block'].dereference()
if pair['yz']['as_tagged'] == deadbeef:
p(i, "Right", hexptr)
print_abc(i+4, pair['xy'])
else:
p(i, "Composed", hexptr)
print_abc(i+4, pair['xy'])
print_abc(i+4, pair['yz'])
elif tag == 2:
p(i, "Quoted", hexptr)
print_abc(i+4, v['as_indirect'].dereference())
else:
p(i, "INVALID TAG", hexptr)
except gdb.MemoryError:
p(i, red("!!!INVALID POINTER!!!"), hexptr)
class PrintABCValue(gdb.Command):
def __init__(self):
super(PrintABCValue, self).__init__('print-abc-value', gdb.COMMAND_DATA, gdb.COMPLETE_SYMBOL)
def invoke(self, arg, tty):
print_abc(0, gdb.parse_and_eval(arg))
PrintABCValue()
|
Add GDB plugin for printing ABC values
|
Add GDB plugin for printing ABC values
|
Python
|
bsd-3-clause
|
klkblake/abcc,klkblake/abcc,klkblake/abcc,klkblake/abcc
|
Add GDB plugin for printing ABC values
|
deadbeef = 0xdeadbeefdeadbeef
abc_any = gdb.lookup_type("union any")
def color(s, c):
return "\x1b[" + str(c) + "m" + s + "\x1b[0m"
def gray(s):
return color(s, 90)
def red(s):
return color(s, "1;31")
def p(indent, tag, value):
print(" " * indent + tag + ": " + str(value))
def print_abc(i, v):
v = v.cast(abc_any)
vt = v['as_tagged']
if vt == 0xdeadf00ddeadf00d:
p(i, "Unit", "Unit")
elif vt == deadbeef:
p(i, "Dead", "Beef")
elif vt == 0:
p(i, red("!!!NULL POINTER!!!"), "This should never happen")
elif (vt & 0xfff0000000000000) != 0:
p(i, "Number", (~vt).cast(abc_any)['as_num'])
elif vt < 0x00007f0000000000: # FIXME should get actual mappings -- don't know how to.
block = gdb.block_for_pc(int(vt))
if block == None:
name = str(v['as_indirect'])
else:
name = str(block.function)
p(i, "Block", name)
else:
tag = vt & 0x3
ptr = vt & ~0x3
hexptr = gray(hex(int(ptr)))
v = ptr.cast(abc_any)
try:
if tag == 0:
pair = v['as_pair'].dereference()
if pair['snd']['as_tagged'] == deadbeef:
p(i, "Left", hexptr)
print_abc(i+4, pair['fst'])
else:
p(i, "Pair", hexptr)
print_abc(i+4, pair['fst'])
print_abc(i+4, pair['snd'])
elif tag == 1:
pair = v['as_comp_block'].dereference()
if pair['yz']['as_tagged'] == deadbeef:
p(i, "Right", hexptr)
print_abc(i+4, pair['xy'])
else:
p(i, "Composed", hexptr)
print_abc(i+4, pair['xy'])
print_abc(i+4, pair['yz'])
elif tag == 2:
p(i, "Quoted", hexptr)
print_abc(i+4, v['as_indirect'].dereference())
else:
p(i, "INVALID TAG", hexptr)
except gdb.MemoryError:
p(i, red("!!!INVALID POINTER!!!"), hexptr)
class PrintABCValue(gdb.Command):
def __init__(self):
super(PrintABCValue, self).__init__('print-abc-value', gdb.COMMAND_DATA, gdb.COMPLETE_SYMBOL)
def invoke(self, arg, tty):
print_abc(0, gdb.parse_and_eval(arg))
PrintABCValue()
|
<commit_before><commit_msg>Add GDB plugin for printing ABC values<commit_after>
|
deadbeef = 0xdeadbeefdeadbeef
abc_any = gdb.lookup_type("union any")
def color(s, c):
return "\x1b[" + str(c) + "m" + s + "\x1b[0m"
def gray(s):
return color(s, 90)
def red(s):
return color(s, "1;31")
def p(indent, tag, value):
print(" " * indent + tag + ": " + str(value))
def print_abc(i, v):
v = v.cast(abc_any)
vt = v['as_tagged']
if vt == 0xdeadf00ddeadf00d:
p(i, "Unit", "Unit")
elif vt == deadbeef:
p(i, "Dead", "Beef")
elif vt == 0:
p(i, red("!!!NULL POINTER!!!"), "This should never happen")
elif (vt & 0xfff0000000000000) != 0:
p(i, "Number", (~vt).cast(abc_any)['as_num'])
elif vt < 0x00007f0000000000: # FIXME should get actual mappings -- don't know how to.
block = gdb.block_for_pc(int(vt))
if block == None:
name = str(v['as_indirect'])
else:
name = str(block.function)
p(i, "Block", name)
else:
tag = vt & 0x3
ptr = vt & ~0x3
hexptr = gray(hex(int(ptr)))
v = ptr.cast(abc_any)
try:
if tag == 0:
pair = v['as_pair'].dereference()
if pair['snd']['as_tagged'] == deadbeef:
p(i, "Left", hexptr)
print_abc(i+4, pair['fst'])
else:
p(i, "Pair", hexptr)
print_abc(i+4, pair['fst'])
print_abc(i+4, pair['snd'])
elif tag == 1:
pair = v['as_comp_block'].dereference()
if pair['yz']['as_tagged'] == deadbeef:
p(i, "Right", hexptr)
print_abc(i+4, pair['xy'])
else:
p(i, "Composed", hexptr)
print_abc(i+4, pair['xy'])
print_abc(i+4, pair['yz'])
elif tag == 2:
p(i, "Quoted", hexptr)
print_abc(i+4, v['as_indirect'].dereference())
else:
p(i, "INVALID TAG", hexptr)
except gdb.MemoryError:
p(i, red("!!!INVALID POINTER!!!"), hexptr)
class PrintABCValue(gdb.Command):
def __init__(self):
super(PrintABCValue, self).__init__('print-abc-value', gdb.COMMAND_DATA, gdb.COMPLETE_SYMBOL)
def invoke(self, arg, tty):
print_abc(0, gdb.parse_and_eval(arg))
PrintABCValue()
|
Add GDB plugin for printing ABC valuesdeadbeef = 0xdeadbeefdeadbeef
abc_any = gdb.lookup_type("union any")
def color(s, c):
return "\x1b[" + str(c) + "m" + s + "\x1b[0m"
def gray(s):
return color(s, 90)
def red(s):
return color(s, "1;31")
def p(indent, tag, value):
print(" " * indent + tag + ": " + str(value))
def print_abc(i, v):
v = v.cast(abc_any)
vt = v['as_tagged']
if vt == 0xdeadf00ddeadf00d:
p(i, "Unit", "Unit")
elif vt == deadbeef:
p(i, "Dead", "Beef")
elif vt == 0:
p(i, red("!!!NULL POINTER!!!"), "This should never happen")
elif (vt & 0xfff0000000000000) != 0:
p(i, "Number", (~vt).cast(abc_any)['as_num'])
elif vt < 0x00007f0000000000: # FIXME should get actual mappings -- don't know how to.
block = gdb.block_for_pc(int(vt))
if block == None:
name = str(v['as_indirect'])
else:
name = str(block.function)
p(i, "Block", name)
else:
tag = vt & 0x3
ptr = vt & ~0x3
hexptr = gray(hex(int(ptr)))
v = ptr.cast(abc_any)
try:
if tag == 0:
pair = v['as_pair'].dereference()
if pair['snd']['as_tagged'] == deadbeef:
p(i, "Left", hexptr)
print_abc(i+4, pair['fst'])
else:
p(i, "Pair", hexptr)
print_abc(i+4, pair['fst'])
print_abc(i+4, pair['snd'])
elif tag == 1:
pair = v['as_comp_block'].dereference()
if pair['yz']['as_tagged'] == deadbeef:
p(i, "Right", hexptr)
print_abc(i+4, pair['xy'])
else:
p(i, "Composed", hexptr)
print_abc(i+4, pair['xy'])
print_abc(i+4, pair['yz'])
elif tag == 2:
p(i, "Quoted", hexptr)
print_abc(i+4, v['as_indirect'].dereference())
else:
p(i, "INVALID TAG", hexptr)
except gdb.MemoryError:
p(i, red("!!!INVALID POINTER!!!"), hexptr)
class PrintABCValue(gdb.Command):
def __init__(self):
super(PrintABCValue, self).__init__('print-abc-value', gdb.COMMAND_DATA, gdb.COMPLETE_SYMBOL)
def invoke(self, arg, tty):
print_abc(0, gdb.parse_and_eval(arg))
PrintABCValue()
|
<commit_before><commit_msg>Add GDB plugin for printing ABC values<commit_after>deadbeef = 0xdeadbeefdeadbeef
abc_any = gdb.lookup_type("union any")
def color(s, c):
return "\x1b[" + str(c) + "m" + s + "\x1b[0m"
def gray(s):
return color(s, 90)
def red(s):
return color(s, "1;31")
def p(indent, tag, value):
print(" " * indent + tag + ": " + str(value))
def print_abc(i, v):
v = v.cast(abc_any)
vt = v['as_tagged']
if vt == 0xdeadf00ddeadf00d:
p(i, "Unit", "Unit")
elif vt == deadbeef:
p(i, "Dead", "Beef")
elif vt == 0:
p(i, red("!!!NULL POINTER!!!"), "This should never happen")
elif (vt & 0xfff0000000000000) != 0:
p(i, "Number", (~vt).cast(abc_any)['as_num'])
elif vt < 0x00007f0000000000: # FIXME should get actual mappings -- don't know how to.
block = gdb.block_for_pc(int(vt))
if block == None:
name = str(v['as_indirect'])
else:
name = str(block.function)
p(i, "Block", name)
else:
tag = vt & 0x3
ptr = vt & ~0x3
hexptr = gray(hex(int(ptr)))
v = ptr.cast(abc_any)
try:
if tag == 0:
pair = v['as_pair'].dereference()
if pair['snd']['as_tagged'] == deadbeef:
p(i, "Left", hexptr)
print_abc(i+4, pair['fst'])
else:
p(i, "Pair", hexptr)
print_abc(i+4, pair['fst'])
print_abc(i+4, pair['snd'])
elif tag == 1:
pair = v['as_comp_block'].dereference()
if pair['yz']['as_tagged'] == deadbeef:
p(i, "Right", hexptr)
print_abc(i+4, pair['xy'])
else:
p(i, "Composed", hexptr)
print_abc(i+4, pair['xy'])
print_abc(i+4, pair['yz'])
elif tag == 2:
p(i, "Quoted", hexptr)
print_abc(i+4, v['as_indirect'].dereference())
else:
p(i, "INVALID TAG", hexptr)
except gdb.MemoryError:
p(i, red("!!!INVALID POINTER!!!"), hexptr)
class PrintABCValue(gdb.Command):
def __init__(self):
super(PrintABCValue, self).__init__('print-abc-value', gdb.COMMAND_DATA, gdb.COMPLETE_SYMBOL)
def invoke(self, arg, tty):
print_abc(0, gdb.parse_and_eval(arg))
PrintABCValue()
|
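Once sourced into a GDB session, the command class above registers itself under the name passed to its constructor; a typical use (the value name is illustrative):
(gdb) source gdb.py
(gdb) print-abc-value v
This walks the tagged-pointer encoding recursively, pretty-printing units, numbers, blocks, pairs, sums and quoted values with ANSI colors, and flags null or invalid pointers in red.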
|
2d320058c96f88348d8226fa4a827a6c2c973237
|
mds.py
|
mds.py
|
"""
Simple implementation of classical MDS.
See http://www.stat.cmu.edu/~ryantibs/datamining/lectures/09-dim3-marked.pdf for more details.
"""
import numpy as np
import numpy.linalg as linalg
import matplotlib.pyplot as plt
def square_points(size):
nsensors = size**2
return np.array([(i/size, i%size) for i in range(nsensors)])
def norm(vec):
return np.sqrt(np.sum(vec**2))
def mds(D, dim=2):
"""
Classical multidimensional scaling algorithm.
Given a matrix of interpoint distances D, find a set of low dimensional points
that have a similar interpoint distances.
"""
(n,n) = D.shape
A = (-0.5 * D**2)
M = np.ones((n,n))/n
I = np.eye(n)
B = np.dot(np.dot(I-M, A),I-M)
'''Another way to compute inner-products matrix B
Ac = np.mat(np.mean(A, 1))
Ar = np.mat(np.mean(A, 0))
B = np.array(A - np.transpose(Ac) - Ar + np.mean(A))
'''
[U,S,V] = linalg.svd(B)
Y = U * np.sqrt(S)
return (Y[:,0:dim], S)
def test():
points = square_points(10)
distance = np.zeros((100,100))
for (i, pointi) in enumerate(points):
for (j, pointj) in enumerate(points):
distance[i,j] = norm(pointi-pointj)
Y, eigs = mds(distance)
plt.figure()
plt.plot(Y[:,0], Y[:,1], '.')
plt.figure(2)
plt.plot(points[:,0], points[:,1], '.')
plt.show()
def main():
import sys, os, getopt, pdb
def usage():
print sys.argv[0] + "[-h] [-d]"
try:
(options, args) = getopt.getopt(sys.argv[1:], 'dh', ['help', 'debug'])
except getopt.GetoptError:
usage()
sys.exit(2)
for o, a in options:
if o in ('-h', '--help'):
usage()
sys.exit()
elif o in ('-d', '--debug'):
pdb.set_trace()
test()
if __name__ == '__main__':
main()
|
Add Classical multidimensional scaling algorithm.
|
Add Classical multidimensional scaling algorithm.
|
Python
|
mit
|
ntduong/ML
|
Add Classical multidimensional scaling algorithm.
|
"""
Simple implementation of classical MDS.
See http://www.stat.cmu.edu/~ryantibs/datamining/lectures/09-dim3-marked.pdf for more details.
"""
import numpy as np
import numpy.linalg as linalg
import matplotlib.pyplot as plt
def square_points(size):
nsensors = size**2
return np.array([(i/size, i%size) for i in range(nsensors)])
def norm(vec):
return np.sqrt(np.sum(vec**2))
def mds(D, dim=2):
"""
Classical multidimensional scaling algorithm.
Given a matrix of interpoint distances D, find a set of low dimensional points
that have a similar interpoint distances.
"""
(n,n) = D.shape
A = (-0.5 * D**2)
M = np.ones((n,n))/n
I = np.eye(n)
B = np.dot(np.dot(I-M, A),I-M)
'''Another way to compute inner-products matrix B
Ac = np.mat(np.mean(A, 1))
Ar = np.mat(np.mean(A, 0))
B = np.array(A - np.transpose(Ac) - Ar + np.mean(A))
'''
[U,S,V] = linalg.svd(B)
Y = U * np.sqrt(S)
return (Y[:,0:dim], S)
def test():
points = square_points(10)
distance = np.zeros((100,100))
for (i, pointi) in enumerate(points):
for (j, pointj) in enumerate(points):
distance[i,j] = norm(pointi-pointj)
Y, eigs = mds(distance)
plt.figure()
plt.plot(Y[:,0], Y[:,1], '.')
plt.figure(2)
plt.plot(points[:,0], points[:,1], '.')
plt.show()
def main():
import sys, os, getopt, pdb
def usage():
print sys.argv[0] + "[-h] [-d]"
try:
(options, args) = getopt.getopt(sys.argv[1:], 'dh', ['help', 'debug'])
except getopt.GetoptError:
usage()
sys.exit(2)
for o, a in options:
if o in ('-h', '--help'):
usage()
sys.exit()
elif o in ('-d', '--debug'):
pdb.set_trace()
test()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add Classical multidimensional scaling algorithm.<commit_after>
|
"""
Simple implementation of classical MDS.
See http://www.stat.cmu.edu/~ryantibs/datamining/lectures/09-dim3-marked.pdf for more details.
"""
import numpy as np
import numpy.linalg as linalg
import matplotlib.pyplot as plt
def square_points(size):
nsensors = size**2
return np.array([(i/size, i%size) for i in range(nsensors)])
def norm(vec):
return np.sqrt(np.sum(vec**2))
def mds(D, dim=2):
"""
Classical multidimensional scaling algorithm.
Given a matrix of interpoint distances D, find a set of low dimensional points
that have a similar interpoint distances.
"""
(n,n) = D.shape
A = (-0.5 * D**2)
M = np.ones((n,n))/n
I = np.eye(n)
B = np.dot(np.dot(I-M, A),I-M)
'''Another way to compute inner-products matrix B
Ac = np.mat(np.mean(A, 1))
Ar = np.mat(np.mean(A, 0))
B = np.array(A - np.transpose(Ac) - Ar + np.mean(A))
'''
[U,S,V] = linalg.svd(B)
Y = U * np.sqrt(S)
return (Y[:,0:dim], S)
def test():
points = square_points(10)
distance = np.zeros((100,100))
for (i, pointi) in enumerate(points):
for (j, pointj) in enumerate(points):
distance[i,j] = norm(pointi-pointj)
Y, eigs = mds(distance)
plt.figure()
plt.plot(Y[:,0], Y[:,1], '.')
plt.figure(2)
plt.plot(points[:,0], points[:,1], '.')
plt.show()
def main():
import sys, os, getopt, pdb
def usage():
print sys.argv[0] + "[-h] [-d]"
try:
(options, args) = getopt.getopt(sys.argv[1:], 'dh', ['help', 'debug'])
except getopt.GetoptError:
usage()
sys.exit(2)
for o, a in options:
if o in ('-h', '--help'):
usage()
sys.exit()
elif o in ('-d', '--debug'):
pdb.set_trace()
test()
if __name__ == '__main__':
main()
|
Add Classical multidimensional scaling algorithm."""
Simple implementation of classical MDS.
See http://www.stat.cmu.edu/~ryantibs/datamining/lectures/09-dim3-marked.pdf for more details.
"""
import numpy as np
import numpy.linalg as linalg
import matplotlib.pyplot as plt
def square_points(size):
nsensors = size**2
return np.array([(i/size, i%size) for i in range(nsensors)])
def norm(vec):
return np.sqrt(np.sum(vec**2))
def mds(D, dim=2):
"""
Classical multidimensional scaling algorithm.
Given a matrix of interpoint distances D, find a set of low dimensional points
that have a similar interpoint distances.
"""
(n,n) = D.shape
A = (-0.5 * D**2)
M = np.ones((n,n))/n
I = np.eye(n)
B = np.dot(np.dot(I-M, A),I-M)
'''Another way to compute inner-products matrix B
Ac = np.mat(np.mean(A, 1))
Ar = np.mat(np.mean(A, 0))
B = np.array(A - np.transpose(Ac) - Ar + np.mean(A))
'''
[U,S,V] = linalg.svd(B)
Y = U * np.sqrt(S)
return (Y[:,0:dim], S)
def test():
points = square_points(10)
distance = np.zeros((100,100))
for (i, pointi) in enumerate(points):
for (j, pointj) in enumerate(points):
distance[i,j] = norm(pointi-pointj)
Y, eigs = mds(distance)
plt.figure()
plt.plot(Y[:,0], Y[:,1], '.')
plt.figure(2)
plt.plot(points[:,0], points[:,1], '.')
plt.show()
def main():
import sys, os, getopt, pdb
def usage():
print sys.argv[0] + "[-h] [-d]"
try:
(options, args) = getopt.getopt(sys.argv[1:], 'dh', ['help', 'debug'])
except getopt.GetoptError:
usage()
sys.exit(2)
for o, a in options:
if o in ('-h', '--help'):
usage()
sys.exit()
elif o in ('-d', '--debug'):
pdb.set_trace()
test()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add Classical multidimensional scaling algorithm.<commit_after>"""
Simple implementation of classical MDS.
See http://www.stat.cmu.edu/~ryantibs/datamining/lectures/09-dim3-marked.pdf for more details.
"""
import numpy as np
import numpy.linalg as linalg
import matplotlib.pyplot as plt
def square_points(size):
nsensors = size**2
return np.array([(i/size, i%size) for i in range(nsensors)])
def norm(vec):
return np.sqrt(np.sum(vec**2))
def mds(D, dim=2):
"""
Classical multidimensional scaling algorithm.
Given a matrix of interpoint distances D, find a set of low dimensional points
that have a similar interpoint distances.
"""
(n,n) = D.shape
A = (-0.5 * D**2)
M = np.ones((n,n))/n
I = np.eye(n)
B = np.dot(np.dot(I-M, A),I-M)
'''Another way to compute inner-products matrix B
Ac = np.mat(np.mean(A, 1))
Ar = np.mat(np.mean(A, 0))
B = np.array(A - np.transpose(Ac) - Ar + np.mean(A))
'''
[U,S,V] = linalg.svd(B)
Y = U * np.sqrt(S)
return (Y[:,0:dim], S)
def test():
points = square_points(10)
distance = np.zeros((100,100))
for (i, pointi) in enumerate(points):
for (j, pointj) in enumerate(points):
distance[i,j] = norm(pointi-pointj)
Y, eigs = mds(distance)
plt.figure()
plt.plot(Y[:,0], Y[:,1], '.')
plt.figure(2)
plt.plot(points[:,0], points[:,1], '.')
plt.show()
def main():
import sys, os, getopt, pdb
def usage():
print sys.argv[0] + "[-h] [-d]"
try:
(options, args) = getopt.getopt(sys.argv[1:], 'dh', ['help', 'debug'])
except getopt.GetoptError:
usage()
sys.exit(2)
for o, a in options:
if o in ('-h', '--help'):
usage()
sys.exit()
elif o in ('-d', '--debug'):
pdb.set_trace()
test()
if __name__ == '__main__':
main()
|
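For reference, the double-centering that mds() performs is the standard classical-MDS identity. With D^{(2)} the matrix of squared distances:
A = -\tfrac{1}{2} D^{(2)}, \qquad J = I - \tfrac{1}{n}\mathbf{1}\mathbf{1}^{\top}, \qquad B = J A J = U S U^{\top}, \qquad Y = U S^{1/2},
and the first dim columns of Y give the embedding, which is exactly what the code computes with M = \mathbf{1}\mathbf{1}^{\top}/n and numpy's SVD.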
|
a78d879c9c097c32c58f5246d46a4a188b17d99c
|
workup/migrations/0002_add_verbose_names.py
|
workup/migrations/0002_add_verbose_names.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workup', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='historicalworkup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='historicalworkup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
migrations.AlterField(
model_name='workup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='workup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='workup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='workup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
]
|
Add workup verbose name change migration.
|
Add workup verbose name change migration.
|
Python
|
mit
|
SaturdayNeighborhoodHealthClinic/clintools,SaturdayNeighborhoodHealthClinic/clintools,SaturdayNeighborhoodHealthClinic/clintools
|
Add workup verbose name change migration.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workup', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='historicalworkup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='historicalworkup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
migrations.AlterField(
model_name='workup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='workup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='workup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='workup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
]
|
<commit_before><commit_msg>Add workup verbose name change migration.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workup', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='historicalworkup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='historicalworkup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
migrations.AlterField(
model_name='workup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='workup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='workup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='workup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
]
|
Add workup verbose name change migration.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workup', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='historicalworkup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='historicalworkup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
migrations.AlterField(
model_name='workup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='workup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='workup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='workup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
]
|
<commit_before><commit_msg>Add workup verbose name change migration.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workup', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='historicalworkup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='historicalworkup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='historicalworkup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
migrations.AlterField(
model_name='workup',
name='fam_hx',
field=models.TextField(verbose_name=b'Family History'),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_internal',
field=models.TextField(null=True, verbose_name=b'Labs Ordered Internally', blank=True),
),
migrations.AlterField(
model_name='workup',
name='labs_ordered_quest',
field=models.TextField(null=True, verbose_name=b'Labs Ordered from Quest', blank=True),
),
migrations.AlterField(
model_name='workup',
name='ros',
field=models.TextField(verbose_name=b'ROS'),
),
migrations.AlterField(
model_name='workup',
name='rx',
field=models.TextField(null=True, verbose_name=b'Prescription Orders', blank=True),
),
migrations.AlterField(
model_name='workup',
name='soc_hx',
field=models.TextField(verbose_name=b'Social History'),
),
]
|
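Each AlterField above mirrors a verbose_name declared on the model field itself; the declarations that generate this kind of migration look roughly as follows (a sketch, since the project's actual models.py is not part of this record):
from django.db import models
class Workup(models.Model):
    fam_hx = models.TextField(verbose_name="Family History")
    ros = models.TextField(verbose_name="ROS")
    rx = models.TextField(verbose_name="Prescription Orders", blank=True, null=True)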
|
1e050f30e8307a75976a52b8f1258a5b14e43733
|
wsgi_static.py
|
wsgi_static.py
|
import wsgi_server
import os
from werkzeug.wsgi import SharedDataMiddleware
application = SharedDataMiddleware(wsgi_server.application, {
'/static': os.path.join(os.path.dirname(__file__), 'static')
})
|
Add middleware for static serving
|
Add middleware for static serving
|
Python
|
agpl-3.0
|
cggh/DQXServer
|
Add middleware for static serving
|
import wsgi_server
import os
from werkzeug.wsgi import SharedDataMiddleware
application = SharedDataMiddleware(wsgi_server.application, {
'/static': os.path.join(os.path.dirname(__file__), 'static')
})
|
<commit_before><commit_msg>Add middleware for static serving<commit_after>
|
import wsgi_server
import os
from werkzeug.wsgi import SharedDataMiddleware
application = SharedDataMiddleware(wsgi_server.application, {
'/static': os.path.join(os.path.dirname(__file__), 'static')
})
|
Add middleware for static servingimport wsgi_server
import os
from werkzeug.wsgi import SharedDataMiddleware
application = SharedDataMiddleware(wsgi_server.application, {
'/static': os.path.join(os.path.dirname(__file__), 'static')
})
|
<commit_before><commit_msg>Add middleware for static serving<commit_after>import wsgi_server
import os
from werkzeug.wsgi import SharedDataMiddleware
application = SharedDataMiddleware(wsgi_server.application, {
'/static': os.path.join(os.path.dirname(__file__), 'static')
})
|
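SharedDataMiddleware serves files from the mapped directory for any request under /static and passes every other request through to the wrapped WSGI app. To try it locally with werkzeug's development server (host and port are illustrative):
from werkzeug.serving import run_simple
import wsgi_static
run_simple("localhost", 8080, wsgi_static.application)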
|
a086307e6aac341ed8a6596d0a05b7a8d198c7ec
|
zephyr/management/commands/dump_pointers.py
|
zephyr/management/commands/dump_pointers.py
|
from optparse import make_option
from django.core.management.base import BaseCommand
from zephyr.models import Realm, UserProfile
import simplejson
def dump():
pointers = []
for u in UserProfile.objects.select_related("user__email").all():
pointers.append((u.user.email, u.pointer))
file("dumped-pointers", "w").write(simplejson.dumps(pointers) + "\n")
def restore(change):
for (email, pointer) in simplejson.loads(file("dumped-pointers").read()):
u = UserProfile.objects.get(user__email=email)
print "%s: pointer %s => %s" % (email, u.pointer, pointer)
if change:
u.pointer = pointer
u.save()
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--restore', default=False, action='store_true'),
make_option('--dry-run', '-n', default=False, action='store_true'),)
def handle(self, *args, **options):
if options["restore"]:
restore(change=not options['dry_run'])
else:
dump()
|
Add command to dump and restore user pointers.
|
Add command to dump and restore user pointers.
For use in database migrations.
(imported from commit f06ae569fe986da5e7d144c277bf27be534c04f9)
|
Python
|
apache-2.0
|
brainwane/zulip,dwrpayne/zulip,dotcool/zulip,nicholasbs/zulip,ikasumiwt/zulip,DazWorrall/zulip,xuanhan863/zulip,tdr130/zulip,alliejones/zulip,brainwane/zulip,vabs22/zulip,Qgap/zulip,DazWorrall/zulip,souravbadami/zulip,developerfm/zulip,mdavid/zulip,mohsenSy/zulip,rishig/zulip,shubhamdhama/zulip,kaiyuanheshang/zulip,hackerkid/zulip,hustlzp/zulip,hackerkid/zulip,peiwei/zulip,bluesea/zulip,jrowan/zulip,dxq-git/zulip,Cheppers/zulip,luyifan/zulip,vaidap/zulip,codeKonami/zulip,Juanvulcano/zulip,vakila/zulip,technicalpickles/zulip,Jianchun1/zulip,adnanh/zulip,alliejones/zulip,dhcrzf/zulip,m1ssou/zulip,stamhe/zulip,vaidap/zulip,zachallaun/zulip,dawran6/zulip,zhaoweigg/zulip,dawran6/zulip,mansilladev/zulip,dawran6/zulip,so0k/zulip,jonesgithub/zulip,voidException/zulip,wangdeshui/zulip,aakash-cr7/zulip,PaulPetring/zulip,jainayush975/zulip,zofuthan/zulip,isht3/zulip,Drooids/zulip,tommyip/zulip,amyliu345/zulip,moria/zulip,Batterfii/zulip,ipernet/zulip,punchagan/zulip,hj3938/zulip,peguin40/zulip,jainayush975/zulip,krtkmj/zulip,cosmicAsymmetry/zulip,johnnygaddarr/zulip,saitodisse/zulip,Jianchun1/zulip,eeshangarg/zulip,nicholasbs/zulip,Drooids/zulip,aps-sids/zulip,noroot/zulip,natanovia/zulip,dattatreya303/zulip,wavelets/zulip,udxxabp/zulip,zacps/zulip,bluesea/zulip,JPJPJPOPOP/zulip,hafeez3000/zulip,brockwhittaker/zulip,ryanbackman/zulip,hafeez3000/zulip,ufosky-server/zulip,suxinde2009/zulip,arpitpanwar/zulip,JPJPJPOPOP/zulip,shubhamdhama/zulip,developerfm/zulip,easyfmxu/zulip,alliejones/zulip,xuanhan863/zulip,proliming/zulip,zulip/zulip,AZtheAsian/zulip,huangkebo/zulip,PaulPetring/zulip,ashwinirudrappa/zulip,ryansnowboarder/zulip,deer-hope/zulip,hj3938/zulip,fw1121/zulip,zofuthan/zulip,blaze225/zulip,eastlhu/zulip,jeffcao/zulip,praveenaki/zulip,KingxBanana/zulip,LeeRisk/zulip,tdr130/zulip,mdavid/zulip,bssrdf/zulip,udxxabp/zulip,karamcnair/zulip,ufosky-server/zulip,jerryge/zulip,mahim97/zulip,kou/zulip,Jianchun1/zulip,brockwhittaker/zulip,kou/zulip,hj3938/zulip,dhcrzf/zulip,tbutter/zulip,luyifan/zulip,souravbadami/zulip,dnmfarrell/zulip,Cheppers/zulip,avastu/zulip,tbutter/zulip,johnnygaddarr/zulip,ikasumiwt/zulip,udxxabp/zulip,cosmicAsymmetry/zulip,ApsOps/zulip,calvinleenyc/zulip,Diptanshu8/zulip,peguin40/zulip,pradiptad/zulip,ufosky-server/zulip,MariaFaBella85/zulip,tommyip/zulip,arpitpanwar/zulip,akuseru/zulip,dnmfarrell/zulip,jessedhillon/zulip,jphilipsen05/zulip,dxq-git/zulip,eastlhu/zulip,j831/zulip,firstblade/zulip,JanzTam/zulip,Batterfii/zulip,amallia/zulip,jimmy54/zulip,itnihao/zulip,udxxabp/zulip,KJin99/zulip,kaiyuanheshang/zulip,Batterfii/zulip,dxq-git/zulip,Jianchun1/zulip,LAndreas/zulip,natanovia/zulip,praveenaki/zulip,jrowan/zulip,adnanh/zulip,vaidap/zulip,wdaher/zulip,punchagan/zulip,pradiptad/zulip,timabbott/zulip,Drooids/zulip,zofuthan/zulip,amyliu345/zulip,zulip/zulip,avastu/zulip,shaunstanislaus/zulip,qq1012803704/zulip,blaze225/zulip,zwily/zulip,deer-hope/zulip,thomasboyt/zulip,dattatreya303/zulip,swinghu/zulip,sonali0901/zulip,guiquanz/zulip,dattatreya303/zulip,brainwane/zulip,atomic-labs/zulip,cosmicAsymmetry/zulip,christi3k/zulip,themass/zulip,ericzhou2008/zulip,seapasulli/zulip,fw1121/zulip,vabs22/zulip,shrikrishnaholla/zulip,calvinleenyc/zulip,cosmicAsymmetry/zulip,AZtheAsian/zulip,hustlzp/zulip,shubhamdhama/zulip,jonesgithub/zulip,dhcrzf/zulip,samatdav/zulip,hustlzp/zulip,jainayush975/zulip,amanharitsh123/zulip,developerfm/zulip,KJin99/zulip,jessedhillon/zulip,showell/zulip,DazWorrall/zulip,jerryge/zulip,zacps/zulip,jrowan/zulip,dnmfarrell/zulip,qq1012803704/zulip,j831/zulip,xuanh
an863/zulip,bowlofstew/zulip,wdaher/zulip,zachallaun/zulip,MariaFaBella85/zulip,bluesea/zulip,niftynei/zulip,Gabriel0402/zulip,esander91/zulip,fw1121/zulip,vakila/zulip,jerryge/zulip,dhcrzf/zulip,KingxBanana/zulip,TigorC/zulip,codeKonami/zulip,Galexrt/zulip,natanovia/zulip,Juanvulcano/zulip,yocome/zulip,huangkebo/zulip,tommyip/zulip,themass/zulip,dattatreya303/zulip,cosmicAsymmetry/zulip,mohsenSy/zulip,shaunstanislaus/zulip,timabbott/zulip,johnny9/zulip,PaulPetring/zulip,timabbott/zulip,vabs22/zulip,tbutter/zulip,peguin40/zulip,firstblade/zulip,schatt/zulip,Jianchun1/zulip,KJin99/zulip,gkotian/zulip,Frouk/zulip,KJin99/zulip,qq1012803704/zulip,grave-w-grave/zulip,pradiptad/zulip,yocome/zulip,zorojean/zulip,joshisa/zulip,j831/zulip,karamcnair/zulip,kaiyuanheshang/zulip,littledogboy/zulip,lfranchi/zulip,ryansnowboarder/zulip,fw1121/zulip,MayB/zulip,Vallher/zulip,bitemyapp/zulip,dwrpayne/zulip,xuanhan863/zulip,hackerkid/zulip,vabs22/zulip,so0k/zulip,hackerkid/zulip,paxapy/zulip,tdr130/zulip,esander91/zulip,mdavid/zulip,arpith/zulip,mansilladev/zulip,dwrpayne/zulip,joyhchen/zulip,LeeRisk/zulip,voidException/zulip,levixie/zulip,RobotCaleb/zulip,showell/zulip,suxinde2009/zulip,tbutter/zulip,tiansiyuan/zulip,karamcnair/zulip,technicalpickles/zulip,bowlofstew/zulip,MayB/zulip,levixie/zulip,gigawhitlocks/zulip,dhcrzf/zulip,TigorC/zulip,jainayush975/zulip,PhilSk/zulip,Gabriel0402/zulip,dwrpayne/zulip,showell/zulip,shrikrishnaholla/zulip,wdaher/zulip,JanzTam/zulip,esander91/zulip,jeffcao/zulip,amanharitsh123/zulip,wangdeshui/zulip,proliming/zulip,johnnygaddarr/zulip,arpitpanwar/zulip,gkotian/zulip,adnanh/zulip,isht3/zulip,vakila/zulip,kou/zulip,peiwei/zulip,dnmfarrell/zulip,zhaoweigg/zulip,KingxBanana/zulip,isht3/zulip,ahmadassaf/zulip,niftynei/zulip,itnihao/zulip,moria/zulip,Frouk/zulip,RobotCaleb/zulip,thomasboyt/zulip,lfranchi/zulip,firstblade/zulip,karamcnair/zulip,RobotCaleb/zulip,hengqujushi/zulip,aakash-cr7/zulip,Qgap/zulip,schatt/zulip,johnnygaddarr/zulip,glovebx/zulip,easyfmxu/zulip,DazWorrall/zulip,littledogboy/zulip,zorojean/zulip,SmartPeople/zulip,jackrzhang/zulip,susansls/zulip,blaze225/zulip,bowlofstew/zulip,adnanh/zulip,zwily/zulip,brockwhittaker/zulip,mansilladev/zulip,isht3/zulip,lfranchi/zulip,jackrzhang/zulip,jrowan/zulip,peiwei/zulip,moria/zulip,jphilipsen05/zulip,dattatreya303/zulip,willingc/zulip,LAndreas/zulip,babbage/zulip,andersk/zulip,deer-hope/zulip,kaiyuanheshang/zulip,paxapy/zulip,huangkebo/zulip,jackrzhang/zulip,ericzhou2008/zulip,samatdav/zulip,DazWorrall/zulip,EasonYi/zulip,timabbott/zulip,dotcool/zulip,LeeRisk/zulip,aps-sids/zulip,yuvipanda/zulip,JPJPJPOPOP/zulip,brockwhittaker/zulip,xuanhan863/zulip,amallia/zulip,itnihao/zulip,RobotCaleb/zulip,ApsOps/zulip,ryanbackman/zulip,mansilladev/zulip,peguin40/zulip,punchagan/zulip,itnihao/zulip,zhaoweigg/zulip,hustlzp/zulip,sonali0901/zulip,levixie/zulip,nicholasbs/zulip,noroot/zulip,suxinde2009/zulip,glovebx/zulip,guiquanz/zulip,schatt/zulip,synicalsyntax/zulip,wweiradio/zulip,glovebx/zulip,jessedhillon/zulip,shaunstanislaus/zulip,krtkmj/zulip,synicalsyntax/zulip,wavelets/zulip,aliceriot/zulip,JPJPJPOPOP/zulip,jonesgithub/zulip,ipernet/zulip,Frouk/zulip,noroot/zulip,zacps/zulip,reyha/zulip,aps-sids/zulip,EasonYi/zulip,noroot/zulip,bowlofstew/zulip,m1ssou/zulip,mahim97/zulip,guiquanz/zulip,MariaFaBella85/zulip,Juanvulcano/zulip,tiansiyuan/zulip,dnmfarrell/zulip,tiansiyuan/zulip,brainwane/zulip,armooo/zulip,suxinde2009/zulip,seapasulli/zulip,bastianh/zulip,armooo/zulip,RobotCaleb/zulip,yuvipanda/zulip,LAndreas/zulip,levixie/zuli
p,Cheppers/zulip,Diptanshu8/zulip,babbage/zulip,akuseru/zulip,bastianh/zulip,brockwhittaker/zulip,EasonYi/zulip,PhilSk/zulip,alliejones/zulip,brainwane/zulip,bastianh/zulip,saitodisse/zulip,ApsOps/zulip,zhaoweigg/zulip,xuanhan863/zulip,Qgap/zulip,jessedhillon/zulip,johnny9/zulip,stamhe/zulip,zachallaun/zulip,verma-varsha/zulip,cosmicAsymmetry/zulip,bluesea/zulip,schatt/zulip,xuxiao/zulip,ryansnowboarder/zulip,kaiyuanheshang/zulip,hackerkid/zulip,suxinde2009/zulip,mdavid/zulip,johnny9/zulip,LeeRisk/zulip,sup95/zulip,joyhchen/zulip,dhcrzf/zulip,jackrzhang/zulip,he15his/zulip,rishig/zulip,saitodisse/zulip,jainayush975/zulip,fw1121/zulip,jessedhillon/zulip,joshisa/zulip,aps-sids/zulip,kokoar/zulip,zhaoweigg/zulip,Vallher/zulip,rishig/zulip,rishig/zulip,qq1012803704/zulip,andersk/zulip,shaunstanislaus/zulip,Suninus/zulip,umkay/zulip,Qgap/zulip,aliceriot/zulip,jrowan/zulip,thomasboyt/zulip,rishig/zulip,showell/zulip,ApsOps/zulip,bssrdf/zulip,praveenaki/zulip,Suninus/zulip,bastianh/zulip,zhaoweigg/zulip,schatt/zulip,tiansiyuan/zulip,sup95/zulip,LAndreas/zulip,SmartPeople/zulip,zulip/zulip,sonali0901/zulip,Drooids/zulip,hayderimran7/zulip,zacps/zulip,showell/zulip,bastianh/zulip,eeshangarg/zulip,willingc/zulip,zachallaun/zulip,atomic-labs/zulip,zorojean/zulip,technicalpickles/zulip,hafeez3000/zulip,shrikrishnaholla/zulip,m1ssou/zulip,babbage/zulip,Suninus/zulip,Cheppers/zulip,TigorC/zulip,ashwinirudrappa/zulip,SmartPeople/zulip,bluesea/zulip,hackerkid/zulip,aliceriot/zulip,wweiradio/zulip,jimmy54/zulip,lfranchi/zulip,synicalsyntax/zulip,sharmaeklavya2/zulip,reyha/zulip,ikasumiwt/zulip,KJin99/zulip,kou/zulip,sup95/zulip,sonali0901/zulip,ikasumiwt/zulip,andersk/zulip,firstblade/zulip,jerryge/zulip,dwrpayne/zulip,hayderimran7/zulip,nicholasbs/zulip,yuvipanda/zulip,praveenaki/zulip,zachallaun/zulip,m1ssou/zulip,zwily/zulip,Drooids/zulip,akuseru/zulip,jonesgithub/zulip,bssrdf/zulip,KJin99/zulip,wavelets/zulip,adnanh/zulip,guiquanz/zulip,pradiptad/zulip,wavelets/zulip,wdaher/zulip,Vallher/zulip,amanharitsh123/zulip,mansilladev/zulip,wangdeshui/zulip,PhilSk/zulip,aps-sids/zulip,DazWorrall/zulip,TigorC/zulip,paxapy/zulip,tiansiyuan/zulip,saitodisse/zulip,ryansnowboarder/zulip,niftynei/zulip,sup95/zulip,amallia/zulip,joyhchen/zulip,tiansiyuan/zulip,Suninus/zulip,kokoar/zulip,jonesgithub/zulip,m1ssou/zulip,verma-varsha/zulip,eeshangarg/zulip,Juanvulcano/zulip,tommyip/zulip,EasonYi/zulip,zacps/zulip,tdr130/zulip,littledogboy/zulip,xuxiao/zulip,gigawhitlocks/zulip,verma-varsha/zulip,technicalpickles/zulip,hustlzp/zulip,tbutter/zulip,AZtheAsian/zulip,bssrdf/zulip,johnnygaddarr/zulip,armooo/zulip,rht/zulip,sonali0901/zulip,zachallaun/zulip,RobotCaleb/zulip,Frouk/zulip,jimmy54/zulip,saitodisse/zulip,Gabriel0402/zulip,vikas-parashar/zulip,MayB/zulip,deer-hope/zulip,wavelets/zulip,rht/zulip,xuxiao/zulip,Batterfii/zulip,avastu/zulip,glovebx/zulip,jainayush975/zulip,niftynei/zulip,firstblade/zulip,udxxabp/zulip,avastu/zulip,qq1012803704/zulip,ryanbackman/zulip,levixie/zulip,rishig/zulip,vabs22/zulip,joshisa/zulip,proliming/zulip,Qgap/zulip,shrikrishnaholla/zulip,dattatreya303/zulip,kokoar/zulip,jerryge/zulip,codeKonami/zulip,easyfmxu/zulip,wavelets/zulip,umkay/zulip,jeffcao/zulip,aliceriot/zulip,jimmy54/zulip,amanharitsh123/zulip,rht/zulip,themass/zulip,noroot/zulip,tiansiyuan/zulip,PaulPetring/zulip,deer-hope/zulip,aliceriot/zulip,synicalsyntax/zulip,MariaFaBella85/zulip,KingxBanana/zulip,andersk/zulip,bastianh/zulip,easyfmxu/zulip,adnanh/zulip,firstblade/zulip,tdr130/zulip,mohsenSy/zulip,Vallher/zulip,luyifan/zulip
,SmartPeople/zulip,kaiyuanheshang/zulip,schatt/zulip,mahim97/zulip,zofuthan/zulip,ericzhou2008/zulip,KJin99/zulip,vikas-parashar/zulip,suxinde2009/zulip,Gabriel0402/zulip,mohsenSy/zulip,vakila/zulip,zacps/zulip,brainwane/zulip,saitodisse/zulip,ashwinirudrappa/zulip,stamhe/zulip,qq1012803704/zulip,vikas-parashar/zulip,ufosky-server/zulip,ahmadassaf/zulip,Qgap/zulip,krtkmj/zulip,atomic-labs/zulip,shubhamdhama/zulip,showell/zulip,wdaher/zulip,ipernet/zulip,zorojean/zulip,niftynei/zulip,Juanvulcano/zulip,armooo/zulip,kokoar/zulip,Qgap/zulip,PaulPetring/zulip,PhilSk/zulip,JPJPJPOPOP/zulip,codeKonami/zulip,wangdeshui/zulip,kou/zulip,sup95/zulip,hustlzp/zulip,susansls/zulip,joyhchen/zulip,aps-sids/zulip,glovebx/zulip,arpith/zulip,umkay/zulip,atomic-labs/zulip,qq1012803704/zulip,isht3/zulip,johnnygaddarr/zulip,guiquanz/zulip,sup95/zulip,zwily/zulip,jeffcao/zulip,aakash-cr7/zulip,udxxabp/zulip,bssrdf/zulip,xuxiao/zulip,yocome/zulip,willingc/zulip,praveenaki/zulip,eastlhu/zulip,deer-hope/zulip,wweiradio/zulip,hafeez3000/zulip,armooo/zulip,ipernet/zulip,johnnygaddarr/zulip,susansls/zulip,joshisa/zulip,Cheppers/zulip,swinghu/zulip,AZtheAsian/zulip,jphilipsen05/zulip,mansilladev/zulip,LAndreas/zulip,ryanbackman/zulip,seapasulli/zulip,gigawhitlocks/zulip,shrikrishnaholla/zulip,reyha/zulip,hayderimran7/zulip,he15his/zulip,hayderimran7/zulip,RobotCaleb/zulip,EasonYi/zulip,ApsOps/zulip,swinghu/zulip,JanzTam/zulip,jphilipsen05/zulip,synicalsyntax/zulip,grave-w-grave/zulip,jphilipsen05/zulip,huangkebo/zulip,Suninus/zulip,christi3k/zulip,samatdav/zulip,thomasboyt/zulip,pradiptad/zulip,itnihao/zulip,hengqujushi/zulip,christi3k/zulip,SmartPeople/zulip,calvinleenyc/zulip,ahmadassaf/zulip,samatdav/zulip,hengqujushi/zulip,MariaFaBella85/zulip,AZtheAsian/zulip,hayderimran7/zulip,LAndreas/zulip,shrikrishnaholla/zulip,atomic-labs/zulip,JanzTam/zulip,kaiyuanheshang/zulip,he15his/zulip,umkay/zulip,jessedhillon/zulip,zwily/zulip,he15his/zulip,babbage/zulip,JanzTam/zulip,TigorC/zulip,Galexrt/zulip,ryansnowboarder/zulip,codeKonami/zulip,rishig/zulip,so0k/zulip,Frouk/zulip,hustlzp/zulip,xuanhan863/zulip,brockwhittaker/zulip,amyliu345/zulip,yocome/zulip,luyifan/zulip,codeKonami/zulip,yocome/zulip,ufosky-server/zulip,wangdeshui/zulip,dawran6/zulip,christi3k/zulip,willingc/zulip,babbage/zulip,jackrzhang/zulip,swinghu/zulip,PhilSk/zulip,seapasulli/zulip,dwrpayne/zulip,gigawhitlocks/zulip,ahmadassaf/zulip,krtkmj/zulip,nicholasbs/zulip,vaidap/zulip,Diptanshu8/zulip,johnny9/zulip,paxapy/zulip,nicholasbs/zulip,bitemyapp/zulip,voidException/zulip,nicholasbs/zulip,susansls/zulip,paxapy/zulip,amanharitsh123/zulip,rht/zulip,karamcnair/zulip,proliming/zulip,zwily/zulip,brainwane/zulip,alliejones/zulip,niftynei/zulip,eastlhu/zulip,Frouk/zulip,peiwei/zulip,Batterfii/zulip,Vallher/zulip,dxq-git/zulip,codeKonami/zulip,Galexrt/zulip,dotcool/zulip,ApsOps/zulip,shaunstanislaus/zulip,willingc/zulip,LeeRisk/zulip,deer-hope/zulip,arpitpanwar/zulip,voidException/zulip,vakila/zulip,arpith/zulip,hackerkid/zulip,ryanbackman/zulip,adnanh/zulip,ericzhou2008/zulip,Diptanshu8/zulip,amallia/zulip,akuseru/zulip,vaidap/zulip,akuseru/zulip,verma-varsha/zulip,shaunstanislaus/zulip,EasonYi/zulip,levixie/zulip,jphilipsen05/zulip,hengqujushi/zulip,littledogboy/zulip,verma-varsha/zulip,MariaFaBella85/zulip,mahim97/zulip,gigawhitlocks/zulip,jimmy54/zulip,arpitpanwar/zulip,souravbadami/zulip,Suninus/zulip,dxq-git/zulip,stamhe/zulip,Galexrt/zulip,littledogboy/zulip,isht3/zulip,lfranchi/zulip,grave-w-grave/zulip,punchagan/zulip,verma-varsha/zulip,ashwinirudrappa/zuli
p,joyhchen/zulip,umkay/zulip,gigawhitlocks/zulip,sharmaeklavya2/zulip,so0k/zulip,MariaFaBella85/zulip,aps-sids/zulip,gkotian/zulip,eastlhu/zulip,zachallaun/zulip,praveenaki/zulip,ericzhou2008/zulip,akuseru/zulip,LAndreas/zulip,mansilladev/zulip,Jianchun1/zulip,hayderimran7/zulip,zulip/zulip,zwily/zulip,voidException/zulip,ryanbackman/zulip,ryansnowboarder/zulip,lfranchi/zulip,he15his/zulip,grave-w-grave/zulip,jrowan/zulip,ikasumiwt/zulip,souravbadami/zulip,hafeez3000/zulip,amyliu345/zulip,swinghu/zulip,esander91/zulip,natanovia/zulip,themass/zulip,shubhamdhama/zulip,moria/zulip,vikas-parashar/zulip,thomasboyt/zulip,Drooids/zulip,souravbadami/zulip,ufosky-server/zulip,fw1121/zulip,Batterfii/zulip,suxinde2009/zulip,ipernet/zulip,zorojean/zulip,huangkebo/zulip,vaidap/zulip,jeffcao/zulip,zofuthan/zulip,LeeRisk/zulip,reyha/zulip,souravbadami/zulip,bssrdf/zulip,sharmaeklavya2/zulip,babbage/zulip,easyfmxu/zulip,aliceriot/zulip,bowlofstew/zulip,j831/zulip,zulip/zulip,wavelets/zulip,littledogboy/zulip,yuvipanda/zulip,grave-w-grave/zulip,paxapy/zulip,pradiptad/zulip,technicalpickles/zulip,guiquanz/zulip,Vallher/zulip,developerfm/zulip,jonesgithub/zulip,amallia/zulip,xuxiao/zulip,rht/zulip,samatdav/zulip,yocome/zulip,aliceriot/zulip,peguin40/zulip,m1ssou/zulip,Diptanshu8/zulip,gkotian/zulip,Drooids/zulip,hj3938/zulip,ikasumiwt/zulip,hafeez3000/zulip,avastu/zulip,bastianh/zulip,christi3k/zulip,yocome/zulip,umkay/zulip,aakash-cr7/zulip,eeshangarg/zulip,peiwei/zulip,akuseru/zulip,MayB/zulip,Frouk/zulip,alliejones/zulip,KingxBanana/zulip,gigawhitlocks/zulip,dhcrzf/zulip,eeshangarg/zulip,blaze225/zulip,mahim97/zulip,joshisa/zulip,stamhe/zulip,noroot/zulip,tommyip/zulip,developerfm/zulip,kou/zulip,saitodisse/zulip,sharmaeklavya2/zulip,wdaher/zulip,moria/zulip,tommyip/zulip,Vallher/zulip,joyhchen/zulip,amyliu345/zulip,ApsOps/zulip,aakash-cr7/zulip,kokoar/zulip,esander91/zulip,udxxabp/zulip,sharmaeklavya2/zulip,bluesea/zulip,atomic-labs/zulip,xuxiao/zulip,PaulPetring/zulip,punchagan/zulip,zulip/zulip,Batterfii/zulip,SmartPeople/zulip,gkotian/zulip,Gabriel0402/zulip,zorojean/zulip,dnmfarrell/zulip,guiquanz/zulip,yuvipanda/zulip,huangkebo/zulip,samatdav/zulip,bitemyapp/zulip,pradiptad/zulip,vakila/zulip,esander91/zulip,dotcool/zulip,hengqujushi/zulip,johnny9/zulip,dotcool/zulip,armooo/zulip,karamcnair/zulip,jerryge/zulip,calvinleenyc/zulip,reyha/zulip,karamcnair/zulip,gkotian/zulip,mdavid/zulip,arpith/zulip,noroot/zulip,bitemyapp/zulip,ashwinirudrappa/zulip,krtkmj/zulip,Gabriel0402/zulip,johnny9/zulip,seapasulli/zulip,mahim97/zulip,calvinleenyc/zulip,ashwinirudrappa/zulip,proliming/zulip,itnihao/zulip,reyha/zulip,glovebx/zulip,christi3k/zulip,sonali0901/zulip,krtkmj/zulip,rht/zulip,arpith/zulip,showell/zulip,willingc/zulip,calvinleenyc/zulip,themass/zulip,luyifan/zulip,joshisa/zulip,sharmaeklavya2/zulip,eastlhu/zulip,hj3938/zulip,huangkebo/zulip,kokoar/zulip,yuvipanda/zulip,gkotian/zulip,TigorC/zulip,thomasboyt/zulip,EasonYi/zulip,willingc/zulip,developerfm/zulip,shaunstanislaus/zulip,vikas-parashar/zulip,esander91/zulip,he15his/zulip,shubhamdhama/zulip,ufosky-server/zulip,grave-w-grave/zulip,j831/zulip,Diptanshu8/zulip,dawran6/zulip,vabs22/zulip,armooo/zulip,babbage/zulip,zorojean/zulip,aakash-cr7/zulip,xuxiao/zulip,wdaher/zulip,jerryge/zulip,littledogboy/zulip,DazWorrall/zulip,eeshangarg/zulip,JanzTam/zulip,praveenaki/zulip,he15his/zulip,avastu/zulip,firstblade/zulip,so0k/zulip,blaze225/zulip,PaulPetring/zulip,timabbott/zulip,shrikrishnaholla/zulip,easyfmxu/zulip,Gabriel0402/zulip,swinghu/zulip,jeffcao/zulip,
timabbott/zulip,mohsenSy/zulip,Cheppers/zulip,LeeRisk/zulip,ipernet/zulip,luyifan/zulip,m1ssou/zulip,ahmadassaf/zulip,bowlofstew/zulip,amallia/zulip,seapasulli/zulip,wangdeshui/zulip,eastlhu/zulip,themass/zulip,dwrpayne/zulip,glovebx/zulip,wweiradio/zulip,kou/zulip,ahmadassaf/zulip,hengqujushi/zulip,jackrzhang/zulip,dxq-git/zulip,ipernet/zulip,ashwinirudrappa/zulip,kokoar/zulip,jonesgithub/zulip,tbutter/zulip,zulip/zulip,Cheppers/zulip,ahmadassaf/zulip,MayB/zulip,proliming/zulip,Juanvulcano/zulip,bluesea/zulip,zofuthan/zulip,jackrzhang/zulip,avastu/zulip,hj3938/zulip,wweiradio/zulip,shubhamdhama/zulip,lfranchi/zulip,hafeez3000/zulip,easyfmxu/zulip,yuvipanda/zulip,andersk/zulip,andersk/zulip,ikasumiwt/zulip,johnny9/zulip,atomic-labs/zulip,AZtheAsian/zulip,ericzhou2008/zulip,umkay/zulip,MayB/zulip,punchagan/zulip,wangdeshui/zulip,levixie/zulip,natanovia/zulip,swinghu/zulip,punchagan/zulip,hj3938/zulip,arpith/zulip,mdavid/zulip,technicalpickles/zulip,vikas-parashar/zulip,tbutter/zulip,synicalsyntax/zulip,alliejones/zulip,seapasulli/zulip,wweiradio/zulip,moria/zulip,so0k/zulip,amanharitsh123/zulip,itnihao/zulip,themass/zulip,schatt/zulip,dnmfarrell/zulip,peguin40/zulip,stamhe/zulip,moria/zulip,jeffcao/zulip,hengqujushi/zulip,synicalsyntax/zulip,mdavid/zulip,so0k/zulip,JPJPJPOPOP/zulip,natanovia/zulip,Suninus/zulip,stamhe/zulip,susansls/zulip,ericzhou2008/zulip,Galexrt/zulip,jimmy54/zulip,bowlofstew/zulip,fw1121/zulip,proliming/zulip,natanovia/zulip,JanzTam/zulip,voidException/zulip,dotcool/zulip,KingxBanana/zulip,bitemyapp/zulip,Galexrt/zulip,j831/zulip,jimmy54/zulip,arpitpanwar/zulip,susansls/zulip,jessedhillon/zulip,wweiradio/zulip,peiwei/zulip,developerfm/zulip,MayB/zulip,hayderimran7/zulip,mohsenSy/zulip,arpitpanwar/zulip,eeshangarg/zulip,bitemyapp/zulip,peiwei/zulip,vakila/zulip,voidException/zulip,amyliu345/zulip,bitemyapp/zulip,krtkmj/zulip,technicalpickles/zulip,dawran6/zulip,Galexrt/zulip,tommyip/zulip,PhilSk/zulip,luyifan/zulip,zhaoweigg/zulip,ryansnowboarder/zulip,timabbott/zulip,tdr130/zulip,amallia/zulip,thomasboyt/zulip,rht/zulip,tdr130/zulip,blaze225/zulip,bssrdf/zulip,dotcool/zulip,joshisa/zulip,dxq-git/zulip,andersk/zulip,zofuthan/zulip
|
Add command to dump and restore user pointers.
For use in database migrations.
(imported from commit f06ae569fe986da5e7d144c277bf27be534c04f9)
|
from optparse import make_option
from django.core.management.base import BaseCommand
from zephyr.models import Realm, UserProfile
import simplejson
def dump():
pointers = []
for u in UserProfile.objects.select_related("user__email").all():
pointers.append((u.user.email, u.pointer))
file("dumped-pointers", "w").write(simplejson.dumps(pointers) + "\n")
def restore(change):
for (email, pointer) in simplejson.loads(file("dumped-pointers").read()):
u = UserProfile.objects.get(user__email=email)
print "%s: pointer %s => %s" % (email, u.pointer, pointer)
if change:
u.pointer = pointer
u.save()
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--restore', default=False, action='store_true'),
make_option('--dry-run', '-n', default=False, action='store_true'),)
def handle(self, *args, **options):
if options["restore"]:
restore(change=not options['dry_run'])
else:
dump()
|
<commit_before><commit_msg>Add command to dump and restore user pointers.
For use in database migrations.
(imported from commit f06ae569fe986da5e7d144c277bf27be534c04f9)<commit_after>
|
from optparse import make_option
from django.core.management.base import BaseCommand
from zephyr.models import Realm, UserProfile
import simplejson
def dump():
pointers = []
for u in UserProfile.objects.select_related("user__email").all():
pointers.append((u.user.email, u.pointer))
file("dumped-pointers", "w").write(simplejson.dumps(pointers) + "\n")
def restore(change):
for (email, pointer) in simplejson.loads(file("dumped-pointers").read()):
u = UserProfile.objects.get(user__email=email)
print "%s: pointer %s => %s" % (email, u.pointer, pointer)
if change:
u.pointer = pointer
u.save()
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--restore', default=False, action='store_true'),
make_option('--dry-run', '-n', default=False, action='store_true'),)
def handle(self, *args, **options):
if options["restore"]:
restore(change=not options['dry_run'])
else:
dump()
|
Add command to dump and restore user pointers.
For use in database migrations.
(imported from commit f06ae569fe986da5e7d144c277bf27be534c04f9)from optparse import make_option
from django.core.management.base import BaseCommand
from zephyr.models import Realm, UserProfile
import simplejson
def dump():
pointers = []
for u in UserProfile.objects.select_related("user__email").all():
pointers.append((u.user.email, u.pointer))
file("dumped-pointers", "w").write(simplejson.dumps(pointers) + "\n")
def restore(change):
for (email, pointer) in simplejson.loads(file("dumped-pointers").read()):
u = UserProfile.objects.get(user__email=email)
print "%s: pointer %s => %s" % (email, u.pointer, pointer)
if change:
u.pointer = pointer
u.save()
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--restore', default=False, action='store_true'),
make_option('--dry-run', '-n', default=False, action='store_true'),)
def handle(self, *args, **options):
if options["restore"]:
restore(change=not options['dry_run'])
else:
dump()
|
<commit_before><commit_msg>Add command to dump and restore user pointers.
For use in database migrations.
(imported from commit f06ae569fe986da5e7d144c277bf27be534c04f9)<commit_after>from optparse import make_option
from django.core.management.base import BaseCommand
from zephyr.models import Realm, UserProfile
import simplejson
def dump():
pointers = []
for u in UserProfile.objects.select_related("user__email").all():
pointers.append((u.user.email, u.pointer))
file("dumped-pointers", "w").write(simplejson.dumps(pointers) + "\n")
def restore(change):
for (email, pointer) in simplejson.loads(file("dumped-pointers").read()):
u = UserProfile.objects.get(user__email=email)
print "%s: pointer %s => %s" % (email, u.pointer, pointer)
if change:
u.pointer = pointer
u.save()
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--restore', default=False, action='store_true'),
make_option('--dry-run', '-n', default=False, action='store_true'),)
def handle(self, *args, **options):
if options["restore"]:
restore(change=not options['dry_run'])
else:
dump()
|
|
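The command in the record above is a small example of a dump-and-restore round trip: serialize one field per row to a flat file, then replay it with an optional dry run. Below is a framework-free Python 3 sketch of the same pattern; the POINTERS dict and the pointers.json file name are illustrative stand-ins for the Django models and dump file, not part of the zulip code.

import json

# Stand-in for UserProfile rows; in the real command this is a queryset.
POINTERS = {"alice@example.com": 42, "bob@example.com": 7}

def dump(path="pointers.json"):
    # Write (email, pointer) pairs to a flat file, like the command's dump().
    with open(path, "w") as f:
        json.dump(sorted(POINTERS.items()), f)

def restore(path="pointers.json", change=False):
    # Replay the dump; change=False mirrors the command's --dry-run mode.
    with open(path) as f:
        pairs = json.load(f)
    for email, pointer in pairs:
        print("%s: pointer %s => %s" % (email, POINTERS.get(email), pointer))
        if change:
            POINTERS[email] = pointer

if __name__ == "__main__":
    dump()
    restore(change=True)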
eb91b11930319369bc9cfc3b1b15c0b92fb4d85c
|
tests/sentry/models/test_organizationoption.py
|
tests/sentry/models/test_organizationoption.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from sentry.models import OrganizationOption
from sentry.testutils import TestCase
class OrganizationOptionManagerTest(TestCase):
def test_set_value(self):
OrganizationOption.objects.set_value(self.organization, 'foo', 'bar')
assert OrganizationOption.objects.filter(
organization=self.organization, key='foo', value='bar').exists()
def test_get_value(self):
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result is None
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result == 'bar'
def test_unset_value(self):
OrganizationOption.objects.unset_value(self.organization, 'foo')
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
OrganizationOption.objects.unset_value(self.organization, 'foo')
assert not OrganizationOption.objects.filter(
organization=self.organization, key='foo').exists()
def test_get_value_bulk(self):
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: None}
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: 'bar'}
|
Add `OrganizationOption` tests based on `ProjectOption`.
|
Add `OrganizationOption` tests based on `ProjectOption`.
|
Python
|
bsd-3-clause
|
JamesMura/sentry,gencer/sentry,nicholasserra/sentry,gencer/sentry,fotinakis/sentry,looker/sentry,JackDanger/sentry,gencer/sentry,looker/sentry,fotinakis/sentry,looker/sentry,jean/sentry,daevaorn/sentry,ifduyue/sentry,beeftornado/sentry,zenefits/sentry,alexm92/sentry,mvaled/sentry,mitsuhiko/sentry,nicholasserra/sentry,zenefits/sentry,fotinakis/sentry,fotinakis/sentry,ifduyue/sentry,ifduyue/sentry,nicholasserra/sentry,ifduyue/sentry,mitsuhiko/sentry,gencer/sentry,BuildingLink/sentry,daevaorn/sentry,JamesMura/sentry,looker/sentry,daevaorn/sentry,zenefits/sentry,JamesMura/sentry,mvaled/sentry,zenefits/sentry,jean/sentry,ifduyue/sentry,BuildingLink/sentry,beeftornado/sentry,BuildingLink/sentry,JamesMura/sentry,alexm92/sentry,JackDanger/sentry,jean/sentry,beeftornado/sentry,jean/sentry,mvaled/sentry,looker/sentry,mvaled/sentry,jean/sentry,daevaorn/sentry,JamesMura/sentry,mvaled/sentry,mvaled/sentry,JackDanger/sentry,zenefits/sentry,alexm92/sentry,BuildingLink/sentry,BuildingLink/sentry,gencer/sentry
|
Add `OrganizationOption` tests based on `ProjectOption`.
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from sentry.models import OrganizationOption
from sentry.testutils import TestCase
class OrganizationOptionManagerTest(TestCase):
def test_set_value(self):
OrganizationOption.objects.set_value(self.organization, 'foo', 'bar')
assert OrganizationOption.objects.filter(
organization=self.organization, key='foo', value='bar').exists()
def test_get_value(self):
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result is None
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result == 'bar'
def test_unset_value(self):
OrganizationOption.objects.unset_value(self.organization, 'foo')
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
OrganizationOption.objects.unset_value(self.organization, 'foo')
assert not OrganizationOption.objects.filter(
organization=self.organization, key='foo').exists()
def test_get_value_bulk(self):
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: None}
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: 'bar'}
|
<commit_before><commit_msg>Add `OrganizationOption` tests based on `ProjectOption`.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from sentry.models import OrganizationOption
from sentry.testutils import TestCase
class OrganizationOptionManagerTest(TestCase):
def test_set_value(self):
OrganizationOption.objects.set_value(self.organization, 'foo', 'bar')
assert OrganizationOption.objects.filter(
organization=self.organization, key='foo', value='bar').exists()
def test_get_value(self):
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result is None
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result == 'bar'
def test_unset_value(self):
OrganizationOption.objects.unset_value(self.organization, 'foo')
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
OrganizationOption.objects.unset_value(self.organization, 'foo')
assert not OrganizationOption.objects.filter(
organization=self.organization, key='foo').exists()
def test_get_value_bulk(self):
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: None}
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: 'bar'}
|
Add `OrganizationOption` tests based on `ProjectOption`.# -*- coding: utf-8 -*-
from __future__ import absolute_import
from sentry.models import OrganizationOption
from sentry.testutils import TestCase
class OrganizationOptionManagerTest(TestCase):
def test_set_value(self):
OrganizationOption.objects.set_value(self.organization, 'foo', 'bar')
assert OrganizationOption.objects.filter(
organization=self.organization, key='foo', value='bar').exists()
def test_get_value(self):
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result is None
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result == 'bar'
def test_unset_value(self):
OrganizationOption.objects.unset_value(self.organization, 'foo')
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
OrganizationOption.objects.unset_value(self.organization, 'foo')
assert not OrganizationOption.objects.filter(
organization=self.organization, key='foo').exists()
def test_get_value_bulk(self):
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: None}
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: 'bar'}
|
<commit_before><commit_msg>Add `OrganizationOption` tests based on `ProjectOption`.<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import
from sentry.models import OrganizationOption
from sentry.testutils import TestCase
class OrganizationOptionManagerTest(TestCase):
def test_set_value(self):
OrganizationOption.objects.set_value(self.organization, 'foo', 'bar')
assert OrganizationOption.objects.filter(
organization=self.organization, key='foo', value='bar').exists()
def test_get_value(self):
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result is None
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value(self.organization, 'foo')
assert result == 'bar'
def test_unset_value(self):
OrganizationOption.objects.unset_value(self.organization, 'foo')
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
OrganizationOption.objects.unset_value(self.organization, 'foo')
assert not OrganizationOption.objects.filter(
organization=self.organization, key='foo').exists()
def test_get_value_bulk(self):
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: None}
OrganizationOption.objects.create(
organization=self.organization, key='foo', value='bar')
result = OrganizationOption.objects.get_value_bulk([self.organization], 'foo')
assert result == {self.organization: 'bar'}
|
|
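The tests above pin down a four-method options contract: set_value, get_value (None when absent), unset_value (a no-op when the key is missing), and get_value_bulk. As a rough illustration of that contract, here is a minimal in-memory manager that passes the same assertions; it is an assumed stand-in, not Sentry's database-backed implementation.

class OptionManager:
    """In-memory stand-in for the set/get/unset/bulk contract under test."""
    def __init__(self):
        self._store = {}

    def set_value(self, owner, key, value):
        self._store[(owner, key)] = value

    def get_value(self, owner, key, default=None):
        return self._store.get((owner, key), default)

    def unset_value(self, owner, key):
        # Silently ignores missing keys, matching test_unset_value.
        self._store.pop((owner, key), None)

    def get_value_bulk(self, owners, key):
        return {owner: self.get_value(owner, key) for owner in owners}

org = object()  # stand-in for an Organization row
manager = OptionManager()
assert manager.get_value(org, "foo") is None
manager.set_value(org, "foo", "bar")
assert manager.get_value(org, "foo") == "bar"
assert manager.get_value_bulk([org], "foo") == {org: "bar"}
manager.unset_value(org, "foo")
assert manager.get_value(org, "foo") is None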
f264f8804c208f2b55471f27f92a9e8c1ab5d778
|
tests/correlations/test_views.py
|
tests/correlations/test_views.py
|
# -*- coding: utf-8 -*-
import datetime
import pytest
from django.core.urlresolvers import reverse
from components.people.factories import GroupFactory, IdolFactory
@pytest.mark.django_db
def test_happenings_by_year_view(client):
[GroupFactory(started=datetime.date(2013, 1, 1)) for i in xrange(5)]
response = client.get(reverse('happenings-by-year', kwargs={'year': 2013}))
assert response.status_code == 200
assert 'object_list' in response.context
assert '2010s' in response.context['years']
assert 'correlations/happenings_year.html' in [template.name for template in response.templates]
|
Test our new happenings-by-year view.
|
Test our new happenings-by-year view.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
Test our new happenings-by-year view.
|
# -*- coding: utf-8 -*-
import datetime
import pytest
from django.core.urlresolvers import reverse
from components.people.factories import GroupFactory, IdolFactory
@pytest.mark.django_db
def test_happenings_by_year_view(client):
[GroupFactory(started=datetime.date(2013, 1, 1)) for i in xrange(5)]
response = client.get(reverse('happenings-by-year', kwargs={'year': 2013}))
assert response.status_code == 200
assert 'object_list' in response.context
assert '2010s' in response.context['years']
assert 'correlations/happenings_year.html' in [template.name for template in response.templates]
|
<commit_before><commit_msg>Test our new happenings-by-year view.<commit_after>
|
# -*- coding: utf-8 -*-
import datetime
import pytest
from django.core.urlresolvers import reverse
from components.people.factories import GroupFactory, IdolFactory
@pytest.mark.django_db
def test_happenings_by_year_view(client):
[GroupFactory(started=datetime.date(2013, 1, 1)) for i in xrange(5)]
response = client.get(reverse('happenings-by-year', kwargs={'year': 2013}))
assert response.status_code == 200
assert 'object_list' in response.context
assert '2010s' in response.context['years']
assert 'correlations/happenings_year.html' in [template.name for template in response.templates]
|
Test our new happenings-by-year view.# -*- coding: utf-8 -*-
import datetime
import pytest
from django.core.urlresolvers import reverse
from components.people.factories import GroupFactory, IdolFactory
@pytest.mark.django_db
def test_happenings_by_year_view(client):
[GroupFactory(started=datetime.date(2013, 1, 1)) for i in xrange(5)]
response = client.get(reverse('happenings-by-year', kwargs={'year': 2013}))
assert response.status_code == 200
assert 'object_list' in response.context
assert '2010s' in response.context['years']
assert 'correlations/happenings_year.html' in [template.name for template in response.templates]
|
<commit_before><commit_msg>Test our new happenings-by-year view.<commit_after># -*- coding: utf-8 -*-
import datetime
import pytest
from django.core.urlresolvers import reverse
from components.people.factories import GroupFactory, IdolFactory
@pytest.mark.django_db
def test_happenings_by_year_view(client):
[GroupFactory(started=datetime.date(2013, 1, 1)) for i in xrange(5)]
response = client.get(reverse('happenings-by-year', kwargs={'year': 2013}))
assert response.status_code == 200
assert 'object_list' in response.context
assert '2010s' in response.context['years']
assert 'correlations/happenings_year.html' in [template.name for template in response.templates]
|
|
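The view test above implies two pieces of context: object_list filtered to the requested year, and a years index that buckets known years into decade labels such as '2010s'. Here is a standalone sketch of that grouping logic with made-up dates; the field and label names are assumptions for illustration, not taken from the project.

import datetime

# Hypothetical "started" dates standing in for GroupFactory output.
started = [datetime.date(y, 1, 1) for y in (2004, 2011, 2013, 2013)]

def decade_label(year):
    # 2013 -> "2010s", 2004 -> "2000s"
    return "%d0s" % (year // 10)

object_list = [d for d in started if d.year == 2013]
years = sorted({decade_label(d.year) for d in started})

assert len(object_list) == 2
assert "2010s" in years and "2000s" in years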
43c4595ae26a7663538e712af37553c7a64fade7
|
teuthology/test/test_parallel.py
|
teuthology/test/test_parallel.py
|
from ..parallel import parallel
def identity(item, input_set=None, remove=False):
if input_set is not None:
assert item in input_set
if remove:
input_set.remove(item)
return item
class TestParallel(object):
def test_basic(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set, remove=True)
assert para.any_spawned is True
assert para.count == len(in_set)
def test_result(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set)
for result in para:
in_set.remove(result)
|
Add a couple unit tests for teuthology.parallel
|
Add a couple unit tests for teuthology.parallel
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>
|
Python
|
mit
|
michaelsevilla/teuthology,caibo2014/teuthology,ceph/teuthology,SUSE/teuthology,SUSE/teuthology,t-miyamae/teuthology,zhouyuan/teuthology,ktdreyer/teuthology,robbat2/teuthology,yghannam/teuthology,yghannam/teuthology,dmick/teuthology,dreamhost/teuthology,zhouyuan/teuthology,dmick/teuthology,ivotron/teuthology,caibo2014/teuthology,michaelsevilla/teuthology,tchaikov/teuthology,ivotron/teuthology,robbat2/teuthology,ceph/teuthology,dmick/teuthology,t-miyamae/teuthology,tchaikov/teuthology,ktdreyer/teuthology,SUSE/teuthology,dreamhost/teuthology
|
Add a couple unit tests for teuthology.parallel
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>
|
from ..parallel import parallel
def identity(item, input_set=None, remove=False):
if input_set is not None:
assert item in input_set
if remove:
input_set.remove(item)
return item
class TestParallel(object):
def test_basic(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set, remove=True)
assert para.any_spawned is True
assert para.count == len(in_set)
def test_result(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set)
for result in para:
in_set.remove(result)
|
<commit_before><commit_msg>Add a couple unit tests for teuthology.parallel
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com><commit_after>
|
from ..parallel import parallel
def identity(item, input_set=None, remove=False):
if input_set is not None:
assert item in input_set
if remove:
input_set.remove(item)
return item
class TestParallel(object):
def test_basic(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set, remove=True)
assert para.any_spawned is True
assert para.count == len(in_set)
def test_result(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set)
for result in para:
in_set.remove(result)
|
Add a couple unit tests for teuthology.parallel
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>from ..parallel import parallel
def identity(item, input_set=None, remove=False):
if input_set is not None:
assert item in input_set
if remove:
input_set.remove(item)
return item
class TestParallel(object):
def test_basic(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set, remove=True)
assert para.any_spawned is True
assert para.count == len(in_set)
def test_result(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set)
for result in para:
in_set.remove(result)
|
<commit_before><commit_msg>Add a couple unit tests for teuthology.parallel
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com><commit_after>from ..parallel import parallel
def identity(item, input_set=None, remove=False):
if input_set is not None:
assert item in input_set
if remove:
input_set.remove(item)
return item
class TestParallel(object):
def test_basic(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set, remove=True)
assert para.any_spawned is True
assert para.count == len(in_set)
def test_result(self):
in_set = set(range(10))
with parallel() as para:
for i in in_set:
para.spawn(identity, i, in_set)
for result in para:
in_set.remove(result)
|
|
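The tests above document the parallel() contract: spawn() schedules calls, any_spawned and count reflect what was scheduled, iterating yields results, and leaving the with-block waits for everything. A thread-based sketch of an object satisfying that contract follows; teuthology's real implementation is greenlet-based, so this is only an illustration of the interface.

from concurrent.futures import ThreadPoolExecutor

class parallel:
    """Minimal thread-backed sketch of the tested context-manager API."""
    def __init__(self):
        self._futures = []
        self.any_spawned = False

    def __enter__(self):
        self._pool = ThreadPoolExecutor(max_workers=4)
        return self

    def spawn(self, fn, *args, **kwargs):
        self.any_spawned = True
        self._futures.append(self._pool.submit(fn, *args, **kwargs))

    @property
    def count(self):
        return len(self._futures)

    def __iter__(self):
        # Yield results in submission order, as test_result consumes them.
        for f in self._futures:
            yield f.result()

    def __exit__(self, *exc):
        for f in self._futures:
            f.result()  # wait and propagate worker exceptions on exit
        self._pool.shutdown()

with parallel() as para:
    for i in range(10):
        para.spawn(lambda x: x * 2, i)
assert para.any_spawned and para.count == 10
assert sorted(para) == [x * 2 for x in range(10)]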
173565f7f2b9ffa548b355a0cbc8f972f1445a50
|
tests/test_guess.py
|
tests/test_guess.py
|
from rdopkg import guess
from collections import namedtuple
import pytest
VersionTestCase = namedtuple('VersionTestCase', ('expected', 'input_data'))
data_table_good = [
VersionTestCase(('1.2.3', None), '1.2.3'),
VersionTestCase(('1.2.3', 'vX.Y.Z'), 'v1.2.3'),
VersionTestCase(('1.2.3', 'VX.Y.Z'), 'V1.2.3'),
VersionTestCase(('banana', None), 'banana'),
]
data_table_bad = [
VersionTestCase((None, None), None),
VersionTestCase((None, None), []),
VersionTestCase((None, None), ()),
VersionTestCase((None, None), ''),
VersionTestCase((None, None), {}),
]
data_table_ugly = [
VersionTestCase((None, None), ('foo', 'bar', 'bah')),
VersionTestCase((None, None), ['foo', 'bar', 'bah']),
VersionTestCase((None, None), {'foo': 'bar'}),
]
def test_table_data_good_tag2version():
for entry in data_table_good:
assert entry.expected == guess.tag2version(entry.input_data)
def test_table_data_bad_tag2version():
for entry in data_table_bad:
# Input Validation should probably return to us (None, None)
# assert entry.expected == guess.tag2version(entry.input_data)
assert (entry.input_data, None) == guess.tag2version(entry.input_data)
def test_table_data_ugly_tag2version():
for entry in data_table_ugly:
# TODO: probably should be a more specific exception
with pytest.raises(Exception):
guess.tag2version(entry.input_data)
def test_version2tag_simple():
assert '1.2.3' == guess.version2tag('1.2.3')
def test_version2tag_type1():
assert 'v1.2.3' == guess.version2tag('1.2.3', 'vX.Y.Z')
def test_version2tag_type2():
assert 'V1.2.3' == guess.version2tag('1.2.3', 'VX.Y.Z')
|
Add test coverage for rdopkg.guess version2tag and tag2version
|
Add test coverage for rdopkg.guess version2tag and tag2version
This adds a coverage unit test. Some input cases are not handled
well, but it is better to capture the existing behavior first and then
update the tests and code to handle them properly.
Change-Id: I16dfb60886a1ac5ddfab86100e08ac23f8cf6c65
|
Python
|
apache-2.0
|
redhat-openstack/rdopkg,redhat-openstack/rdopkg,openstack-packages/rdopkg,openstack-packages/rdopkg
|
Add test coverage for rdopkg.guess version2tag and tag2version
This adds a coverage unit test. Some input cases are not handled
well, but it is better to capture the existing behavior first and then
update the tests and code to handle them properly.
Change-Id: I16dfb60886a1ac5ddfab86100e08ac23f8cf6c65
|
from rdopkg import guess
from collections import namedtuple
import pytest
VersionTestCase = namedtuple('VersionTestCase', ('expected', 'input_data'))
data_table_good = [
VersionTestCase(('1.2.3', None), '1.2.3'),
VersionTestCase(('1.2.3', 'vX.Y.Z'), 'v1.2.3'),
VersionTestCase(('1.2.3', 'VX.Y.Z'), 'V1.2.3'),
VersionTestCase(('banana', None), 'banana'),
]
data_table_bad = [
VersionTestCase((None, None), None),
VersionTestCase((None, None), []),
VersionTestCase((None, None), ()),
VersionTestCase((None, None), ''),
VersionTestCase((None, None), {}),
]
data_table_ugly = [
VersionTestCase((None, None), ('foo', 'bar', 'bah')),
VersionTestCase((None, None), ['foo', 'bar', 'bah']),
VersionTestCase((None, None), {'foo': 'bar'}),
]
def test_table_data_good_tag2version():
for entry in data_table_good:
assert entry.expected == guess.tag2version(entry.input_data)
def test_table_data_bad_tag2version():
for entry in data_table_bad:
# Input Validation should probably return to us (None, None)
# assert entry.expected == guess.tag2version(entry.input_data)
assert (entry.input_data, None) == guess.tag2version(entry.input_data)
def test_table_data_ugly_tag2version():
for entry in data_table_ugly:
# TODO: probably should be a more specific exception
with pytest.raises(Exception):
guess.tag2version(entry.input_data)
def test_version2tag_simple():
assert '1.2.3' == guess.version2tag('1.2.3')
def test_version2tag_type1():
assert 'v1.2.3' == guess.version2tag('1.2.3', 'vX.Y.Z')
def test_version2tag_type2():
assert 'V1.2.3' == guess.version2tag('1.2.3', 'VX.Y.Z')
|
<commit_before><commit_msg>Add test coverage for rdopkg.guess version2tag and tag2version
This adds a coverage unit test. Some input cases are not handled
well, but it is better to capture the existing behavior first and then
update the tests and code to handle them properly.
Change-Id: I16dfb60886a1ac5ddfab86100e08ac23f8cf6c65<commit_after>
|
from rdopkg import guess
from collections import namedtuple
import pytest
VersionTestCase = namedtuple('VersionTestCase', ('expected', 'input_data'))
data_table_good = [
VersionTestCase(('1.2.3', None), '1.2.3'),
VersionTestCase(('1.2.3', 'vX.Y.Z'), 'v1.2.3'),
VersionTestCase(('1.2.3', 'VX.Y.Z'), 'V1.2.3'),
VersionTestCase(('banana', None), 'banana'),
]
data_table_bad = [
VersionTestCase((None, None), None),
VersionTestCase((None, None), []),
VersionTestCase((None, None), ()),
VersionTestCase((None, None), ''),
VersionTestCase((None, None), {}),
]
data_table_ugly = [
VersionTestCase((None, None), ('foo', 'bar', 'bah')),
VersionTestCase((None, None), ['foo', 'bar', 'bah']),
VersionTestCase((None, None), {'foo': 'bar'}),
]
def test_table_data_good_tag2version():
for entry in data_table_good:
assert entry.expected == guess.tag2version(entry.input_data)
def test_table_data_bad_tag2version():
for entry in data_table_bad:
# Input Validation should probably return to us (None, None)
# assert entry.expected == guess.tag2version(entry.input_data)
assert (entry.input_data, None) == guess.tag2version(entry.input_data)
def test_table_data_ugly_tag2version():
for entry in data_table_ugly:
# TODO: probably should be a more specific exception
with pytest.raises(Exception):
guess.tag2version(entry.input_data)
def test_version2tag_simple():
assert '1.2.3' == guess.version2tag('1.2.3')
def test_version2tag_type1():
assert 'v1.2.3' == guess.version2tag('1.2.3', 'vX.Y.Z')
def test_version2tag_type2():
assert 'V1.2.3' == guess.version2tag('1.2.3', 'VX.Y.Z')
|
Add test coverage for rdopkg.guess version2tag and tag2version
This adds a coverage unit test. Some input cases are not handled
well, but it is better to capture the existing behavior first and then
update the tests and code to handle them properly.
Change-Id: I16dfb60886a1ac5ddfab86100e08ac23f8cf6c65from rdopkg import guess
from collections import namedtuple
import pytest
VersionTestCase = namedtuple('VersionTestCase', ('expected', 'input_data'))
data_table_good = [
VersionTestCase(('1.2.3', None), '1.2.3'),
VersionTestCase(('1.2.3', 'vX.Y.Z'), 'v1.2.3'),
VersionTestCase(('1.2.3', 'VX.Y.Z'), 'V1.2.3'),
VersionTestCase(('banana', None), 'banana'),
]
data_table_bad = [
VersionTestCase((None, None), None),
VersionTestCase((None, None), []),
VersionTestCase((None, None), ()),
VersionTestCase((None, None), ''),
VersionTestCase((None, None), {}),
]
data_table_ugly = [
VersionTestCase((None, None), ('foo', 'bar', 'bah')),
VersionTestCase((None, None), ['foo', 'bar', 'bah']),
VersionTestCase((None, None), {'foo': 'bar'}),
]
def test_table_data_good_tag2version():
for entry in data_table_good:
assert entry.expected == guess.tag2version(entry.input_data)
def test_table_data_bad_tag2version():
for entry in data_table_bad:
# Input Validation should probably return to us (None, None)
# assert entry.expected == guess.tag2version(entry.input_data)
assert (entry.input_data, None) == guess.tag2version(entry.input_data)
def test_table_data_ugly_tag2version():
for entry in data_table_ugly:
# TODO: probably should be a more specific exception
with pytest.raises(Exception):
guess.tag2version(entry.input_data)
def test_version2tag_simple():
assert '1.2.3' == guess.version2tag('1.2.3')
def test_version2tag_type1():
assert 'v1.2.3' == guess.version2tag('1.2.3', 'vX.Y.Z')
def test_version2tag_type2():
assert 'V1.2.3' == guess.version2tag('1.2.3', 'VX.Y.Z')
|
<commit_before><commit_msg>Add test coverage for rdopkg.guess version2tag and tag2version
This adds a coverage unit test. Some input cases are not handled
well, but it is better to capture the existing behavior first and then
update the tests and code to handle them properly.
Change-Id: I16dfb60886a1ac5ddfab86100e08ac23f8cf6c65<commit_after>from rdopkg import guess
from collections import namedtuple
import pytest
VersionTestCase = namedtuple('VersionTestCase', ('expected', 'input_data'))
data_table_good = [
VersionTestCase(('1.2.3', None), '1.2.3'),
VersionTestCase(('1.2.3', 'vX.Y.Z'), 'v1.2.3'),
VersionTestCase(('1.2.3', 'VX.Y.Z'), 'V1.2.3'),
VersionTestCase(('banana', None), 'banana'),
]
data_table_bad = [
VersionTestCase((None, None), None),
VersionTestCase((None, None), []),
VersionTestCase((None, None), ()),
VersionTestCase((None, None), ''),
VersionTestCase((None, None), {}),
]
data_table_ugly = [
VersionTestCase((None, None), ('foo', 'bar', 'bah')),
VersionTestCase((None, None), ['foo', 'bar', 'bah']),
VersionTestCase((None, None), {'foo': 'bar'}),
]
def test_table_data_good_tag2version():
for entry in data_table_good:
assert entry.expected == guess.tag2version(entry.input_data)
def test_table_data_bad_tag2version():
for entry in data_table_bad:
# Input Validation should probably return to us (None, None)
# assert entry.expected == guess.tag2version(entry.input_data)
assert (entry.input_data, None) == guess.tag2version(entry.input_data)
def test_table_data_ugly_tag2version():
for entry in data_table_ugly:
# TODO: probably should be a more specific exception
with pytest.raises(Exception):
guess.tag2version(entry.input_data)
def test_version2tag_simple():
assert '1.2.3' == guess.version2tag('1.2.3')
def test_version2tag_type1():
assert 'v1.2.3' == guess.version2tag('1.2.3', 'vX.Y.Z')
def test_version2tag_type2():
assert 'V1.2.3' == guess.version2tag('1.2.3', 'VX.Y.Z')
|
|
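The good-path table above encodes a simple contract: tag2version() strips a leading v/V prefix and reports the tag style as vX.Y.Z/VX.Y.Z, and version2tag() re-applies that style. A minimal pair of functions consistent with those cases is sketched below; rdopkg's actual logic is richer (for instance, the ugly-table inputs are expected to raise), so treat this purely as an illustration of the tested contract.

def tag2version(tag):
    # Strip a leading v/V and report the tag style; otherwise pass through.
    if isinstance(tag, str) and tag[:1] in ('v', 'V'):
        return tag[1:], tag[0] + 'X.Y.Z'
    return tag, None

def version2tag(version, tag_style=None):
    # Re-apply the v/V prefix recorded by tag2version().
    if tag_style and tag_style[:1] in ('v', 'V'):
        return tag_style[0] + version
    return version

assert tag2version('1.2.3') == ('1.2.3', None)
assert tag2version('v1.2.3') == ('1.2.3', 'vX.Y.Z')
assert tag2version('banana') == ('banana', None)
assert version2tag('1.2.3', 'VX.Y.Z') == 'V1.2.3'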
45cb6df45df84cb9ae85fc8aa15710bde6a15bad
|
nova/tests/functional/test_images.py
|
nova/tests/functional/test_images.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api import client
from nova.tests.functional import test_servers
class ImagesTest(test_servers.ServersTestBase):
def test_create_images_negative_invalid_state(self):
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
server_id = created_server['id']
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Create image
name = 'Snapshot 1'
self.api.post_server_action(
server_id, {'createImage': {'name': name}})
self.assertEqual('ACTIVE', found_server['status'])
# Confirm that the image was created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertTrue(found_image)
# Change server status from ACTIVE to SHELVED for negative test
self.flags(shelved_offload_time = -1)
self.api.post_server_action(server_id, {'shelve': {}})
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SHELVED', found_server['status'])
# Create image in SHELVED (not ACTIVE, etc.)
name = 'Snapshot 2'
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
server_id,
{'createImage': {'name': name}})
self.assertEqual(409, ex.response.status_code)
self.assertEqual('SHELVED', found_server['status'])
# Confirm that the image was not created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertFalse(found_image)
# Cleanup
self._delete_server(server_id)
|
Add create image functional negative tests
|
Add create image functional negative tests
The negative tests for the create-image API are not well covered by
the functional tests. This adds conflict tests for the case where the
create-image API is called while the server is in an unexpected state
(e.g. not ACTIVE).
Change-Id: I0c0b9e4d9ef1c5311113177dec46432f35b5ed63
|
Python
|
apache-2.0
|
rahulunair/nova,rahulunair/nova,mahak/nova,vmturbo/nova,klmitch/nova,hanlind/nova,openstack/nova,mikalstill/nova,jianghuaw/nova,klmitch/nova,hanlind/nova,vmturbo/nova,Juniper/nova,rajalokan/nova,Juniper/nova,klmitch/nova,gooddata/openstack-nova,rajalokan/nova,Juniper/nova,klmitch/nova,rajalokan/nova,phenoxim/nova,mahak/nova,gooddata/openstack-nova,openstack/nova,gooddata/openstack-nova,mahak/nova,mikalstill/nova,Juniper/nova,phenoxim/nova,jianghuaw/nova,vmturbo/nova,mikalstill/nova,hanlind/nova,rajalokan/nova,openstack/nova,vmturbo/nova,rahulunair/nova,jianghuaw/nova,gooddata/openstack-nova,jianghuaw/nova
|
Add create image functional negative tests
The negative tests for the create-image API are not well covered by
the functional tests. This adds conflict tests for the case where the
create-image API is called while the server is in an unexpected state
(e.g. not ACTIVE).
Change-Id: I0c0b9e4d9ef1c5311113177dec46432f35b5ed63
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api import client
from nova.tests.functional import test_servers
class ImagesTest(test_servers.ServersTestBase):
def test_create_images_negative_invalid_state(self):
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
server_id = created_server['id']
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Create image
name = 'Snapshot 1'
self.api.post_server_action(
server_id, {'createImage': {'name': name}})
self.assertEqual('ACTIVE', found_server['status'])
# Confirm that the image was created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertTrue(found_image)
# Change server status from ACTIVE to SHELVED for negative test
self.flags(shelved_offload_time = -1)
self.api.post_server_action(server_id, {'shelve': {}})
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SHELVED', found_server['status'])
# Create image in SHELVED (not ACTIVE, etc.)
name = 'Snapshot 2'
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
server_id,
{'createImage': {'name': name}})
self.assertEqual(409, ex.response.status_code)
self.assertEqual('SHELVED', found_server['status'])
# Confirm that the image was not created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertFalse(found_image)
# Cleanup
self._delete_server(server_id)
|
<commit_before><commit_msg>Add create image functional negative tests
The negative tests for the create-image API are not well covered by
the functional tests. This adds conflict tests for the case where the
create-image API is called while the server is in an unexpected state
(e.g. not ACTIVE).
Change-Id: I0c0b9e4d9ef1c5311113177dec46432f35b5ed63<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api import client
from nova.tests.functional import test_servers
class ImagesTest(test_servers.ServersTestBase):
def test_create_images_negative_invalid_state(self):
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
server_id = created_server['id']
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Create image
name = 'Snapshot 1'
self.api.post_server_action(
server_id, {'createImage': {'name': name}})
self.assertEqual('ACTIVE', found_server['status'])
# Confirm that the image was created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertTrue(found_image)
# Change server status from ACTIVE to SHELVED for negative test
self.flags(shelved_offload_time = -1)
self.api.post_server_action(server_id, {'shelve': {}})
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SHELVED', found_server['status'])
# Create image in SHELVED (not ACTIVE, etc.)
name = 'Snapshot 2'
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
server_id,
{'createImage': {'name': name}})
self.assertEqual(409, ex.response.status_code)
self.assertEqual('SHELVED', found_server['status'])
# Confirm that the image was not created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertFalse(found_image)
# Cleanup
self._delete_server(server_id)
|
Add create image functional negative tests
The negative tests for the create-image API are not well covered by
the functional tests. This adds conflict tests for the case where the
create-image API is called while the server is in an unexpected state
(e.g. not ACTIVE).
Change-Id: I0c0b9e4d9ef1c5311113177dec46432f35b5ed63# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api import client
from nova.tests.functional import test_servers
class ImagesTest(test_servers.ServersTestBase):
def test_create_images_negative_invalid_state(self):
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
server_id = created_server['id']
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Create image
name = 'Snapshot 1'
self.api.post_server_action(
server_id, {'createImage': {'name': name}})
self.assertEqual('ACTIVE', found_server['status'])
# Confirm that the image was created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertTrue(found_image)
# Change server status from ACTIVE to SHELVED for negative test
self.flags(shelved_offload_time = -1)
self.api.post_server_action(server_id, {'shelve': {}})
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SHELVED', found_server['status'])
# Create image in SHELVED (not ACTIVE, etc.)
name = 'Snapshot 2'
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
server_id,
{'createImage': {'name': name}})
self.assertEqual(409, ex.response.status_code)
self.assertEqual('SHELVED', found_server['status'])
# Confirm that the image was not created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertFalse(found_image)
# Cleanup
self._delete_server(server_id)
|
<commit_before><commit_msg>Add create image functional negative tests
The negative tests for the create-image API are not well covered by
the functional tests. This adds conflict tests for the case where the
create-image API is called while the server is in an unexpected state
(e.g. not ACTIVE).
Change-Id: I0c0b9e4d9ef1c5311113177dec46432f35b5ed63<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api import client
from nova.tests.functional import test_servers
class ImagesTest(test_servers.ServersTestBase):
def test_create_images_negative_invalid_state(self):
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
server_id = created_server['id']
found_server = self._wait_for_state_change(created_server, 'BUILD')
self.assertEqual('ACTIVE', found_server['status'])
# Create image
name = 'Snapshot 1'
self.api.post_server_action(
server_id, {'createImage': {'name': name}})
self.assertEqual('ACTIVE', found_server['status'])
# Confirm that the image was created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertTrue(found_image)
# Change server status from ACTIVE to SHELVED for negative test
self.flags(shelved_offload_time = -1)
self.api.post_server_action(server_id, {'shelve': {}})
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('SHELVED', found_server['status'])
# Create image in SHELVED (not ACTIVE, etc.)
name = 'Snapshot 2'
ex = self.assertRaises(client.OpenStackApiException,
self.api.post_server_action,
server_id,
{'createImage': {'name': name}})
self.assertEqual(409, ex.response.status_code)
self.assertEqual('SHELVED', found_server['status'])
# Confirm that the image was not created
images = self.api.get_images(detail=False)
image_map = {image['name']: image for image in images}
found_image = image_map.get(name)
self.assertFalse(found_image)
# Cleanup
self._delete_server(server_id)
|
|
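The negative test above follows a common pattern: drive the resource into a state the action is not allowed in, capture the API exception, and assert on its HTTP status. Reduced to a standalone stdlib-unittest sketch with a fake API (all names below are invented for illustration):

import unittest

class ApiConflict(Exception):
    """Stand-in for an HTTP client exception carrying a status code."""
    def __init__(self, status_code):
        self.status_code = status_code

def create_image(server_status):
    # The fake API refuses snapshots unless the server is ACTIVE.
    if server_status != 'ACTIVE':
        raise ApiConflict(409)
    return {'name': 'Snapshot'}

class ConflictTest(unittest.TestCase):
    def test_create_image_shelved(self):
        with self.assertRaises(ApiConflict) as ctx:
            create_image('SHELVED')
        self.assertEqual(409, ctx.exception.status_code)

if __name__ == '__main__':
    unittest.main()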
c6f6278c1915ef90e8825f94cc33a4dea4124722
|
network/http_server_cat.py
|
network/http_server_cat.py
|
#!/bin/env python3
import http.server
import string
import click
import pathlib
import urllib.parse
import os
@click.command()
@click.argument("port", required=False)
@click.option("-s", "--server", default="0.0.0.0")
def main(port, server):
if not port:
port = 8888
http_server = http.server.HTTPServer((server, port), PostHandler)
print('Starting server on {0}:{1}, use <Ctrl-C> to stop'.format(
server, port))
http_server.serve_forever()
class PostHandler(http.server.BaseHTTPRequestHandler):
cwd = pathlib.Path(".")
def do_GET(self):
body_file_cat = string.Template("$content")
body_dir_list = string.Template("""
<h1>Directory listing for $cwd</h1>
<ul>
$items
</ul>
""")
page = string.Template("""<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Directory listing for $cwd</title>
</head>
<body>
$body
</body>
</html>
""")
path = urllib.parse.urlparse(self.path)
fs_path = pathlib.Path("{}{}".format(self.cwd, path.path))
prefix_ref = "{}/".format(path.path)
if fs_path.is_file():
body = body_file_cat
content = ""
with fs_path.open() as f:
content = "".join(f.readlines())
content = "<pre>{}</pre>".format(content)
body = body.substitute(content=content)
else:
body = body_dir_list
items = list()
item_template = string.Template('<li><a href="$item_path">$item_name</a></li>')
for p in fs_path.iterdir():
item_path = urllib.parse.urljoin(prefix_ref, p.name)
item_name = p.name
if os.path.isdir(p):
item_name = "{}/".format(item_name)
items.append(item_template.substitute(item_path=item_path, item_name=item_name))
body = body.substitute(cwd=fs_path, items="\n".join(items))
page = page.substitute(cwd=fs_path, body=body)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(page.encode("UTF-8"))
if __name__ == '__main__':
main()
|
Add http directory listing with content display
|
Add http directory listing with content display
|
Python
|
mit
|
dgengtek/scripts,dgengtek/scripts
|
Add http directory listing with content display
|
#!/bin/env python3
import http.server
import string
import click
import pathlib
import urllib.parse
import os
@click.command()
@click.argument("port", required=False)
@click.option("-s", "--server", default="0.0.0.0")
def main(port, server):
if not port:
port = 8888
http_server = http.server.HTTPServer((server, port), PostHandler)
print('Starting server on {0}:{1}, use <Ctrl-C> to stop'.format(
server, port))
http_server.serve_forever()
class PostHandler(http.server.BaseHTTPRequestHandler):
cwd = pathlib.Path(".")
def do_GET(self):
body_file_cat = string.Template("$content")
body_dir_list = string.Template("""
<h1>Directory listing for $cwd</h1>
<ul>
$items
</ul>
""")
page = string.Template("""<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Directory listing for $cwd</title>
</head>
<body>
$body
</body>
</html>
""")
path = urllib.parse.urlparse(self.path)
fs_path = pathlib.Path("{}{}".format(self.cwd, path.path))
prefix_ref = "{}/".format(path.path)
if fs_path.is_file():
body = body_file_cat
content = ""
with fs_path.open() as f:
content = "".join(f.readlines())
content = "<pre>{}</pre>".format(content)
body = body.substitute(content=content)
else:
body = body_dir_list
items = list()
item_template = string.Template('<li><a href="$item_path">$item_name</a></li>')
for p in fs_path.iterdir():
item_path = urllib.parse.urljoin(prefix_ref, p.name)
item_name = p.name
if os.path.isdir(p):
item_name = "{}/".format(item_name)
items.append(item_template.substitute(item_path=item_path, item_name=item_name))
body = body.substitute(cwd=fs_path, items="\n".join(items))
page = page.substitute(cwd=fs_path, body=body)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(page.encode("UTF-8"))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add http directory listing with content display<commit_after>
|
#!/bin/env python3
import http.server
import string
import click
import pathlib
import urllib.parse
import os
@click.command()
@click.argument("port", required=False)
@click.option("-s", "--server", default="0.0.0.0")
def main(port, server):
if not port:
port = 8888
http_server = http.server.HTTPServer((server, port), PostHandler)
print('Starting server on {0}:{1}, use <Ctrl-C> to stop'.format(
server, port))
http_server.serve_forever()
class PostHandler(http.server.BaseHTTPRequestHandler):
cwd = pathlib.Path(".")
def do_GET(self):
body_file_cat = string.Template("$content")
body_dir_list = string.Template("""
<h1>Directory listing for $cwd</h1>
<ul>
$items
</ul>
""")
page = string.Template("""<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Directory listing for $cwd</title>
</head>
<body>
$body
</body>
</html>
""")
path = urllib.parse.urlparse(self.path)
fs_path = pathlib.Path("{}{}".format(self.cwd, path.path))
prefix_ref = "{}/".format(path.path)
if fs_path.is_file():
body = body_file_cat
content = ""
with fs_path.open() as f:
content = "".join(f.readlines())
content = "<pre>{}</pre>".format(content)
body = body.substitute(content=content)
else:
body = body_dir_list
items = list()
item_template = string.Template('<li><a href="$item_path">$item_name</a></li>')
for p in fs_path.iterdir():
item_path = urllib.parse.urljoin(prefix_ref, p.name)
item_name = p.name
if os.path.isdir(p):
item_name = "{}/".format(item_name)
items.append(item_template.substitute(item_path=item_path, item_name=item_name))
body = body.substitute(cwd=fs_path, items="\n".join(items))
page = page.substitute(cwd=fs_path, body=body)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(page.encode("UTF-8"))
if __name__ == '__main__':
main()
|
Add http directory listing with content display#!/bin/env python3
import http.server
import string
import click
import pathlib
import urllib.parse
import os
@click.command()
@click.argument("port", required=False)
@click.option("-s", "--server", default="0.0.0.0")
def main(port, server):
if not port:
port = 8888
http_server = http.server.HTTPServer((server, port), PostHandler)
print('Starting server on {0}:{1}, use <Ctrl-C> to stop'.format(
server, port))
http_server.serve_forever()
class PostHandler(http.server.BaseHTTPRequestHandler):
cwd = pathlib.Path(".")
def do_GET(self):
body_file_cat = string.Template("$content")
body_dir_list = string.Template("""
<h1>Directory listing for $cwd</h1>
<ul>
$items
</ul>
""")
page = string.Template("""<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Directory listing for $cwd</title>
</head>
<body>
$body
</body>
</html>
""")
path = urllib.parse.urlparse(self.path)
fs_path = pathlib.Path("{}{}".format(self.cwd, path.path))
prefix_ref = "{}/".format(path.path)
if fs_path.is_file():
body = body_file_cat
content = ""
with fs_path.open() as f:
content = "".join(f.readlines())
content = "<pre>{}</pre>".format(content)
body = body.substitute(content=content)
else:
body = body_dir_list
items = list()
item_template = string.Template('<li><a href="$item_path">$item_name</a></li>')
for p in fs_path.iterdir():
item_path = urllib.parse.urljoin(prefix_ref, p.name)
item_name = p.name
if os.path.isdir(p):
item_name = "{}/".format(item_name)
items.append(item_template.substitute(item_path=item_path, item_name=item_name))
body = body.substitute(cwd=fs_path, items="\n".join(items))
page = page.substitute(cwd=fs_path, body=body)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(page.encode("UTF-8"))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add http directory listing with content display<commit_after>#!/bin/env python3
import http.server
import string
import click
import pathlib
import urllib.parse
import os
@click.command()
@click.argument("port", required=False)
@click.option("-s", "--server", default="0.0.0.0")
def main(port, server):
if not port:
port = 8888
http_server = http.server.HTTPServer((server, port), PostHandler)
print('Starting server on {0}:{1}, use <Ctrl-C> to stop'.format(
server, port))
http_server.serve_forever()
class PostHandler(http.server.BaseHTTPRequestHandler):
cwd = pathlib.Path(".")
def do_GET(self):
body_file_cat = string.Template("$content")
body_dir_list = string.Template("""
<h1>Directory listing for $cwd</h1>
<ul>
$items
</ul>
""")
page = string.Template("""<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Directory listing for $cwd</title>
</head>
<body>
$body
</body>
</html>
""")
path = urllib.parse.urlparse(self.path)
fs_path = pathlib.Path("{}{}".format(self.cwd, path.path))
prefix_ref = "{}/".format(path.path)
if fs_path.is_file():
body = body_file_cat
content = ""
with fs_path.open() as f:
content = "".join(f.readlines())
content = "<pre>{}</pre>".format(content)
body = body.substitute(content=content)
else:
body = body_dir_list
items = list()
item_template = string.Template('<li><a href="$item_path">$item_name</a></li>')
for p in fs_path.iterdir():
item_path = urllib.parse.urljoin(prefix_ref, p.name)
item_name = p.name
if os.path.isdir(p):
item_name = "{}/".format(item_name)
items.append(item_template.substitute(item_path=item_path, item_name=item_name))
body = body.substitute(cwd=fs_path, items="\n".join(items))
page = page.substitute(cwd=fs_path, body=body)
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(page.encode("UTF-8"))
if __name__ == '__main__':
main()
|
|
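A note on the CLI wiring in the script above: a bare click.argument yields a string, so an explicitly supplied port would reach HTTPServer as str and fail; declaring type=int avoids that. Below is a condensed sketch of the same entry point that makes the port an integer and serves the stdlib directory-listing handler; it is an assumed simplification, not a drop-in replacement for the custom handler.

import http.server
import click

@click.command()
@click.argument("port", type=int, default=8888)  # default makes the argument optional
@click.option("-s", "--server", default="0.0.0.0")
def main(port, server):
    # SimpleHTTPRequestHandler already provides directory listings.
    handler = http.server.SimpleHTTPRequestHandler
    httpd = http.server.HTTPServer((server, port), handler)
    print("Serving on {0}:{1}, use <Ctrl-C> to stop".format(server, port))
    httpd.serve_forever()

if __name__ == "__main__":
    main()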
6dfc5a3d7845633570b83aac06c47756292cf8ac
|
st2common/tests/unit/test_db_model_uids.py
|
st2common/tests/unit/test_db_model_uids.py
|
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.models.db.pack import PackDB
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.db.action import ActionDB
from st2common.models.db.rule import RuleDB
from st2common.models.db.trigger import TriggerTypeDB
from st2common.models.db.trigger import TriggerDB
__all__ = [
'DBModelUIDFieldTestCase'
]
class DBModelUIDFieldTestCase(unittest2.TestCase):
def test_get_uid(self):
pack_db = PackDB(ref='ma_pack')
self.assertEqual(pack_db.get_uid(), 'pack:ma_pack')
sensor_type_db = SensorTypeDB(name='sname', pack='spack')
self.assertEqual(sensor_type_db.get_uid(), 'sensor_type:spack:sname')
action_db = ActionDB(name='aname', pack='apack', runner_info={})
self.assertEqual(action_db.get_uid(), 'action:apack:aname')
rule_db = RuleDB(name='rname', pack='rpack')
self.assertEqual(rule_db.get_uid(), 'rule:rpack:rname')
trigger_type_db = TriggerTypeDB(name='ttname', pack='ttpack')
self.assertEqual(trigger_type_db.get_uid(), 'trigger_type:ttpack:ttname')
trigger_db = TriggerDB(name='tname', pack='tpack')
self.assertTrue(trigger_db.get_uid().startswith('trigger:tpack:tname:'))
|
Add tests for get_uid() method for common DB models.
|
Add tests for get_uid() method for common DB models.
|
Python
|
apache-2.0
|
dennybaa/st2,StackStorm/st2,pixelrebel/st2,Itxaka/st2,Plexxi/st2,pixelrebel/st2,nzlosh/st2,punalpatel/st2,nzlosh/st2,Itxaka/st2,emedvedev/st2,dennybaa/st2,tonybaloney/st2,Plexxi/st2,punalpatel/st2,peak6/st2,dennybaa/st2,StackStorm/st2,tonybaloney/st2,peak6/st2,StackStorm/st2,StackStorm/st2,armab/st2,alfasin/st2,nzlosh/st2,emedvedev/st2,Itxaka/st2,punalpatel/st2,armab/st2,pixelrebel/st2,alfasin/st2,lakshmi-kannan/st2,tonybaloney/st2,lakshmi-kannan/st2,emedvedev/st2,armab/st2,peak6/st2,nzlosh/st2,Plexxi/st2,lakshmi-kannan/st2,alfasin/st2,Plexxi/st2
|
Add tests for get_uid() method for common DB models.
|
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.models.db.pack import PackDB
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.db.action import ActionDB
from st2common.models.db.rule import RuleDB
from st2common.models.db.trigger import TriggerTypeDB
from st2common.models.db.trigger import TriggerDB
__all__ = [
'DBModelUIDFieldTestCase'
]
class DBModelUIDFieldTestCase(unittest2.TestCase):
def test_get_uid(self):
pack_db = PackDB(ref='ma_pack')
self.assertEqual(pack_db.get_uid(), 'pack:ma_pack')
sensor_type_db = SensorTypeDB(name='sname', pack='spack')
self.assertEqual(sensor_type_db.get_uid(), 'sensor_type:spack:sname')
action_db = ActionDB(name='aname', pack='apack', runner_info={})
self.assertEqual(action_db.get_uid(), 'action:apack:aname')
rule_db = RuleDB(name='rname', pack='rpack')
self.assertEqual(rule_db.get_uid(), 'rule:rpack:rname')
trigger_type_db = TriggerTypeDB(name='ttname', pack='ttpack')
self.assertEqual(trigger_type_db.get_uid(), 'trigger_type:ttpack:ttname')
trigger_db = TriggerDB(name='tname', pack='tpack')
self.assertTrue(trigger_db.get_uid().startswith('trigger:tpack:tname:'))
|
<commit_before><commit_msg>Add tests for get_uid() method for common DB models.<commit_after>
|
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.models.db.pack import PackDB
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.db.action import ActionDB
from st2common.models.db.rule import RuleDB
from st2common.models.db.trigger import TriggerTypeDB
from st2common.models.db.trigger import TriggerDB
__all__ = [
'DBModelUIDFieldTestCase'
]
class DBModelUIDFieldTestCase(unittest2.TestCase):
def test_get_uid(self):
pack_db = PackDB(ref='ma_pack')
self.assertEqual(pack_db.get_uid(), 'pack:ma_pack')
sensor_type_db = SensorTypeDB(name='sname', pack='spack')
self.assertEqual(sensor_type_db.get_uid(), 'sensor_type:spack:sname')
action_db = ActionDB(name='aname', pack='apack', runner_info={})
self.assertEqual(action_db.get_uid(), 'action:apack:aname')
rule_db = RuleDB(name='rname', pack='rpack')
self.assertEqual(rule_db.get_uid(), 'rule:rpack:rname')
trigger_type_db = TriggerTypeDB(name='ttname', pack='ttpack')
self.assertEqual(trigger_type_db.get_uid(), 'trigger_type:ttpack:ttname')
trigger_db = TriggerDB(name='tname', pack='tpack')
self.assertTrue(trigger_db.get_uid().startswith('trigger:tpack:tname:'))
|
Add tests for get_uid() method for common DB models.# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.models.db.pack import PackDB
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.db.action import ActionDB
from st2common.models.db.rule import RuleDB
from st2common.models.db.trigger import TriggerTypeDB
from st2common.models.db.trigger import TriggerDB
__all__ = [
'DBModelUIDFieldTestCase'
]
class DBModelUIDFieldTestCase(unittest2.TestCase):
def test_get_uid(self):
pack_db = PackDB(ref='ma_pack')
self.assertEqual(pack_db.get_uid(), 'pack:ma_pack')
sensor_type_db = SensorTypeDB(name='sname', pack='spack')
self.assertEqual(sensor_type_db.get_uid(), 'sensor_type:spack:sname')
action_db = ActionDB(name='aname', pack='apack', runner_info={})
self.assertEqual(action_db.get_uid(), 'action:apack:aname')
rule_db = RuleDB(name='rname', pack='rpack')
self.assertEqual(rule_db.get_uid(), 'rule:rpack:rname')
trigger_type_db = TriggerTypeDB(name='ttname', pack='ttpack')
self.assertEqual(trigger_type_db.get_uid(), 'trigger_type:ttpack:ttname')
trigger_db = TriggerDB(name='tname', pack='tpack')
self.assertTrue(trigger_db.get_uid().startswith('trigger:tpack:tname:'))
|
<commit_before><commit_msg>Add tests for get_uid() method for common DB models.<commit_after># contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.models.db.pack import PackDB
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.db.action import ActionDB
from st2common.models.db.rule import RuleDB
from st2common.models.db.trigger import TriggerTypeDB
from st2common.models.db.trigger import TriggerDB
__all__ = [
'DBModelUIDFieldTestCase'
]
class DBModelUIDFieldTestCase(unittest2.TestCase):
def test_get_uid(self):
pack_db = PackDB(ref='ma_pack')
self.assertEqual(pack_db.get_uid(), 'pack:ma_pack')
sensor_type_db = SensorTypeDB(name='sname', pack='spack')
self.assertEqual(sensor_type_db.get_uid(), 'sensor_type:spack:sname')
action_db = ActionDB(name='aname', pack='apack', runner_info={})
self.assertEqual(action_db.get_uid(), 'action:apack:aname')
rule_db = RuleDB(name='rname', pack='rpack')
self.assertEqual(rule_db.get_uid(), 'rule:rpack:rname')
trigger_type_db = TriggerTypeDB(name='ttname', pack='ttpack')
self.assertEqual(trigger_type_db.get_uid(), 'trigger_type:ttpack:ttname')
trigger_db = TriggerDB(name='tname', pack='tpack')
self.assertTrue(trigger_db.get_uid().startswith('trigger:tpack:tname:'))
|
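The assertions above pin down a '<resource_type>:<part>:<part>' UID scheme. A minimal sketch of how such a get_uid() could be composed — the class and attribute names here are illustrative assumptions, not taken from the st2 source:

class UIDMixin(object):
    # Assumed shape of the mixin the models above would share; names are hypothetical.
    UID_SEPARATOR = ':'
    RESOURCE_TYPE = 'action'
    UID_FIELDS = ['pack', 'name']

    def get_uid(self):
        # Prefix the resource type, then append each configured field value.
        parts = [self.RESOURCE_TYPE] + [getattr(self, field) for field in self.UID_FIELDS]
        return self.UID_SEPARATOR.join(parts)  # e.g. 'action:apack:aname'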
|
8b4bbd23bf37fb946b664f5932e4903f802c6e0d
|
flake8/tests/test_integration.py
|
flake8/tests/test_integration.py
|
from __future__ import with_statement
import os
import unittest
try:
from unittest import mock
except ImportError:
import mock # < PY33
from flake8 import engine
class IntegrationTestCase(unittest.TestCase):
"""Integration style tests to exercise different command line options."""
def this_file(self):
"""Return the real path of this file."""
this_file = os.path.realpath(__file__)
if this_file.endswith("pyc"):
this_file = this_file[:-1]
return this_file
def check_files(self, arglist=[], explicit_stdin=False, count=0):
"""Call check_files."""
if explicit_stdin:
target_file = "-"
else:
target_file = self.this_file()
argv = ['flake8'] + arglist + [target_file]
with mock.patch("sys.argv", argv):
style_guide = engine.get_style_guide(parse_argv=True)
report = style_guide.check_files()
self.assertEqual(report.total_errors, count)
return style_guide, report
def test_no_args(self):
# assert there are no reported errors
self.check_files()
def _job_tester(self, jobs):
# mock stdout.flush so we can count the number of jobs created
with mock.patch('sys.stdout.flush') as mocked:
guide, report = self.check_files(arglist=['--jobs=%s' % jobs])
self.assertEqual(guide.options.jobs, jobs)
self.assertEqual(mocked.call_count, jobs)
def test_jobs(self):
self._job_tester(2)
self._job_tester(10)
def test_stdin(self):
self.count = 0
def fake_stdin():
self.count += 1
with open(self.this_file(), "r") as f:
return f.read()
with mock.patch("pep8.stdin_get_value", fake_stdin):
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True)
self.assertEqual(self.count, 1)
def test_stdin_fail(self):
def fake_stdin():
return "notathing\n"
with mock.patch("pep8.stdin_get_value", fake_stdin):
# only assert needed is in check_files
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True,
count=1)
|
Add first pass at integration style tests
|
Add first pass at integration style tests
In order to better prevent regressions (such as related to concurrency),
Add an integration test framework to simulate running flake8 with
arguments.
|
Python
|
mit
|
wdv4758h/flake8,lericson/flake8
|
Add first pass at integration style tests
In order to better prevent regressions (such as related to concurrency),
Add an integration test framework to simulate running flake8 with
arguments.
|
from __future__ import with_statement
import os
import unittest
try:
from unittest import mock
except ImportError:
import mock # < PY33
from flake8 import engine
class IntegrationTestCase(unittest.TestCase):
"""Integration style tests to exercise different command line options."""
def this_file(self):
"""Return the real path of this file."""
this_file = os.path.realpath(__file__)
if this_file.endswith("pyc"):
this_file = this_file[:-1]
return this_file
def check_files(self, arglist=[], explicit_stdin=False, count=0):
"""Call check_files."""
if explicit_stdin:
target_file = "-"
else:
target_file = self.this_file()
argv = ['flake8'] + arglist + [target_file]
with mock.patch("sys.argv", argv):
style_guide = engine.get_style_guide(parse_argv=True)
report = style_guide.check_files()
self.assertEqual(report.total_errors, count)
return style_guide, report
def test_no_args(self):
# assert there are no reported errors
self.check_files()
def _job_tester(self, jobs):
# mock stdout.flush so we can count the number of jobs created
with mock.patch('sys.stdout.flush') as mocked:
guide, report = self.check_files(arglist=['--jobs=%s' % jobs])
self.assertEqual(guide.options.jobs, jobs)
self.assertEqual(mocked.call_count, jobs)
def test_jobs(self):
self._job_tester(2)
self._job_tester(10)
def test_stdin(self):
self.count = 0
def fake_stdin():
self.count += 1
with open(self.this_file(), "r") as f:
return f.read()
with mock.patch("pep8.stdin_get_value", fake_stdin):
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True)
self.assertEqual(self.count, 1)
def test_stdin_fail(self):
def fake_stdin():
return "notathing\n"
with mock.patch("pep8.stdin_get_value", fake_stdin):
# only assert needed is in check_files
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True,
count=1)
|
<commit_before><commit_msg>Add first pass at integration style tests
In order to better prevent regressions (such as related to concurrency),
Add an integration test framework to simulate running flake8 with
arguments.<commit_after>
|
from __future__ import with_statement
import os
import unittest
try:
from unittest import mock
except ImportError:
import mock # < PY33
from flake8 import engine
class IntegrationTestCase(unittest.TestCase):
"""Integration style tests to exercise different command line options."""
def this_file(self):
"""Return the real path of this file."""
this_file = os.path.realpath(__file__)
if this_file.endswith("pyc"):
this_file = this_file[:-1]
return this_file
def check_files(self, arglist=[], explicit_stdin=False, count=0):
"""Call check_files."""
if explicit_stdin:
target_file = "-"
else:
target_file = self.this_file()
argv = ['flake8'] + arglist + [target_file]
with mock.patch("sys.argv", argv):
style_guide = engine.get_style_guide(parse_argv=True)
report = style_guide.check_files()
self.assertEqual(report.total_errors, count)
return style_guide, report
def test_no_args(self):
# assert there are no reported errors
self.check_files()
def _job_tester(self, jobs):
# mock stdout.flush so we can count the number of jobs created
with mock.patch('sys.stdout.flush') as mocked:
guide, report = self.check_files(arglist=['--jobs=%s' % jobs])
self.assertEqual(guide.options.jobs, jobs)
self.assertEqual(mocked.call_count, jobs)
def test_jobs(self):
self._job_tester(2)
self._job_tester(10)
def test_stdin(self):
self.count = 0
def fake_stdin():
self.count += 1
with open(self.this_file(), "r") as f:
return f.read()
with mock.patch("pep8.stdin_get_value", fake_stdin):
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True)
self.assertEqual(self.count, 1)
def test_stdin_fail(self):
def fake_stdin():
return "notathing\n"
with mock.patch("pep8.stdin_get_value", fake_stdin):
# only assert needed is in check_files
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True,
count=1)
|
Add first pass at integration style tests
In order to better prevent regressions (such as related to concurrency),
Add an integration test framework to simulate running flake8 with
arguments.from __future__ import with_statement
import os
import unittest
try:
from unittest import mock
except ImportError:
import mock # < PY33
from flake8 import engine
class IntegrationTestCase(unittest.TestCase):
"""Integration style tests to exercise different command line options."""
def this_file(self):
"""Return the real path of this file."""
this_file = os.path.realpath(__file__)
if this_file.endswith("pyc"):
this_file = this_file[:-1]
return this_file
def check_files(self, arglist=[], explicit_stdin=False, count=0):
"""Call check_files."""
if explicit_stdin:
target_file = "-"
else:
target_file = self.this_file()
argv = ['flake8'] + arglist + [target_file]
with mock.patch("sys.argv", argv):
style_guide = engine.get_style_guide(parse_argv=True)
report = style_guide.check_files()
self.assertEqual(report.total_errors, count)
return style_guide, report
def test_no_args(self):
# assert there are no reported errors
self.check_files()
def _job_tester(self, jobs):
# mock stdout.flush so we can count the number of jobs created
with mock.patch('sys.stdout.flush') as mocked:
guide, report = self.check_files(arglist=['--jobs=%s' % jobs])
self.assertEqual(guide.options.jobs, jobs)
self.assertEqual(mocked.call_count, jobs)
def test_jobs(self):
self._job_tester(2)
self._job_tester(10)
def test_stdin(self):
self.count = 0
def fake_stdin():
self.count += 1
with open(self.this_file(), "r") as f:
return f.read()
with mock.patch("pep8.stdin_get_value", fake_stdin):
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True)
self.assertEqual(self.count, 1)
def test_stdin_fail(self):
def fake_stdin():
return "notathing\n"
with mock.patch("pep8.stdin_get_value", fake_stdin):
# only assert needed is in check_files
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True,
count=1)
|
<commit_before><commit_msg>Add first pass at integration style tests
In order to better prevent regressions (such as related to concurrency),
Add an integration test framework to simulate running flake8 with
arguments.<commit_after>from __future__ import with_statement
import os
import unittest
try:
from unittest import mock
except ImportError:
import mock # < PY33
from flake8 import engine
class IntegrationTestCase(unittest.TestCase):
"""Integration style tests to exercise different command line options."""
def this_file(self):
"""Return the real path of this file."""
this_file = os.path.realpath(__file__)
if this_file.endswith("pyc"):
this_file = this_file[:-1]
return this_file
def check_files(self, arglist=[], explicit_stdin=False, count=0):
"""Call check_files."""
if explicit_stdin:
target_file = "-"
else:
target_file = self.this_file()
argv = ['flake8'] + arglist + [target_file]
with mock.patch("sys.argv", argv):
style_guide = engine.get_style_guide(parse_argv=True)
report = style_guide.check_files()
self.assertEqual(report.total_errors, count)
return style_guide, report
def test_no_args(self):
# assert there are no reported errors
self.check_files()
def _job_tester(self, jobs):
# mock stdout.flush so we can count the number of jobs created
with mock.patch('sys.stdout.flush') as mocked:
guide, report = self.check_files(arglist=['--jobs=%s' % jobs])
self.assertEqual(guide.options.jobs, jobs)
self.assertEqual(mocked.call_count, jobs)
def test_jobs(self):
self._job_tester(2)
self._job_tester(10)
def test_stdin(self):
self.count = 0
def fake_stdin():
self.count += 1
with open(self.this_file(), "r") as f:
return f.read()
with mock.patch("pep8.stdin_get_value", fake_stdin):
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True)
self.assertEqual(self.count, 1)
def test_stdin_fail(self):
def fake_stdin():
return "notathing\n"
with mock.patch("pep8.stdin_get_value", fake_stdin):
# only assert needed is in check_files
guide, report = self.check_files(arglist=['--jobs=4'],
explicit_stdin=True,
count=1)
|
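The load-bearing trick in check_files above is patching sys.argv so the tool's own option parsing is exercised end to end. A stripped-down illustration of the same pattern (the file name is a placeholder):

try:
    from unittest import mock
except ImportError:
    import mock  # Python 2 backport, as in the test module above

with mock.patch('sys.argv', ['flake8', '--jobs=2', 'placeholder.py']):
    # Any code run here that reads sys.argv sees the fake command line,
    # so argv-driven entry points can be tested without a subprocess.
    import sys
    assert sys.argv[1] == '--jobs=2'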
|
77af87198d1116b77df431d9139b30f76103dd64
|
fellowms/migrations/0023_auto_20160617_1350.py
|
fellowms/migrations/0023_auto_20160617_1350.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-17 13:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0022_event_report_url'),
]
operations = [
migrations.AddField(
model_name='event',
name='lat',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='event',
name='lon',
field=models.FloatField(blank=True, null=True),
),
]
|
Add migration for latitude and longitude of event
|
Add migration for latitude and longitude of event
|
Python
|
bsd-3-clause
|
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
|
Add migration for latitude and longitude of event
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-17 13:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0022_event_report_url'),
]
operations = [
migrations.AddField(
model_name='event',
name='lat',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='event',
name='lon',
field=models.FloatField(blank=True, null=True),
),
]
|
<commit_before><commit_msg>Add migration for latitude and longitude of event<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-17 13:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0022_event_report_url'),
]
operations = [
migrations.AddField(
model_name='event',
name='lat',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='event',
name='lon',
field=models.FloatField(blank=True, null=True),
),
]
|
Add migration for latitude and longitude of event# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-17 13:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0022_event_report_url'),
]
operations = [
migrations.AddField(
model_name='event',
name='lat',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='event',
name='lon',
field=models.FloatField(blank=True, null=True),
),
]
|
<commit_before><commit_msg>Add migration for latitude and longitude of event<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-17 13:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0022_event_report_url'),
]
operations = [
migrations.AddField(
model_name='event',
name='lat',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='event',
name='lon',
field=models.FloatField(blank=True, null=True),
),
]
|
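For readers unfamiliar with the two flags: blank=True relaxes form-level validation, while null=True allows NULL in the database column. The Event model these AddField operations imply would look roughly like this (reconstructed for illustration, not copied from the repo):

from django.db import models

class Event(models.Model):
    # blank=True: the admin/form layer accepts an empty value;
    # null=True: the column itself may store NULL.
    lat = models.FloatField(blank=True, null=True)
    lon = models.FloatField(blank=True, null=True)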
|
b920f5aeecf7843fcc699db4a70a9a0f124fa198
|
tests/test_protonate.py
|
tests/test_protonate.py
|
import propka.atom
import propka.protonate
def test_protonate_atom():
atom = propka.atom.Atom(
"HETATM 4479 V VO4 A1578 -19.097 16.967 0.500 1.00 17.21 V "
)
assert not atom.is_protonated
p = propka.protonate.Protonate()
p.protonate_atom(atom)
assert atom.is_protonated
assert atom.number_of_protons_to_add == 6
|
Add unit test for protonate.py
|
Add unit test for protonate.py
|
Python
|
lgpl-2.1
|
jensengroup/propka
|
Add unit test for protonate.py
|
import propka.atom
import propka.protonate
def test_protonate_atom():
atom = propka.atom.Atom(
"HETATM 4479 V VO4 A1578 -19.097 16.967 0.500 1.00 17.21 V "
)
assert not atom.is_protonated
p = propka.protonate.Protonate()
p.protonate_atom(atom)
assert atom.is_protonated
assert atom.number_of_protons_to_add == 6
|
<commit_before><commit_msg>Add unit test for protonate.py<commit_after>
|
import propka.atom
import propka.protonate
def test_protonate_atom():
atom = propka.atom.Atom(
"HETATM 4479 V VO4 A1578 -19.097 16.967 0.500 1.00 17.21 V "
)
assert not atom.is_protonated
p = propka.protonate.Protonate()
p.protonate_atom(atom)
assert atom.is_protonated
assert atom.number_of_protons_to_add == 6
|
Add unit test for protonate.pyimport propka.atom
import propka.protonate
def test_protonate_atom():
atom = propka.atom.Atom(
"HETATM 4479 V VO4 A1578 -19.097 16.967 0.500 1.00 17.21 V "
)
assert not atom.is_protonated
p = propka.protonate.Protonate()
p.protonate_atom(atom)
assert atom.is_protonated
assert atom.number_of_protons_to_add == 6
|
<commit_before><commit_msg>Add unit test for protonate.py<commit_after>import propka.atom
import propka.protonate
def test_protonate_atom():
atom = propka.atom.Atom(
"HETATM 4479 V VO4 A1578 -19.097 16.967 0.500 1.00 17.21 V "
)
assert not atom.is_protonated
p = propka.protonate.Protonate()
p.protonate_atom(atom)
assert atom.is_protonated
assert atom.number_of_protons_to_add == 6
|
|
2bf763e39e91ef989c121bba420e4ae09ea0a569
|
algorithms/diagonal_difference/kevin.py
|
algorithms/diagonal_difference/kevin.py
|
#!/usr/bin/env python
def get_matrix_row_from_input():
return [int(index) for index in input().strip().split(' ')]
n = int(input().strip())
primary_diag_sum = 0
secondary_diag_sum = 0
for row_count in range(n):
row = get_matrix_row_from_input()
primary_diag_sum += row[row_count]
secondary_diag_sum += row[-1 - row_count]
print(abs(primary_diag_sum - secondary_diag_sum))
|
Add Diagonal Difference HackerRank Problem
|
Add Diagonal Difference HackerRank Problem
* https://www.hackerrank.com/challenges/diagonal-difference
|
Python
|
mit
|
PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank
|
Add Diagonal Difference HackerRank Problem
* https://www.hackerrank.com/challenges/diagonal-difference
|
#!/usr/bin/env python
def get_matrix_row_from_input():
return [int(index) for index in input().strip().split(' ')]
n = int(input().strip())
primary_diag_sum = 0
secondary_diag_sum = 0
for row_count in range(n):
row = get_matrix_row_from_input()
primary_diag_sum += row[row_count]
secondary_diag_sum += row[-1 - row_count]
print(abs(primary_diag_sum - secondary_diag_sum))
|
<commit_before><commit_msg>Add Diagonal Difference HackerRank Problem
* https://www.hackerrank.com/challenges/diagonal-difference<commit_after>
|
#!/usr/bin/env python
def get_matrix_row_from_input():
return [int(index) for index in input().strip().split(' ')]
n = int(input().strip())
primary_diag_sum = 0
secondary_diag_sum = 0
for row_count in range(n):
row = get_matrix_row_from_input()
primary_diag_sum += row[row_count]
secondary_diag_sum += row[-1 - row_count]
print(abs(primary_diag_sum - secondary_diag_sum))
|
Add Diagonal Difference HackerRank Problem
* https://www.hackerrank.com/challenges/diagonal-difference#!/usr/bin/env python
def get_matrix_row_from_input():
return [int(index) for index in input().strip().split(' ')]
n = int(input().strip())
primary_diag_sum = 0
secondary_diag_sum = 0
for row_count in range(n):
row = get_matrix_row_from_input()
primary_diag_sum += row[row_count]
secondary_diag_sum += row[-1 - row_count]
print(abs(primary_diag_sum - secondary_diag_sum))
|
<commit_before><commit_msg>Add Diagonal Difference HackerRank Problem
* https://www.hackerrank.com/challenges/diagonal-difference<commit_after>#!/usr/bin/env python
def get_matrix_row_from_input():
return [int(index) for index in input().strip().split(' ')]
n = int(input().strip())
primary_diag_sum = 0
secondary_diag_sum = 0
for row_count in range(n):
row = get_matrix_row_from_input()
primary_diag_sum += row[row_count]
secondary_diag_sum += row[-1 - row_count]
print(abs(primary_diag_sum - secondary_diag_sum))
|
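A quick worked example of the index arithmetic: row[row_count] walks the primary diagonal and row[-1 - row_count] walks the secondary one.

matrix = [[11, 2, 4],
          [4, 5, 6],
          [10, 8, -12]]
primary = sum(matrix[i][i] for i in range(3))         # 11 + 5 + (-12) = 4
secondary = sum(matrix[i][-1 - i] for i in range(3))  # 4 + 5 + 10 = 19
print(abs(primary - secondary))                       # prints 15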
|
9e6a016c5a59b25199426f6825b2c83571997e68
|
build/android/buildbot/tests/bb_run_bot_test.py
|
build/android/buildbot/tests/bb_run_bot_test.py
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotsWithTesting(bot_step_map):
code = 0
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bot_step_map]
for bot, proc in procs:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running bb_run_bot with id="%s"' % bot, err
return code
def main():
return RunBotsWithTesting(bb_run_bot.GetBotStepMap())
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotProcesses(bot_process_map):
code = 0
for bot, proc in bot_process_map:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running the bot script with id="%s"' % bot, err
return code
def main():
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bb_run_bot.GetBotStepMap()]
return RunBotProcesses(procs)
if __name__ == '__main__':
sys.exit(main())
|
Refactor buildbot tests so that they can be used downstream.
|
[Android] Refactor buildbot tests so that they can be used downstream.
I refactored in the wrong way in r211209 (https://chromiumcodereview.appspot.com/18325030/). This CL fixes that. Note that r211209 is not broken; it is just not usable downstream.
BUG=249997
NOTRY=True
Review URL: https://chromiumcodereview.appspot.com/18202005
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@211454 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
ondra-novak/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,jaruba/chromium.src,ltilve/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,hgl888/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,patrickm/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,patrickm/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.src,ChromiumWebApps/chromium,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,jaruba/chromium.src,mogoweb/chromium-crosswalk,patrickm/chromium.src,anirudhSK/chromium,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,anirudhSK/chromium,ltilve/chromium,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,Chilledheart/chromium,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,axinging/chromium-crosswalk,ChromiumWebApps/chromium,ltilve/chromium,Chilledheart/chromium,Jonekee/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,patrickm/chromium.src,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,ChromiumWebApps/chromium,dushu1203/chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,patrickm/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,Chilledheart/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,jaruba/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,jaruba/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,jaruba/chromium.src,dednal/chromium.src,bright-sparks/chromium
-spacewalk,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,M4sse/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,littlstar/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,dednal/chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,littlstar/chromium.src,markYoungH/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,ltilve/chromium,dushu1203/chromium.src,Just-D/chromium-1,markYoungH/chromium.src,jaruba/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,Jonekee/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,Just-D/chromium-1,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,ChromiumWebApps/chromium,ChromiumWebApps/chromium,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,littlstar/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,ChromiumWebApps/chromium,Fireblend/chromium-crosswal
k,anirudhSK/chromium
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotsWithTesting(bot_step_map):
code = 0
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bot_step_map]
for bot, proc in procs:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running bb_run_bot with id="%s"' % bot, err
return code
def main():
return RunBotsWithTesting(bb_run_bot.GetBotStepMap())
if __name__ == '__main__':
sys.exit(main())
[Android] Refactor buildbot tests so that they can be used downstream.
I refactored in the wrong way in r211209 (https://chromiumcodereview.appspot.com/18325030/). This CL fixes that. Note that r211209 is not broken; it is just not usable downstream.
BUG=249997
NOTRY=True
Review URL: https://chromiumcodereview.appspot.com/18202005
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@211454 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotProcesses(bot_process_map):
code = 0
for bot, proc in bot_process_map:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running the bot script with id="%s"' % bot, err
return code
def main():
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bb_run_bot.GetBotStepMap()]
return RunBotProcesses(procs)
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotsWithTesting(bot_step_map):
code = 0
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bot_step_map]
for bot, proc in procs:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running bb_run_bot with id="%s"' % bot, err
return code
def main():
return RunBotsWithTesting(bb_run_bot.GetBotStepMap())
if __name__ == '__main__':
sys.exit(main())
<commit_msg>[Android] Refactor buildbot tests so that they can be used downstream.
I refactored in the wrong way in r211209 (https://chromiumcodereview.appspot.com/18325030/). This CL fixes that. Note that r211209 is not broken; it is just not usable downstream.
BUG=249997
NOTRY=True
Review URL: https://chromiumcodereview.appspot.com/18202005
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@211454 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotProcesses(bot_process_map):
code = 0
for bot, proc in bot_process_map:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running the bot script with id="%s"' % bot, err
return code
def main():
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bb_run_bot.GetBotStepMap()]
return RunBotProcesses(procs)
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotsWithTesting(bot_step_map):
code = 0
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bot_step_map]
for bot, proc in procs:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running bb_run_bot with id="%s"' % bot, err
return code
def main():
return RunBotsWithTesting(bb_run_bot.GetBotStepMap())
if __name__ == '__main__':
sys.exit(main())
[Android] Refactor buildbot tests so that they can be used downstream.
I refactored in the wrong way in r211209 (https://chromiumcodereview.appspot.com/18325030/). This CL fixes that. Note that r211209 is not broken; it is just not usable downstream.
BUG=249997
NOTRY=True
Review URL: https://chromiumcodereview.appspot.com/18202005
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@211454 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotProcesses(bot_process_map):
code = 0
for bot, proc in bot_process_map:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running the bot script with id="%s"' % bot, err
return code
def main():
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bb_run_bot.GetBotStepMap()]
return RunBotProcesses(procs)
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotsWithTesting(bot_step_map):
code = 0
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bot_step_map]
for bot, proc in procs:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running bb_run_bot with id="%s"' % bot, err
return code
def main():
return RunBotsWithTesting(bb_run_bot.GetBotStepMap())
if __name__ == '__main__':
sys.exit(main())
<commit_msg>[Android] Refactor buildbot tests so that they can be used downstream.
I refactored in the wrong way in r211209 (https://chromiumcodereview.appspot.com/18325030/). This CL fixes that. Note that r211209 is not broken; it is just not usable downstream.
BUG=249997
NOTRY=True
Review URL: https://chromiumcodereview.appspot.com/18202005
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@211454 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotProcesses(bot_process_map):
code = 0
for bot, proc in bot_process_map:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running the bot script with id="%s"' % bot, err
return code
def main():
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bb_run_bot.GetBotStepMap()]
return RunBotProcesses(procs)
if __name__ == '__main__':
sys.exit(main())
|
eb9f9d8bfa5ea278e1fb39c59ed660a223b1f6a9
|
api/__init__.py
|
api/__init__.py
|
from flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
db.init_app(application)
return application
from api.api import *
|
Add flask api app creation to init
|
Add flask api app creation to init
|
Python
|
mit
|
EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list
|
Add flask api app creation to init
|
from flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
db.init_app(application)
return application
from api.api import *
|
<commit_before><commit_msg>Add flask api app creation to init<commit_after>
|
from flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
db.init_app(application)
return application
from api.api import *
|
Add flask api app creation to initfrom flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
db.init_app(application)
return application
from api.api import *
|
<commit_before><commit_msg>Add flask api app creation to init<commit_after>from flask_sqlalchemy import SQLAlchemy
import connexion
from config import config
db = SQLAlchemy()
def create_app(config_name):
app = connexion.FlaskApp(__name__, specification_dir='swagger/')
app.add_api('swagger.yaml')
application = app.app
application.config.from_object(config[config_name])
db.init_app(application)
return application
from api.api import *
|
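Typical use of the factory above; the 'development' config key and the port are assumptions about the surrounding project. The trailing "from api.api import *" in the module is a common way to register routes only after db exists, avoiding a circular import.

from api import create_app

application = create_app('development')  # assumed key in the config dict

if __name__ == '__main__':
    application.run(port=5000)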
|
24f21146b01ff75a244df40d1626c54883abeb1a
|
lib/helpers.py
|
lib/helpers.py
|
#! /usr/bin/env python2.7
import datetime
def typecast_json(o):
if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
return o.isoformat()
else:
return o
def split_dict(src, keys):
result = dict()
for k in set(src.keys()) & set(keys):
result[k] = src[k]
return result
|
Add helper-lib for json object conversion and split dicts
|
Add helper-lib for json object conversion and split dicts
|
Python
|
bsd-3-clause
|
UngaForskareStockholm/medlem2
|
Add helper-lib for json object conversion and split dicts
|
#! /usr/bin/env python2.7
import datetime
def typecast_json(o):
if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
return o.isoformat()
else:
return o
def split_dict(src, keys):
result = dict()
for k in set(src.keys()) & set(keys):
result[k] = src[k]
return result
|
<commit_before><commit_msg>Add helper-lib for json object conversion and split dicts<commit_after>
|
#! /usr/bin/env python2.7
import datetime
def typecast_json(o):
if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
return o.isoformat()
else:
return o
def split_dict(src, keys):
result = dict()
for k in set(src.keys()) & set(keys):
result[k] = src[k]
return result
|
Add helper-lib for json object conversion and split dicts#! /usr/bin/env python2.7
import datetime
def typecast_json(o):
if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
return o.isoformat()
else:
return o
def split_dict(src, keys):
result = dict()
for k in set(src.keys()) & set(keys):
result[k] = src[k]
return result
|
<commit_before><commit_msg>Add helper-lib for json object conversion and split dicts<commit_after>#! /usr/bin/env python2.7
import datetime
def typecast_json(o):
if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
return o.isoformat()
else:
return o
def split_dict(src, keys):
result = dict()
for k in set(src.keys()) & set(keys):
result[k] = src[k]
return result
|
|
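typecast_json is shaped like a default hook for json.dumps (it is called only for objects json cannot serialize itself), and split_dict is a key whitelist filter. Probable usage, assuming that intent; both helpers come from the module above:

import datetime
import json

payload = {'joined': datetime.date(2015, 1, 31), 'name': 'Ada'}
print(json.dumps(payload, default=typecast_json))
# {"joined": "2015-01-31", "name": "Ada"}

print(split_dict({'name': 'Ada', 'role': 'admin', 'token': 'x'}, ['name', 'role']))
# {'name': 'Ada', 'role': 'admin'}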
0f5c0168b257436882f837e5d521cce46a740ad6
|
finat/greek_alphabet.py
|
finat/greek_alphabet.py
|
"""Translation table from utf-8 to greek variable names, taken from:
https://gist.github.com/piquadrat/765262#file-greek_alphabet-py
"""
def translate_symbol(symbol):
"""Translates utf-8 sub-strings into compilable variable names"""
name = symbol.decode("utf-8")
for k, v in greek_alphabet.iteritems():
name = name.replace(k, v)
return name
greek_alphabet = {
u'\u0391': 'Alpha',
u'\u0392': 'Beta',
u'\u0393': 'Gamma',
u'\u0394': 'Delta',
u'\u0395': 'Epsilon',
u'\u0396': 'Zeta',
u'\u0397': 'Eta',
u'\u0398': 'Theta',
u'\u0399': 'Iota',
u'\u039A': 'Kappa',
u'\u039B': 'Lamda',
u'\u039C': 'Mu',
u'\u039D': 'Nu',
u'\u039E': 'Xi',
u'\u039F': 'Omicron',
u'\u03A0': 'Pi',
u'\u03A1': 'Rho',
u'\u03A3': 'Sigma',
u'\u03A4': 'Tau',
u'\u03A5': 'Upsilon',
u'\u03A6': 'Phi',
u'\u03A7': 'Chi',
u'\u03A8': 'Psi',
u'\u03A9': 'Omega',
u'\u03B1': 'alpha',
u'\u03B2': 'beta',
u'\u03B3': 'gamma',
u'\u03B4': 'delta',
u'\u03B5': 'epsilon',
u'\u03B6': 'zeta',
u'\u03B7': 'eta',
u'\u03B8': 'theta',
u'\u03B9': 'iota',
u'\u03BA': 'kappa',
u'\u03BB': 'lamda',
u'\u03BC': 'mu',
u'\u03BD': 'nu',
u'\u03BE': 'xi',
u'\u03BF': 'omicron',
u'\u03C0': 'pi',
u'\u03C1': 'rho',
u'\u03C3': 'sigma',
u'\u03C4': 'tau',
u'\u03C5': 'upsilon',
u'\u03C6': 'phi',
u'\u03C7': 'chi',
u'\u03C8': 'psi',
u'\u03C9': 'omega',
}
|
Add symbol translator to make utf-8 variables compilable
|
Coffee: Add symbol translator to make utf-8 variables compilable
|
Python
|
mit
|
FInAT/FInAT
|
Coffee: Add symbol translator to make utf-8 variables compilable
|
"""Translation table from utf-8 to greek variable names, taken from:
https://gist.github.com/piquadrat/765262#file-greek_alphabet-py
"""
def translate_symbol(symbol):
"""Translates utf-8 sub-strings into compilable variable names"""
name = symbol.decode("utf-8")
for k, v in greek_alphabet.iteritems():
name = name.replace(k, v)
return name
greek_alphabet = {
u'\u0391': 'Alpha',
u'\u0392': 'Beta',
u'\u0393': 'Gamma',
u'\u0394': 'Delta',
u'\u0395': 'Epsilon',
u'\u0396': 'Zeta',
u'\u0397': 'Eta',
u'\u0398': 'Theta',
u'\u0399': 'Iota',
u'\u039A': 'Kappa',
u'\u039B': 'Lamda',
u'\u039C': 'Mu',
u'\u039D': 'Nu',
u'\u039E': 'Xi',
u'\u039F': 'Omicron',
u'\u03A0': 'Pi',
u'\u03A1': 'Rho',
u'\u03A3': 'Sigma',
u'\u03A4': 'Tau',
u'\u03A5': 'Upsilon',
u'\u03A6': 'Phi',
u'\u03A7': 'Chi',
u'\u03A8': 'Psi',
u'\u03A9': 'Omega',
u'\u03B1': 'alpha',
u'\u03B2': 'beta',
u'\u03B3': 'gamma',
u'\u03B4': 'delta',
u'\u03B5': 'epsilon',
u'\u03B6': 'zeta',
u'\u03B7': 'eta',
u'\u03B8': 'theta',
u'\u03B9': 'iota',
u'\u03BA': 'kappa',
u'\u03BB': 'lamda',
u'\u03BC': 'mu',
u'\u03BD': 'nu',
u'\u03BE': 'xi',
u'\u03BF': 'omicron',
u'\u03C0': 'pi',
u'\u03C1': 'rho',
u'\u03C3': 'sigma',
u'\u03C4': 'tau',
u'\u03C5': 'upsilon',
u'\u03C6': 'phi',
u'\u03C7': 'chi',
u'\u03C8': 'psi',
u'\u03C9': 'omega',
}
|
<commit_before><commit_msg>Coffee: Add symbol translator to make utf-8 variables compilable<commit_after>
|
"""Translation table from utf-8 to greek variable names, taken from:
https://gist.github.com/piquadrat/765262#file-greek_alphabet-py
"""
def translate_symbol(symbol):
"""Translates utf-8 sub-strings into compilable variable names"""
name = symbol.decode("utf-8")
for k, v in greek_alphabet.iteritems():
name = name.replace(k, v)
return name
greek_alphabet = {
u'\u0391': 'Alpha',
u'\u0392': 'Beta',
u'\u0393': 'Gamma',
u'\u0394': 'Delta',
u'\u0395': 'Epsilon',
u'\u0396': 'Zeta',
u'\u0397': 'Eta',
u'\u0398': 'Theta',
u'\u0399': 'Iota',
u'\u039A': 'Kappa',
u'\u039B': 'Lamda',
u'\u039C': 'Mu',
u'\u039D': 'Nu',
u'\u039E': 'Xi',
u'\u039F': 'Omicron',
u'\u03A0': 'Pi',
u'\u03A1': 'Rho',
u'\u03A3': 'Sigma',
u'\u03A4': 'Tau',
u'\u03A5': 'Upsilon',
u'\u03A6': 'Phi',
u'\u03A7': 'Chi',
u'\u03A8': 'Psi',
u'\u03A9': 'Omega',
u'\u03B1': 'alpha',
u'\u03B2': 'beta',
u'\u03B3': 'gamma',
u'\u03B4': 'delta',
u'\u03B5': 'epsilon',
u'\u03B6': 'zeta',
u'\u03B7': 'eta',
u'\u03B8': 'theta',
u'\u03B9': 'iota',
u'\u03BA': 'kappa',
u'\u03BB': 'lamda',
u'\u03BC': 'mu',
u'\u03BD': 'nu',
u'\u03BE': 'xi',
u'\u03BF': 'omicron',
u'\u03C0': 'pi',
u'\u03C1': 'rho',
u'\u03C3': 'sigma',
u'\u03C4': 'tau',
u'\u03C5': 'upsilon',
u'\u03C6': 'phi',
u'\u03C7': 'chi',
u'\u03C8': 'psi',
u'\u03C9': 'omega',
}
|
Coffee: Add symbol translator to make utf-8 variables compilable"""Translation table from utf-8 to greek variable names, taken from:
https://gist.github.com/piquadrat/765262#file-greek_alphabet-py
"""
def translate_symbol(symbol):
"""Translates utf-8 sub-strings into compilable variable names"""
name = symbol.decode("utf-8")
for k, v in greek_alphabet.iteritems():
name = name.replace(k, v)
return name
greek_alphabet = {
u'\u0391': 'Alpha',
u'\u0392': 'Beta',
u'\u0393': 'Gamma',
u'\u0394': 'Delta',
u'\u0395': 'Epsilon',
u'\u0396': 'Zeta',
u'\u0397': 'Eta',
u'\u0398': 'Theta',
u'\u0399': 'Iota',
u'\u039A': 'Kappa',
u'\u039B': 'Lamda',
u'\u039C': 'Mu',
u'\u039D': 'Nu',
u'\u039E': 'Xi',
u'\u039F': 'Omicron',
u'\u03A0': 'Pi',
u'\u03A1': 'Rho',
u'\u03A3': 'Sigma',
u'\u03A4': 'Tau',
u'\u03A5': 'Upsilon',
u'\u03A6': 'Phi',
u'\u03A7': 'Chi',
u'\u03A8': 'Psi',
u'\u03A9': 'Omega',
u'\u03B1': 'alpha',
u'\u03B2': 'beta',
u'\u03B3': 'gamma',
u'\u03B4': 'delta',
u'\u03B5': 'epsilon',
u'\u03B6': 'zeta',
u'\u03B7': 'eta',
u'\u03B8': 'theta',
u'\u03B9': 'iota',
u'\u03BA': 'kappa',
u'\u03BB': 'lamda',
u'\u03BC': 'mu',
u'\u03BD': 'nu',
u'\u03BE': 'xi',
u'\u03BF': 'omicron',
u'\u03C0': 'pi',
u'\u03C1': 'rho',
u'\u03C3': 'sigma',
u'\u03C4': 'tau',
u'\u03C5': 'upsilon',
u'\u03C6': 'phi',
u'\u03C7': 'chi',
u'\u03C8': 'psi',
u'\u03C9': 'omega',
}
|
<commit_before><commit_msg>Coffee: Add symbol translator to make utf-8 variables compilable<commit_after>"""Translation table from utf-8 to greek variable names, taken from:
https://gist.github.com/piquadrat/765262#file-greek_alphabet-py
"""
def translate_symbol(symbol):
"""Translates utf-8 sub-strings into compilable variable names"""
name = symbol.decode("utf-8")
for k, v in greek_alphabet.iteritems():
name = name.replace(k, v)
return name
greek_alphabet = {
u'\u0391': 'Alpha',
u'\u0392': 'Beta',
u'\u0393': 'Gamma',
u'\u0394': 'Delta',
u'\u0395': 'Epsilon',
u'\u0396': 'Zeta',
u'\u0397': 'Eta',
u'\u0398': 'Theta',
u'\u0399': 'Iota',
u'\u039A': 'Kappa',
u'\u039B': 'Lamda',
u'\u039C': 'Mu',
u'\u039D': 'Nu',
u'\u039E': 'Xi',
u'\u039F': 'Omicron',
u'\u03A0': 'Pi',
u'\u03A1': 'Rho',
u'\u03A3': 'Sigma',
u'\u03A4': 'Tau',
u'\u03A5': 'Upsilon',
u'\u03A6': 'Phi',
u'\u03A7': 'Chi',
u'\u03A8': 'Psi',
u'\u03A9': 'Omega',
u'\u03B1': 'alpha',
u'\u03B2': 'beta',
u'\u03B3': 'gamma',
u'\u03B4': 'delta',
u'\u03B5': 'epsilon',
u'\u03B6': 'zeta',
u'\u03B7': 'eta',
u'\u03B8': 'theta',
u'\u03B9': 'iota',
u'\u03BA': 'kappa',
u'\u03BB': 'lamda',
u'\u03BC': 'mu',
u'\u03BD': 'nu',
u'\u03BE': 'xi',
u'\u03BF': 'omicron',
u'\u03C0': 'pi',
u'\u03C1': 'rho',
u'\u03C3': 'sigma',
u'\u03C4': 'tau',
u'\u03C5': 'upsilon',
u'\u03C6': 'phi',
u'\u03C7': 'chi',
u'\u03C8': 'psi',
u'\u03C9': 'omega',
}
|
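translate_symbol above is Python 2 idiom (str.decode on text input, dict.iteritems). A Python 3 equivalent of the same translation, for comparison, reusing the greek_alphabet table above:

def translate_symbol_py3(symbol):
    """Same mapping, tolerating both bytes and str input."""
    name = symbol.decode('utf-8') if isinstance(symbol, bytes) else symbol
    for key, value in greek_alphabet.items():
        name = name.replace(key, value)
    return name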
|
9e128fdd5af0598a233416de5a1e8f2d3a74fdc0
|
spaces/migrations/0006_unique_space_document.py
|
spaces/migrations/0006_unique_space_document.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-15 02:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('spaces', '0005_document_space_doc'),
]
operations = [
migrations.AlterField(
model_name='space',
name='name',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='space',
name='path',
field=models.CharField(max_length=40, unique=True),
),
migrations.AlterUniqueTogether(
name='document',
unique_together=set([('path', 'parent')]),
),
]
|
Enforce unique paths and names
|
Enforce unique paths and names
|
Python
|
mit
|
jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces
|
Enforce unique paths and names
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-15 02:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('spaces', '0005_document_space_doc'),
]
operations = [
migrations.AlterField(
model_name='space',
name='name',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='space',
name='path',
field=models.CharField(max_length=40, unique=True),
),
migrations.AlterUniqueTogether(
name='document',
unique_together=set([('path', 'parent')]),
),
]
|
<commit_before><commit_msg>Enforce unique paths and names<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-15 02:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('spaces', '0005_document_space_doc'),
]
operations = [
migrations.AlterField(
model_name='space',
name='name',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='space',
name='path',
field=models.CharField(max_length=40, unique=True),
),
migrations.AlterUniqueTogether(
name='document',
unique_together=set([('path', 'parent')]),
),
]
|
Enforce unique paths and names# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-15 02:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('spaces', '0005_document_space_doc'),
]
operations = [
migrations.AlterField(
model_name='space',
name='name',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='space',
name='path',
field=models.CharField(max_length=40, unique=True),
),
migrations.AlterUniqueTogether(
name='document',
unique_together=set([('path', 'parent')]),
),
]
|
<commit_before><commit_msg>Enforce unique paths and names<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-15 02:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('spaces', '0005_document_space_doc'),
]
operations = [
migrations.AlterField(
model_name='space',
name='name',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='space',
name='path',
field=models.CharField(max_length=40, unique=True),
),
migrations.AlterUniqueTogether(
name='document',
unique_together=set([('path', 'parent')]),
),
]
|
|
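The AlterUniqueTogether and AlterField operations above mirror declarations on the models themselves. A minimal sketch of what spaces/models.py would declare to generate this migration — field types and the parent relation are assumptions, only the uniqueness options are implied by the operations:

from django.db import models

class Space(models.Model):
    name = models.CharField(max_length=100, unique=True)
    path = models.CharField(max_length=40, unique=True)

class Document(models.Model):
    path = models.CharField(max_length=40)
    parent = models.ForeignKey('self', null=True, blank=True, on_delete=models.CASCADE)

    class Meta:
        # makemigrations turns this into the AlterUniqueTogether operation above.
        unique_together = (('path', 'parent'),)

Running `python manage.py makemigrations spaces` against models like these is what produces the operations shown.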
8249d33898500d9d39e8bee3d44d39c2a6034659
|
scripts/create_overlays.py
|
scripts/create_overlays.py
|
"""Varcan smart tool."""
import click
from dtoolcore import DataSet
@click.command()
@click.argument('dataset_uri')
@click.option('--config-path', type=click.Path(exists=True))
def main(dataset_uri, config_path=None):
dataset = DataSet.from_uri(dataset_uri, config_path=config_path)
def name_from_identifier(identifier):
item_properties = dataset.item_properties(identifier)
name = item_properties['relpath'].rsplit('.', 1)[0]
return name
useful_name_overlay = {
identifier: name_from_identifier(identifier)
for identifier in dataset.identifiers
}
dataset.put_overlay("useful_name", useful_name_overlay)
if __name__ == '__main__':
main()
|
Add script to create overlays
|
Add script to create overlays
|
Python
|
mit
|
JIC-Image-Analysis/senescence-in-field,JIC-Image-Analysis/senescence-in-field,JIC-Image-Analysis/senescence-in-field
|
Add script to create overlays
|
"""Varcan smart tool."""
import click
from dtoolcore import DataSet
@click.command()
@click.argument('dataset_uri')
@click.option('--config-path', type=click.Path(exists=True))
def main(dataset_uri, config_path=None):
dataset = DataSet.from_uri(dataset_uri, config_path=config_path)
def name_from_identifier(identifier):
item_properties = dataset.item_properties(identifier)
name = item_properties['relpath'].rsplit('.', 1)[0]
return name
useful_name_overlay = {
identifier: name_from_identifier(identifier)
for identifier in dataset.identifiers
}
dataset.put_overlay("useful_name", useful_name_overlay)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to create overlays<commit_after>
|
"""Varcan smart tool."""
import click
from dtoolcore import DataSet
@click.command()
@click.argument('dataset_uri')
@click.option('--config-path', type=click.Path(exists=True))
def main(dataset_uri, config_path=None):
dataset = DataSet.from_uri(dataset_uri, config_path=config_path)
def name_from_identifier(identifier):
item_properties = dataset.item_properties(identifier)
name = item_properties['relpath'].rsplit('.', 1)[0]
return name
useful_name_overlay = {
identifier: name_from_identifier(identifier)
for identifier in dataset.identifiers
}
dataset.put_overlay("useful_name", useful_name_overlay)
if __name__ == '__main__':
main()
|
Add script to create overlays"""Varcan smart tool."""
import click
from dtoolcore import DataSet
@click.command()
@click.argument('dataset_uri')
@click.option('--config-path', type=click.Path(exists=True))
def main(dataset_uri, config_path=None):
dataset = DataSet.from_uri(dataset_uri, config_path=config_path)
def name_from_identifier(identifier):
item_properties = dataset.item_properties(identifier)
name = item_properties['relpath'].rsplit('.', 1)[0]
return name
useful_name_overlay = {
identifier: name_from_identifier(identifier)
for identifier in dataset.identifiers
}
dataset.put_overlay("useful_name", useful_name_overlay)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to create overlays<commit_after>"""Create a "useful_name" overlay on a dtool dataset."""
import click
from dtoolcore import DataSet
@click.command()
@click.argument('dataset_uri')
@click.option('--config-path', type=click.Path(exists=True))
def main(dataset_uri, config_path=None):
dataset = DataSet.from_uri(dataset_uri, config_path=config_path)
def name_from_identifier(identifier):
item_properties = dataset.item_properties(identifier)
name = item_properties['relpath'].rsplit('.', 1)[0]
return name
useful_name_overlay = {
identifier: name_from_identifier(identifier)
for identifier in dataset.identifiers
}
dataset.put_overlay("useful_name", useful_name_overlay)
if __name__ == '__main__':
main()
|
|
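The overlay written by this script maps each item identifier to its relpath minus the final extension. A short usage sketch for reading it back, assuming dtoolcore's corresponding get_overlay accessor and a placeholder dataset URI:

from dtoolcore import DataSet

dataset = DataSet.from_uri("file:///path/to/dataset")  # placeholder URI
names = dataset.get_overlay("useful_name")
for identifier, name in names.items():
    # e.g. an item stored as "leaf_001.png" shows up here as "leaf_001"
    print(identifier, name)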
0ba11dd47dac04f3f7a314cf320558ccbc9eb148
|
integration-test/1477-water-layer-too-big.py
|
integration-test/1477-water-layer-too-big.py
|
# -*- encoding: utf-8 -*-
from . import FixtureTest
class WaterLayerTooBigTest(FixtureTest):
def test_drop_label(self):
from tilequeue.tile import calc_meters_per_pixel_area
from shapely.ops import transform
from tilequeue.tile import reproject_mercator_to_lnglat
import math
import dsl
for zoom in range(5, 16):
area = 270.0 * calc_meters_per_pixel_area(zoom)
radius = math.sqrt(area / math.pi)
coord = 2 ** (zoom - 1)
# larger feature should retain name
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius * 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': 'Foo',
})
# smaller shape should drop it
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius / 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': type(None),
})
|
Add test for water polygon name dropping.
|
Add test for water polygon name dropping.
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
Add test for water polygon name dropping.
|
# -*- encoding: utf-8 -*-
from . import FixtureTest
class WaterLayerTooBigTest(FixtureTest):
def test_drop_label(self):
from tilequeue.tile import calc_meters_per_pixel_area
from shapely.ops import transform
from tilequeue.tile import reproject_mercator_to_lnglat
import math
import dsl
for zoom in range(5, 16):
area = 270.0 * calc_meters_per_pixel_area(zoom)
radius = math.sqrt(area / math.pi)
coord = 2 ** (zoom - 1)
# larger feature should retain name
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius * 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': 'Foo',
})
# smaller shape should drop it
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius / 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': type(None),
})
|
<commit_before><commit_msg>Add test for water polygon name dropping.<commit_after>
|
# -*- encoding: utf-8 -*-
from . import FixtureTest
class WaterLayerTooBigTest(FixtureTest):
def test_drop_label(self):
from tilequeue.tile import calc_meters_per_pixel_area
from shapely.ops import transform
from tilequeue.tile import reproject_mercator_to_lnglat
import math
import dsl
for zoom in range(5, 16):
area = 270.0 * calc_meters_per_pixel_area(zoom)
radius = math.sqrt(area / math.pi)
coord = 2 ** (zoom - 1)
# larger feature should retain name
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius * 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': 'Foo',
})
# smaller shape should drop it
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius / 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': type(None),
})
|
Add test for water polygon name dropping.# -*- encoding: utf-8 -*-
from . import FixtureTest
class WaterLayerTooBigTest(FixtureTest):
def test_drop_label(self):
from tilequeue.tile import calc_meters_per_pixel_area
from shapely.ops import transform
from tilequeue.tile import reproject_mercator_to_lnglat
import math
import dsl
for zoom in range(5, 16):
area = 270.0 * calc_meters_per_pixel_area(zoom)
radius = math.sqrt(area / math.pi)
coord = 2 ** (zoom - 1)
# larger feature should retain name
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius * 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': 'Foo',
})
# smaller shape should drop it
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius / 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': type(None),
})
|
<commit_before><commit_msg>Add test for water polygon name dropping.<commit_after># -*- encoding: utf-8 -*-
from . import FixtureTest
class WaterLayerTooBigTest(FixtureTest):
def test_drop_label(self):
from tilequeue.tile import calc_meters_per_pixel_area
from shapely.ops import transform
from tilequeue.tile import reproject_mercator_to_lnglat
import math
import dsl
for zoom in range(5, 16):
area = 270.0 * calc_meters_per_pixel_area(zoom)
radius = math.sqrt(area / math.pi)
coord = 2 ** (zoom - 1)
# larger feature should retain name
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius * 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': 'Foo',
})
# smaller shape should drop it
shape = dsl.tile_centre_shape(
zoom, coord, coord).buffer(radius / 1.1)
shape_lnglat = transform(
reproject_mercator_to_lnglat, shape)
self.generate_fixtures(
dsl.way(1, shape_lnglat, {
'natural': 'water',
'name': 'Foo',
}),
)
self.assert_has_feature(
zoom, coord, coord, 'water', {
'kind': 'water',
'name': type(None),
})
|
|
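The radius in the test comes from inverting the area threshold: a water polygon keeps its name only if it covers more than 270 px² at the given zoom, so the test buffers a point to just above and just below sqrt(area/pi). The same arithmetic, standalone, using the standard Web-Mercator constants (40075016.686 m world width, 256 px tiles) rather than tilequeue's calc_meters_per_pixel_area helper, which may differ in detail:

import math

def meters_per_pixel(zoom, world_m=40075016.686, tile_px=256):
    # Width of one pixel in metres at this zoom, Web-Mercator approximation.
    return world_m / (tile_px * 2 ** zoom)

def label_radius_m(zoom, pixel_area=270.0):
    # Radius of a circle whose area equals the pixel-area threshold.
    area_m2 = pixel_area * meters_per_pixel(zoom) ** 2
    return math.sqrt(area_m2 / math.pi)

print(label_radius_m(10))  # threshold radius in metres at zoom 10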
865dc29421c1e9ef4bf340bf32164863cc5f2006
|
app/raw/management/commands/list_spiders.py
|
app/raw/management/commands/list_spiders.py
|
from django.core.management import BaseCommand
from raw.utils import list_spiders
class Command(BaseCommand):
help = 'List installed spiders'
def handle(self, *args, **options):
for spider in list_spiders():
print spider
|
Add management command to list installed spiders
|
Add management command to list installed spiders
|
Python
|
mit
|
legco-watch/legco-watch,comsaint/legco-watch,legco-watch/legco-watch,comsaint/legco-watch,legco-watch/legco-watch,comsaint/legco-watch,comsaint/legco-watch,legco-watch/legco-watch
|
Add management command to list installed spiders
|
from django.core.management import BaseCommand
from raw.utils import list_spiders
class Command(BaseCommand):
help = 'List installed spiders'
def handle(self, *args, **options):
for spider in list_spiders():
print spider
|
<commit_before><commit_msg>Add management command to list installed spiders<commit_after>
|
from django.core.management import BaseCommand
from raw.utils import list_spiders
class Command(BaseCommand):
help = 'List installed spiders'
def handle(self, *args, **options):
for spider in list_spiders():
print spider
|
Add management command to list installed spidersfrom django.core.management import BaseCommand
from raw.utils import list_spiders
class Command(BaseCommand):
help = 'List installed spiders'
def handle(self, *args, **options):
for spider in list_spiders():
print spider
|
<commit_before><commit_msg>Add management command to list installed spiders<commit_after>from django.core.management import BaseCommand
from raw.utils import list_spiders
class Command(BaseCommand):
help = 'List installed spiders'
def handle(self, *args, **options):
for spider in list_spiders():
print spider
|
|
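As a Django management command this is invoked through manage.py, and the bare `print spider` statement pins the project to Python 2:

$ python manage.py list_spiders

A Python 3-compatible handler body would route output through the command's own stream, which also makes it testable via captured stdout:

def handle(self, *args, **options):
    for spider in list_spiders():
        self.stdout.write(spider)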
5f12ada7fe0ddb44274e18decbaea0d05ab4471f
|
CodeFights/lineUp.py
|
CodeFights/lineUp.py
|
#!/usr/local/bin/python
# Code Fights Lineup Problem
def lineUp(commands):
aligned, tmp = 0, 0
com_dict = {"L": 1, "A": 0, "R": -1}
for c in commands:
tmp += com_dict[c]
if tmp % 2 == 0:
aligned += 1
return aligned
def main():
tests = [
["LLARL", 3],
["RLR", 1],
["", 0],
["L", 0],
["A", 1],
["AAAAAAAAAAAAAAA", 15],
["RRRRRRRRRRLLLLLLLLLRRRRLLLLLLLLLL", 16],
["AALAAALARAR", 5]
]
for t in tests:
res = lineUp(t[0])
ans = t[1]
if ans == res:
print("PASSED: lineUp({}) returned {}"
.format(t[0], res))
else:
print("FAILED: lineUp({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights lineup problem
|
Solve Code Fights lineup problem
|
Python
|
mit
|
HKuz/Test_Code
|
Solve Code Fights lineup problem
|
#!/usr/local/bin/python
# Code Fights Lineup Problem
def lineUp(commands):
aligned, tmp = 0, 0
com_dict = {"L": 1, "A": 0, "R": -1}
for c in commands:
tmp += com_dict[c]
if tmp % 2 == 0:
aligned += 1
return aligned
def main():
tests = [
["LLARL", 3],
["RLR", 1],
["", 0],
["L", 0],
["A", 1],
["AAAAAAAAAAAAAAA", 15],
["RRRRRRRRRRLLLLLLLLLRRRRLLLLLLLLLL", 16],
["AALAAALARAR", 5]
]
for t in tests:
res = lineUp(t[0])
ans = t[1]
if ans == res:
print("PASSED: lineUp({}) returned {}"
.format(t[0], res))
else:
print("FAILED: lineUp({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights lineup problem<commit_after>
|
#!/usr/local/bin/python
# Code Fights Lineup Problem
def lineUp(commands):
aligned, tmp = 0, 0
com_dict = {"L": 1, "A": 0, "R": -1}
for c in commands:
tmp += com_dict[c]
if tmp % 2 == 0:
aligned += 1
return aligned
def main():
tests = [
["LLARL", 3],
["RLR", 1],
["", 0],
["L", 0],
["A", 1],
["AAAAAAAAAAAAAAA", 15],
["RRRRRRRRRRLLLLLLLLLRRRRLLLLLLLLLL", 16],
["AALAAALARAR", 5]
]
for t in tests:
res = lineUp(t[0])
ans = t[1]
if ans == res:
print("PASSED: lineUp({}) returned {}"
.format(t[0], res))
else:
print("FAILED: lineUp({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights lineup problem#!/usr/local/bin/python
# Code Fights Lineup Problem
def lineUp(commands):
aligned, tmp = 0, 0
com_dict = {"L": 1, "A": 0, "R": -1}
for c in commands:
tmp += com_dict[c]
if tmp % 2 == 0:
aligned += 1
return aligned
def main():
tests = [
["LLARL", 3],
["RLR", 1],
["", 0],
["L", 0],
["A", 1],
["AAAAAAAAAAAAAAA", 15],
["RRRRRRRRRRLLLLLLLLLRRRRLLLLLLLLLL", 16],
["AALAAALARAR", 5]
]
for t in tests:
res = lineUp(t[0])
ans = t[1]
if ans == res:
print("PASSED: lineUp({}) returned {}"
.format(t[0], res))
else:
print("FAILED: lineUp({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights lineup problem<commit_after>#!/usr/local/bin/python
# Code Fights Lineup Problem
def lineUp(commands):
aligned, tmp = 0, 0
com_dict = {"L": 1, "A": 0, "R": -1}
for c in commands:
tmp += com_dict[c]
if tmp % 2 == 0:
aligned += 1
return aligned
def main():
tests = [
["LLARL", 3],
["RLR", 1],
["", 0],
["L", 0],
["A", 1],
["AAAAAAAAAAAAAAA", 15],
["RRRRRRRRRRLLLLLLLLLRRRRLLLLLLLLLL", 16],
["AALAAALARAR", 5]
]
for t in tests:
res = lineUp(t[0])
ans = t[1]
if ans == res:
print("PASSED: lineUp({}) returned {}"
.format(t[0], res))
else:
print("FAILED: lineUp({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
|
|
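Why the modulo test works: "L" and "R" are quarter-turns in opposite directions, and +1 and -1 are congruent mod 2, so a person is realigned exactly when the total number of turns taken so far is even. An equivalent formulation built on that observation:

def line_up(commands):
    turns = 0
    aligned = 0
    for c in commands:
        turns += c != "A"          # any turn flips alignment; "A" keeps it
        aligned += turns % 2 == 0  # count the moments we face forward again
    return aligned

assert line_up("LLARL") == 3
assert line_up("RRRRRRRRRRLLLLLLLLLRRRRLLLLLLLLLL") == 16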
c486b8df5861fd883b49ea8118d40d73f5b4e7b8
|
tardis/tardis_portal/tests/test_download_apikey.py
|
tardis/tardis_portal/tests/test_download_apikey.py
|
# -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.test import TestCase
from tastypie.test import ResourceTestCase
from django.test.client import Client
from django.conf import settings
from django.contrib.auth.models import User
class ApiKeyDownloadTestCase(ResourceTestCase):
def setUp(self):
# create a test user
self.username = 'test'
self.email = 'test@example.com'
self.password = 'passw0rd'
self.user = User.objects.create_user(username=self.username,
email=self.email,
password=self.password)
def tearDown(self):
self.user.delete()
def testView(self):
download_api_key_url = reverse('tardis.tardis_portal.views.download_api_key')
client = Client()
# Expect redirect to login
response = client.get(download_api_key_url)
self.assertEqual(response.status_code, 302)
# Login as user
login = client.login(username=self.username, password=self.password)
self.assertTrue(login)
response = client.get(download_api_key_url)
self.assertEqual(response['Content-Disposition'],
'inline; filename="{0}.key"'.format(self.username))
self.assertEqual(response.status_code, 200)
response_content = ""
for c in response.streaming_content:
response_content += c
self.assertEqual(response_content,
self.create_apikey(username=self.username,
                                            api_key=self.user.api_key.key))
|
Add download apikey test case
|
Add download apikey test case
|
Python
|
bsd-3-clause
|
pansapiens/mytardis,pansapiens/mytardis,pansapiens/mytardis,pansapiens/mytardis
|
Add download apikey test case
|
# -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.test import TestCase
from tastypie.test import ResourceTestCase
from django.test.client import Client
from django.conf import settings
from django.contrib.auth.models import User
class ApiKeyDownloadTestCase(ResourceTestCase):
def setUp(self):
# create a test user
self.username = 'test'
self.email = 'test@example.com'
self.password = 'passw0rd'
self.user = User.objects.create_user(username=self.username,
email=self.email,
password=self.password)
def tearDown(self):
self.user.delete()
def testView(self):
download_api_key_url = reverse('tardis.tardis_portal.views.download_api_key')
client = Client()
# Expect redirect to login
response = client.get(download_api_key_url)
self.assertEqual(response.status_code, 302)
# Login as user
login = client.login(username=self.username, password=self.password)
self.assertTrue(login)
response = client.get(download_api_key_url)
self.assertEqual(response['Content-Disposition'],
'inline; filename="{0}.key"'.format(self.username))
self.assertEqual(response.status_code, 200)
response_content = ""
for c in response.streaming_content:
response_content += c
self.assertEqual(response_content,
self.create_apikey(username=self.username,
                                            api_key=self.user.api_key.key))
|
<commit_before><commit_msg>Add download apikey test case<commit_after>
|
# -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.test import TestCase
from tastypie.test import ResourceTestCase
from django.test.client import Client
from django.conf import settings
from django.contrib.auth.models import User
class ApiKeyDownloadTestCase(ResourceTestCase):
def setUp(self):
# create a test user
self.username = 'test'
self.email = 'test@example.com'
self.password = 'passw0rd'
self.user = User.objects.create_user(username=self.username,
email=self.email,
password=self.password)
def tearDown(self):
self.user.delete()
def testView(self):
download_api_key_url = reverse('tardis.tardis_portal.views.download_api_key')
client = Client()
# Expect redirect to login
response = client.get(download_api_key_url)
self.assertEqual(response.status_code, 302)
# Login as user
login = client.login(username=self.username, password=self.password)
self.assertTrue(login)
response = client.get(download_api_key_url)
self.assertEqual(response['Content-Disposition'],
'inline; filename="{0}.key"'.format(self.username))
self.assertEqual(response.status_code, 200)
response_content = ""
for c in response.streaming_content:
response_content += c
self.assertEqual(response_content,
self.create_apikey(username=self.username,
                                            api_key=self.user.api_key.key))
|
Add download apikey test case# -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.test import TestCase
from tastypie.test import ResourceTestCase
from django.test.client import Client
from django.conf import settings
from django.contrib.auth.models import User
class ApiKeyDownloadTestCase(ResourceTestCase):
def setUp(self):
# create a test user
self.username = 'test'
self.email = 'test@example.com'
self.password = 'passw0rd'
self.user = User.objects.create_user(username=self.username,
email=self.email,
password=self.password)
def tearDown(self):
self.user.delete()
def testView(self):
download_api_key_url = reverse('tardis.tardis_portal.views.download_api_key')
client = Client()
# Expect redirect to login
response = client.get(download_api_key_url)
self.assertEqual(response.status_code, 302)
# Login as user
login = client.login(username=self.username, password=self.password)
self.assertTrue(login)
response = client.get(download_api_key_url)
self.assertEqual(response['Content-Disposition'],
'inline; filename="{0}.key"'.format(self.username))
self.assertEqual(response.status_code, 200)
response_content = ""
for c in response.streaming_content:
response_content += c
self.assertEqual(response_content,
self.create_apikey(username=self.username,
                                            api_key=self.user.api_key.key))
|
<commit_before><commit_msg>Add download apikey test case<commit_after># -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.test import TestCase
from tastypie.test import ResourceTestCase
from django.test.client import Client
from django.conf import settings
from django.contrib.auth.models import User
class ApiKeyDownloadTestCase(ResourceTestCase):
def setUp(self):
# create a test user
self.username = 'test'
self.email = 'test@example.com'
self.password = 'passw0rd'
self.user = User.objects.create_user(username=self.username,
email=self.email,
password=self.password)
def tearDown(self):
self.user.delete()
def testView(self):
download_api_key_url = reverse('tardis.tardis_portal.views.download_api_key')
client = Client()
# Expect redirect to login
response = client.get(download_api_key_url)
self.assertEqual(response.status_code, 302)
# Login as user
login = client.login(username=self.username, password=self.password)
self.assertTrue(login)
response = client.get(download_api_key_url)
self.assertEqual(response['Content-Disposition'],
'inline; filename="{0}.key"'.format(self.username))
self.assertEqual(response.status_code, 200)
response_content = ""
for c in response.streaming_content:
response_content += c
self.assertEqual(response_content,
self.create_apikey(username=self.username,
                                            api_key=self.user.api_key.key))
|
|
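The final assertion compares the downloaded body against Tastypie's ResourceTestCase.create_apikey helper. That helper appears to just format the ApiKey HTTP Authorization header value, so the expectation is roughly equivalent to the line below — the exact format string is an assumption worth verifying against the installed Tastypie version:

expected = 'ApiKey {0}:{1}'.format(self.username, self.user.api_key.key)  # assumed Tastypie format
self.assertEqual(response_content, expected)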
65f6f78008d4f961c9ebe5d8047b0f2c742fe15f
|
tests/qtgui/qinputdialog_get_test.py
|
tests/qtgui/qinputdialog_get_test.py
|
import unittest
from PySide import QtCore, QtGui
from helper import UsesQApplication, TimedQApplication
class TestInputDialog(TimedQApplication):
def testGetDouble(self):
QtGui.QInputDialog.getDouble(None, "title", "label")
def testGetInt(self):
QtGui.QInputDialog.getInt(None, "title", "label")
def testGetInteger(self):
QtGui.QInputDialog.getInteger(None, "title", "label")
def testGetItem(self):
QtGui.QInputDialog.getItem(None, "title", "label", QtCore.QStringList(["1", "2", "3"]))
def testGetText(self):
QtGui.QInputDialog.getText(None, "title", "label")
if __name__ == '__main__':
unittest.main()
|
Add unittest for QInputDialog.getXXX() methods
|
Add unittest for QInputDialog.getXXX() methods
|
Python
|
lgpl-2.1
|
RobinD42/pyside,RobinD42/pyside,enthought/pyside,BadSingleton/pyside2,M4rtinK/pyside-bb10,pankajp/pyside,BadSingleton/pyside2,M4rtinK/pyside-android,IronManMark20/pyside2,pankajp/pyside,BadSingleton/pyside2,pankajp/pyside,gbaty/pyside2,RobinD42/pyside,qtproject/pyside-pyside,M4rtinK/pyside-android,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,gbaty/pyside2,M4rtinK/pyside-android,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,gbaty/pyside2,IronManMark20/pyside2,BadSingleton/pyside2,qtproject/pyside-pyside,pankajp/pyside,IronManMark20/pyside2,pankajp/pyside,enthought/pyside,enthought/pyside,RobinD42/pyside,M4rtinK/pyside-bb10,gbaty/pyside2,enthought/pyside,qtproject/pyside-pyside,RobinD42/pyside,IronManMark20/pyside2,qtproject/pyside-pyside,PySide/PySide,M4rtinK/pyside-android,PySide/PySide,RobinD42/pyside,PySide/PySide,enthought/pyside,enthought/pyside,IronManMark20/pyside2,PySide/PySide,BadSingleton/pyside2,gbaty/pyside2,M4rtinK/pyside-android,enthought/pyside,RobinD42/pyside,PySide/PySide,qtproject/pyside-pyside,M4rtinK/pyside-android
|
Add unittest for QInputDialog.getXXX() methods
|
import unittest
from PySide import QtCore, QtGui
from helper import UsesQApplication, TimedQApplication
class TestInputDialog(TimedQApplication):
def testGetDouble(self):
QtGui.QInputDialog.getDouble(None, "title", "label")
def testGetInt(self):
QtGui.QInputDialog.getInt(None, "title", "label")
def testGetInteger(self):
QtGui.QInputDialog.getInteger(None, "title", "label")
def testGetItem(self):
QtGui.QInputDialog.getItem(None, "title", "label", QtCore.QStringList(["1", "2", "3"]))
def testGetText(self):
QtGui.QInputDialog.getText(None, "title", "label")
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unittest for QInputDialog.getXXX() methods<commit_after>
|
import unittest
from PySide import QtCore, QtGui
from helper import UsesQApplication, TimedQApplication
class TestInputDialog(TimedQApplication):
def testGetDouble(self):
QtGui.QInputDialog.getDouble(None, "title", "label")
def testGetInt(self):
QtGui.QInputDialog.getInt(None, "title", "label")
def testGetInteger(self):
QtGui.QInputDialog.getInteger(None, "title", "label")
def testGetItem(self):
QtGui.QInputDialog.getItem(None, "title", "label", QtCore.QStringList(["1", "2", "3"]))
def testGetText(self):
QtGui.QInputDialog.getText(None, "title", "label")
if __name__ == '__main__':
unittest.main()
|
Add unittest for QInputDialog.getXXX() methodsimport unittest
from PySide import QtCore, QtGui
from helper import UsesQApplication, TimedQApplication
class TestInputDialog(TimedQApplication):
def testGetDouble(self):
QtGui.QInputDialog.getDouble(None, "title", "label")
def testGetInt(self):
QtGui.QInputDialog.getInt(None, "title", "label")
def testGetInteger(self):
QtGui.QInputDialog.getInteger(None, "title", "label")
def testGetItem(self):
QtGui.QInputDialog.getItem(None, "title", "label", QtCore.QStringList(["1", "2", "3"]))
def testGetText(self):
QtGui.QInputDialog.getText(None, "title", "label")
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unittest for QInputDialog.getXXX() methods<commit_after>import unittest
from PySide import QtCore, QtGui
from helper import UsesQApplication, TimedQApplication
class TestInputDialog(TimedQApplication):
def testGetDouble(self):
QtGui.QInputDialog.getDouble(None, "title", "label")
def testGetInt(self):
QtGui.QInputDialog.getInt(None, "title", "label")
def testGetInteger(self):
QtGui.QInputDialog.getInteger(None, "title", "label")
def testGetItem(self):
QtGui.QInputDialog.getItem(None, "title", "label", QtCore.QStringList(["1", "2", "3"]))
def testGetText(self):
QtGui.QInputDialog.getText(None, "title", "label")
if __name__ == '__main__':
unittest.main()
|
|
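Each get* call here opens a modal dialog, so the test only terminates because TimedQApplication quits on a timer. (Depending on the PySide build, QtCore.QStringList may not exist, in which case a plain Python list serves as the items argument.) A sketch of the general auto-dismiss pattern for modal widgets in Qt tests — the helper name is invented:

from PySide import QtCore, QtGui

def dismiss_modal_after(msec=500):
    # Close whatever modal widget is active once `msec` ms have elapsed.
    def _close():
        widget = QtGui.QApplication.activeModalWidget()
        if widget is not None:
            widget.close()
    QtCore.QTimer.singleShot(msec, _close)

# dismiss_modal_after(); QtGui.QInputDialog.getText(None, "title", "label")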
52189e2161e92b36df47a04c2150dff38f81f5e9
|
tests/unit/tests/test_activations.py
|
tests/unit/tests/test_activations.py
|
from unittest import mock
from django.test import TestCase
from viewflow import activation, flow
from viewflow.models import Task
class TestActivations(TestCase):
def test_start_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.Start())
act = activation.StartActivation()
act.initialize(flow_task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
act.process.start.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
def test_view_activation_activate(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
prev_activation_mock = mock.Mock(spec=activation.StartActivation())
act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock)
act.task.save.assert_has_calls(())
def test_view_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
task_mock = mock.Mock(spec=Task())
act = activation.ViewActivation()
act.initialize(flow_task_mock, task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
|
Add mocked tests for activation
|
Add mocked tests for activation
|
Python
|
agpl-3.0
|
pombredanne/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,codingjoe/viewflow,pombredanne/viewflow,viewflow/viewflow,viewflow/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,ribeiro-ucl/viewflow
|
Add mocked tests for activation
|
from unittest import mock
from django.test import TestCase
from viewflow import activation, flow
from viewflow.models import Task
class TestActivations(TestCase):
def test_start_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.Start())
act = activation.StartActivation()
act.initialize(flow_task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
act.process.start.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
def test_view_activation_activate(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
prev_activation_mock = mock.Mock(spec=activation.StartActivation())
act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock)
act.task.save.assert_has_calls(())
def test_view_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
task_mock = mock.Mock(spec=Task())
act = activation.ViewActivation()
act.initialize(flow_task_mock, task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
|
<commit_before><commit_msg>Add mocked tests for activation<commit_after>
|
from unittest import mock
from django.test import TestCase
from viewflow import activation, flow
from viewflow.models import Task
class TestActivations(TestCase):
def test_start_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.Start())
act = activation.StartActivation()
act.initialize(flow_task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
act.process.start.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
def test_view_activation_activate(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
prev_activation_mock = mock.Mock(spec=activation.StartActivation())
act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock)
act.task.save.assert_has_calls(())
def test_view_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
task_mock = mock.Mock(spec=Task())
act = activation.ViewActivation()
act.initialize(flow_task_mock, task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
|
Add mocked tests for activationfrom unittest import mock
from django.test import TestCase
from viewflow import activation, flow
from viewflow.models import Task
class TestActivations(TestCase):
def test_start_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.Start())
act = activation.StartActivation()
act.initialize(flow_task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
act.process.start.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
def test_view_activation_activate(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
prev_activation_mock = mock.Mock(spec=activation.StartActivation())
act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock)
act.task.save.assert_has_calls(())
def test_view_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
task_mock = mock.Mock(spec=Task())
act = activation.ViewActivation()
act.initialize(flow_task_mock, task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
|
<commit_before><commit_msg>Add mocked tests for activation<commit_after>from unittest import mock
from django.test import TestCase
from viewflow import activation, flow
from viewflow.models import Task
class TestActivations(TestCase):
def test_start_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.Start())
act = activation.StartActivation()
act.initialize(flow_task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
act.process.start.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
def test_view_activation_activate(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
prev_activation_mock = mock.Mock(spec=activation.StartActivation())
act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock)
act.task.save.assert_has_calls(())
def test_view_activation_lifecycle(self):
flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))
task_mock = mock.Mock(spec=Task())
act = activation.ViewActivation()
act.initialize(flow_task_mock, task_mock)
act.prepare()
act.done()
act.task.prepare.assert_called_once_with()
act.task.done.assert_called_once_with()
flow_task_mock.activate_next.assert_any_call(act)
|
|
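The spec= arguments matter here: a Mock built with a spec rejects attribute access the real object would not allow, so typos in the code under test fail loudly instead of silently recording calls. A minimal illustration:

from unittest import mock

class Task(object):
    def prepare(self):
        pass

task = mock.Mock(spec=Task())
task.prepare()                        # allowed: Task defines it
task.prepare.assert_called_once_with()
# task.finnish() would raise AttributeError, unlike a bare Mock()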
27cb9279670bd513a1559f4865500d84869bb9f0
|
tests/test_predictor.py
|
tests/test_predictor.py
|
#!/usr/bin/env python
import numpy as np
from pyboas import predictor, models
# Build random 3-parameter normal posterior.
posterior = np.random.randn(100, 3)
def toy_model(param, time):
time = np.atleast_1d(time)[:, np.newaxis]
a = param[:, 0]
b = param[:, 1]
c = param[:, 2]
return a*time**2 + b*time + c
def test_basic_shape():
"""Test basic shape conditions on output of predictions."""
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.make_prediction(time)
# Test shape of predictive distributions and x
assert pred1.x.shape == pred1.predictives.shape
# Test len of time array and predictives
assert len(time) == len(pred1.predictives)
return
def test_time_concatenation():
"""
Test feature to concatenate prediction times over make_prediction calls.
"""
    # Build random time array
time = np.random.rand(4,)
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred2 = predictor.GaussPredictor(posterior, toy_model)
# Run first predictor with full time array
pred1.make_prediction(time)
# Run second predictor twice
pred2.make_prediction(time[:2])
pred2.make_prediction(time[2:])
assert np.allclose(pred1.predictives, pred2.predictives)
assert np.allclose(pred1.x, pred2.x)
return
def test_sample_draw():
    # Build random time array
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.samplepredictive(time, 100)
def ok():
print('\033[92mOK\033[0m')
def failed():
print('\033[91mFAILED\033[0m')
def test_all():
    print('Testing basic functioning....\t', end='')
try:
test_basic_shape()
ok()
except AssertionError:
failed()
    print('Testing time concatenation....\t', end='')
try:
test_time_concatenation()
ok()
except AssertionError:
failed()
return
if __name__ == '__main__':
test_all()
|
Test module for Predictor class.
|
Test module for Predictor class.
|
Python
|
mit
|
exord/pyboas
|
Test module for Predictor class.
|
#!/usr/bin/env python
import numpy as np
from pyboas import predictor, models
# Build random 3-parameter normal posterior.
posterior = np.random.randn(100, 3)
def toy_model(param, time):
time = np.atleast_1d(time)[:, np.newaxis]
a = param[:, 0]
b = param[:, 1]
c = param[:, 2]
return a*time**2 + b*time + c
def test_basic_shape():
"""Test basic shape conditions on output of predictions."""
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.make_prediction(time)
# Test shape of predictive distributions and x
assert pred1.x.shape == pred1.predictives.shape
# Test len of time array and predictives
assert len(time) == len(pred1.predictives)
return
def test_time_concatenation():
"""
Test feature to concatenate prediction times over make_prediction calls.
"""
    # Build random time array
time = np.random.rand(4,)
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred2 = predictor.GaussPredictor(posterior, toy_model)
# Run first predictor with full time array
pred1.make_prediction(time)
# Run second predictor twice
pred2.make_prediction(time[:2])
pred2.make_prediction(time[2:])
assert np.allclose(pred1.predictives, pred2.predictives)
assert np.allclose(pred1.x, pred2.x)
return
def test_sample_draw():
    # Build random time array
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.samplepredictive(time, 100)
def ok():
print('\033[92mOK\033[0m')
def failed():
print('\033[91mFAILED\033[0m')
def test_all():
    print('Testing basic functioning....\t', end='')
try:
test_basic_shape()
ok()
except AssertionError:
failed()
    print('Testing time concatenation....\t', end='')
try:
test_time_concatenation()
ok()
except AssertionError:
failed()
return
if __name__ == '__main__':
test_all()
|
<commit_before><commit_msg>Test module for Predictor class.<commit_after>
|
#!/usr/bin/env python
import numpy as np
from pyboas import predictor, models
# Build random 3-parameter normal posterior.
posterior = np.random.randn(100, 3)
def toy_model(param, time):
time = np.atleast_1d(time)[:, np.newaxis]
a = param[:, 0]
b = param[:, 1]
c = param[:, 2]
return a*time**2 + b*time + c
def test_basic_shape():
"""Test basic shape conditions on output of predictions."""
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.make_prediction(time)
# Test shape of predictive distributions and x
assert pred1.x.shape == pred1.predictives.shape
# Test len of time array and predictives
assert len(time) == len(pred1.predictives)
return
def test_time_concatenation():
"""
Test feature to concatenate prediction times over make_prediction calls.
"""
    # Build random time array
time = np.random.rand(4,)
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred2 = predictor.GaussPredictor(posterior, toy_model)
# Run first predictor with full time array
pred1.make_prediction(time)
# Run second predictor twice
pred2.make_prediction(time[:2])
pred2.make_prediction(time[2:])
assert np.allclose(pred1.predictives, pred2.predictives)
assert np.allclose(pred1.x, pred2.x)
return
def test_sample_draw():
    # Build random time array
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.samplepredictive(time, 100)
def ok():
print('\033[92mOK\033[0m')
def failed():
print('\033[91mFAILED\033[0m')
def test_all():
    print('Testing basic functioning....\t', end='')
try:
test_basic_shape()
ok()
except AssertionError:
failed()
    print('Testing time concatenation....\t', end='')
try:
test_time_concatenation()
ok()
except AssertionError:
failed()
return
if __name__ == '__main__':
test_all()
|
Test module for Predictor class.#!/usr/bin/env python
import numpy as np
from pyboas import predictor, models
# Build random 3-parameter normal posterior.
posterior = np.random.randn(100, 3)
def toy_model(param, time):
time = np.atleast_1d(time)[:, np.newaxis]
a = param[:, 0]
b = param[:, 1]
c = param[:, 2]
return a*time**2 + b*time + c
def test_basic_shape():
"""Test basic shape conditions on output of predictions."""
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.make_prediction(time)
# Test shape of predictive distributions and x
assert pred1.x.shape == pred1.predictives.shape
# Test len of time array and predictives
assert len(time) == len(pred1.predictives)
return
def test_time_concatenation():
"""
Test feature to concatenate prediction times over make_prediction calls.
"""
    # Build random time array
time = np.random.rand(4,)
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred2 = predictor.GaussPredictor(posterior, toy_model)
# Run first predictor with full time array
pred1.make_prediction(time)
# Run second predictor twice
pred2.make_prediction(time[:2])
pred2.make_prediction(time[2:])
assert np.allclose(pred1.predictives, pred2.predictives)
assert np.allclose(pred1.x, pred2.x)
return
def test_sample_draw():
    # Build random time array
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.samplepredictive(time, 100)
def ok():
print('\033[92mOK\033[0m')
def failed():
print('\033[91mFAILED\033[0m')
def test_all():
    print('Testing basic functioning....\t', end='')
try:
test_basic_shape()
ok()
except AssertionError:
failed()
    print('Testing time concatenation....\t', end='')
try:
test_time_concatenation()
ok()
except AssertionError:
failed()
return
if __name__ == '__main__':
test_all()
|
<commit_before><commit_msg>Test module for Predictor class.<commit_after>#!/usr/bin/env python
import numpy as np
from pyboas import predictor, models
# Build random 3-parameter normal posterior.
posterior = np.random.randn(100, 3)
def toy_model(param, time):
time = np.atleast_1d(time)[:, np.newaxis]
a = param[:, 0]
b = param[:, 1]
c = param[:, 2]
return a*time**2 + b*time + c
def test_basic_shape():
"""Test basic shape conditions on output of predictions."""
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.make_prediction(time)
# Test shape of predictive distributions and x
assert pred1.x.shape == pred1.predictives.shape
# Test len of time array and predictives
assert len(time) == len(pred1.predictives)
return
def test_time_concatenation():
"""
Test feature to concatenate prediction times over make_prediction calls.
"""
    # Build random time array
time = np.random.rand(4,)
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred2 = predictor.GaussPredictor(posterior, toy_model)
# Run first predictor with full time array
pred1.make_prediction(time)
# Run second predictor twice
pred2.make_prediction(time[:2])
pred2.make_prediction(time[2:])
assert np.allclose(pred1.predictives, pred2.predictives)
assert np.allclose(pred1.x, pred2.x)
return
def test_sample_draw():
    # Build random time array
time = np.random.rand(4, )
pred1 = predictor.GaussPredictor(posterior, toy_model)
pred1.samplepredictive(time, 100)
def ok():
print('\033[92mOK\033[0m')
def failed():
print('\033[91mFAILED\033[0m')
def test_all():
    print('Testing basic functioning....\t', end='')
try:
test_basic_shape()
ok()
except AssertionError:
failed()
    print('Testing time concatenation....\t', end='')
try:
test_time_concatenation()
ok()
except AssertionError:
failed()
return
if __name__ == '__main__':
test_all()
|
|
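The ok/failed helpers rely on ANSI escape sequences: \033[92m switches the terminal to bright green, \033[91m to bright red, and \033[0m resets the attributes. The same thing with named constants reads more clearly:

GREEN, RED, RESET = '\033[92m', '\033[91m', '\033[0m'

def ok():
    print(GREEN + 'OK' + RESET)

def failed():
    print(RED + 'FAILED' + RESET)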
34d5b5cdc058f1c9055b82151b518251fa3b4f74
|
tools/join-contracts.py
|
tools/join-contracts.py
|
import os
import click
import re
from click.types import File
IMPORT_RE = re.compile(r'^import +["\'](?P<contract>[^"\']+.sol)["\'];$')
"""
Utility to join solidity contracts into a single output file by recursively
resolving imports.
example usage:
$ cd raiden/smart_contracts
$ python ../../tools/join-contracts.py SomeContractWithImports.sol joined.sol
"""
class ContractJoiner(object):
def __init__(self):
self.have_pragma = False
self.seen = set()
def join(self, contract_file):
out = []
if contract_file.name in self.seen:
print('Skipping duplicate {}'.format(contract_file.name))
return []
self.seen.add(contract_file.name)
print('Reading {}'.format(contract_file.name))
for line in contract_file:
line = line.strip('\r\n')
stripped_line = line.strip()
if stripped_line.startswith('pragma'):
if not self.have_pragma:
self.have_pragma = True
out.append(line)
elif stripped_line.startswith('import'):
match = IMPORT_RE.match(stripped_line)
if match:
next_file = match.groupdict().get('contract')
if next_file and os.path.exists(next_file):
with open(next_file) as next_contract:
out.extend(self.join(next_contract))
else:
out.append(line)
return out
@click.command()
@click.argument('contract', type=File())
@click.argument('output', type=File('w'))
def main(contract, output):
output.write("\n".join(ContractJoiner().join(contract)))
if __name__ == '__main__':
main()
|
Add tool to create combined smart contract files
|
Add tool to create combined smart contract files
Useful for various cases where a single source file is needed e.g. when
verifying contracts on etherscan.
|
Python
|
mit
|
tomashaber/raiden,hackaugusto/raiden,tomashaber/raiden,tomashaber/raiden,hackaugusto/raiden,tomashaber/raiden,tomashaber/raiden
|
Add tool to create combined smart contract files
Useful for various cases where a single source file is needed e.g. when
verifying contracts on etherscan.
|
import os
import click
import re
from click.types import File
IMPORT_RE = re.compile(r'^import +["\'](?P<contract>[^"\']+.sol)["\'];$')
"""
Utility to join solidity contracts into a single output file by recursively
resolving imports.
example usage:
$ cd raiden/smart_contracts
$ python ../../tools/join-contracts.py SomeContractWithImports.sol joined.sol
"""
class ContractJoiner(object):
def __init__(self):
self.have_pragma = False
self.seen = set()
def join(self, contract_file):
out = []
if contract_file.name in self.seen:
print('Skipping duplicate {}'.format(contract_file.name))
return []
self.seen.add(contract_file.name)
print('Reading {}'.format(contract_file.name))
for line in contract_file:
line = line.strip('\r\n')
stripped_line = line.strip()
if stripped_line.startswith('pragma'):
if not self.have_pragma:
self.have_pragma = True
out.append(line)
elif stripped_line.startswith('import'):
match = IMPORT_RE.match(stripped_line)
if match:
next_file = match.groupdict().get('contract')
if next_file and os.path.exists(next_file):
with open(next_file) as next_contract:
out.extend(self.join(next_contract))
else:
out.append(line)
return out
@click.command()
@click.argument('contract', type=File())
@click.argument('output', type=File('w'))
def main(contract, output):
output.write("\n".join(ContractJoiner().join(contract)))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add tool to create combined smart contract files
Useful for various cases where a single source file is needed e.g. when
verifying contracts on etherscan.<commit_after>
|
import os
import click
import re
from click.types import File
IMPORT_RE = re.compile(r'^import +["\'](?P<contract>[^"\']+.sol)["\'];$')
"""
Utility to join solidity contracts into a single output file by recursively
resolving imports.
example usage:
$ cd raiden/smart_contracts
$ python ../../tools/join-contracts.py SomeContractWithImports.sol joined.sol
"""
class ContractJoiner(object):
def __init__(self):
self.have_pragma = False
self.seen = set()
def join(self, contract_file):
out = []
if contract_file.name in self.seen:
print('Skipping duplicate {}'.format(contract_file.name))
return []
self.seen.add(contract_file.name)
print('Reading {}'.format(contract_file.name))
for line in contract_file:
line = line.strip('\r\n')
stripped_line = line.strip()
if stripped_line.startswith('pragma'):
if not self.have_pragma:
self.have_pragma = True
out.append(line)
elif stripped_line.startswith('import'):
match = IMPORT_RE.match(stripped_line)
if match:
next_file = match.groupdict().get('contract')
if next_file and os.path.exists(next_file):
with open(next_file) as next_contract:
out.extend(self.join(next_contract))
else:
out.append(line)
return out
@click.command()
@click.argument('contract', type=File())
@click.argument('output', type=File('w'))
def main(contract, output):
output.write("\n".join(ContractJoiner().join(contract)))
if __name__ == '__main__':
main()
|
Add tool to create combined smart contract files
Useful for various cases where a single source file is needed e.g. when
verifying contracts on etherscan.import os
import click
import re
from click.types import File
IMPORT_RE = re.compile(r'^import +["\'](?P<contract>[^"\']+.sol)["\'];$')
"""
Utility to join solidity contracts into a single output file by recursively
resolving imports.
example usage:
$ cd raiden/smart_contracts
$ python ../../tools/join-contracts.py SomeContractWithImports.sol joined.sol
"""
class ContractJoiner(object):
def __init__(self):
self.have_pragma = False
self.seen = set()
def join(self, contract_file):
out = []
if contract_file.name in self.seen:
print('Skipping duplicate {}'.format(contract_file.name))
return []
self.seen.add(contract_file.name)
print('Reading {}'.format(contract_file.name))
for line in contract_file:
line = line.strip('\r\n')
stripped_line = line.strip()
if stripped_line.startswith('pragma'):
if not self.have_pragma:
self.have_pragma = True
out.append(line)
elif stripped_line.startswith('import'):
match = IMPORT_RE.match(stripped_line)
if match:
next_file = match.groupdict().get('contract')
if next_file and os.path.exists(next_file):
with open(next_file) as next_contract:
out.extend(self.join(next_contract))
else:
out.append(line)
return out
@click.command()
@click.argument('contract', type=File())
@click.argument('output', type=File('w'))
def main(contract, output):
output.write("\n".join(ContractJoiner().join(contract)))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add tool to create combined smart contract files
Useful for various cases where a single source file is needed e.g. when
verifying contracts on etherscan.<commit_after>import os
import click
import re
from click.types import File
IMPORT_RE = re.compile(r'^import +["\'](?P<contract>[^"\']+.sol)["\'];$')
"""
Utility to join solidity contracts into a single output file by recursively
resolving imports.
example usage:
$ cd raiden/smart_contracts
$ python ../../tools/join-contracts.py SomeContractWithImports.sol joined.sol
"""
class ContractJoiner(object):
def __init__(self):
self.have_pragma = False
self.seen = set()
def join(self, contract_file):
out = []
if contract_file.name in self.seen:
print('Skipping duplicate {}'.format(contract_file.name))
return []
self.seen.add(contract_file.name)
print('Reading {}'.format(contract_file.name))
for line in contract_file:
line = line.strip('\r\n')
stripped_line = line.strip()
if stripped_line.startswith('pragma'):
if not self.have_pragma:
self.have_pragma = True
out.append(line)
elif stripped_line.startswith('import'):
match = IMPORT_RE.match(stripped_line)
if match:
next_file = match.groupdict().get('contract')
if next_file and os.path.exists(next_file):
with open(next_file) as next_contract:
out.extend(self.join(next_contract))
else:
out.append(line)
return out
@click.command()
@click.argument('contract', type=File())
@click.argument('output', type=File('w'))
def main(contract, output):
output.write("\n".join(ContractJoiner().join(contract)))
if __name__ == '__main__':
main()
|
|
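One caveat the docstring hints at: imports are resolved with os.path.exists relative to the current working directory, which is why the example cd's into the contracts directory first. A sketch of resolving each import relative to the file that declares it instead — a possible refinement, not part of the commit:

import os

def resolve_import(import_path, importing_file):
    # Prefer a path next to the importing contract; fall back to the raw path.
    candidate = os.path.join(os.path.dirname(importing_file), import_path)
    return candidate if os.path.exists(candidate) else import_path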
e06416a61826229ebd0cccdc519b6dc39d8a0fd9
|
server/migrations/0088_auto_20190304_1313.py
|
server/migrations/0088_auto_20190304_1313.py
|
# Generated by Django 2.1.4 on 2019-03-04 18:13
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0087_auto_20190301_1424'),
]
operations = [
migrations.AlterUniqueTogether(
name='installedupdate',
unique_together=set(),
),
migrations.RemoveField(
model_name='installedupdate',
name='machine',
),
migrations.RemoveField(
model_name='pendingappleupdate',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistory',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistory',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistoryitem',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistoryitem',
name='update_history',
),
migrations.DeleteModel(
name='InstalledUpdate',
),
migrations.DeleteModel(
name='PendingAppleUpdate',
),
migrations.DeleteModel(
name='UpdateHistory',
),
migrations.DeleteModel(
name='UpdateHistoryItem',
),
]
|
Add migration to remove models.
|
Add migration to remove models.
|
Python
|
apache-2.0
|
sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,salopensource/sal,salopensource/sal,sheagcraig/sal
|
Add migration to remove models.
|
# Generated by Django 2.1.4 on 2019-03-04 18:13
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0087_auto_20190301_1424'),
]
operations = [
migrations.AlterUniqueTogether(
name='installedupdate',
unique_together=set(),
),
migrations.RemoveField(
model_name='installedupdate',
name='machine',
),
migrations.RemoveField(
model_name='pendingappleupdate',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistory',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistory',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistoryitem',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistoryitem',
name='update_history',
),
migrations.DeleteModel(
name='InstalledUpdate',
),
migrations.DeleteModel(
name='PendingAppleUpdate',
),
migrations.DeleteModel(
name='UpdateHistory',
),
migrations.DeleteModel(
name='UpdateHistoryItem',
),
]
|
<commit_before><commit_msg>Add migration to remove models.<commit_after>
|
# Generated by Django 2.1.4 on 2019-03-04 18:13
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0087_auto_20190301_1424'),
]
operations = [
migrations.AlterUniqueTogether(
name='installedupdate',
unique_together=set(),
),
migrations.RemoveField(
model_name='installedupdate',
name='machine',
),
migrations.RemoveField(
model_name='pendingappleupdate',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistory',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistory',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistoryitem',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistoryitem',
name='update_history',
),
migrations.DeleteModel(
name='InstalledUpdate',
),
migrations.DeleteModel(
name='PendingAppleUpdate',
),
migrations.DeleteModel(
name='UpdateHistory',
),
migrations.DeleteModel(
name='UpdateHistoryItem',
),
]
|
Add migration to remove models.# Generated by Django 2.1.4 on 2019-03-04 18:13
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0087_auto_20190301_1424'),
]
operations = [
migrations.AlterUniqueTogether(
name='installedupdate',
unique_together=set(),
),
migrations.RemoveField(
model_name='installedupdate',
name='machine',
),
migrations.RemoveField(
model_name='pendingappleupdate',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistory',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistory',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistoryitem',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistoryitem',
name='update_history',
),
migrations.DeleteModel(
name='InstalledUpdate',
),
migrations.DeleteModel(
name='PendingAppleUpdate',
),
migrations.DeleteModel(
name='UpdateHistory',
),
migrations.DeleteModel(
name='UpdateHistoryItem',
),
]
|
<commit_before><commit_msg>Add migration to remove models.<commit_after># Generated by Django 2.1.4 on 2019-03-04 18:13
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0087_auto_20190301_1424'),
]
operations = [
migrations.AlterUniqueTogether(
name='installedupdate',
unique_together=set(),
),
migrations.RemoveField(
model_name='installedupdate',
name='machine',
),
migrations.RemoveField(
model_name='pendingappleupdate',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistory',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistory',
name='machine',
),
migrations.AlterUniqueTogether(
name='updatehistoryitem',
unique_together=set(),
),
migrations.RemoveField(
model_name='updatehistoryitem',
name='update_history',
),
migrations.DeleteModel(
name='InstalledUpdate',
),
migrations.DeleteModel(
name='PendingAppleUpdate',
),
migrations.DeleteModel(
name='UpdateHistory',
),
migrations.DeleteModel(
name='UpdateHistoryItem',
),
]
|
|
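The operation order in this migration is significant: unique_together is cleared and the foreign-key fields are removed before the DeleteModel operations, so the database drops constraints before the tables they reference. In miniature, for one model:

from django.db import migrations

operations = [
    migrations.AlterUniqueTogether(name='installedupdate', unique_together=set()),
    migrations.RemoveField(model_name='installedupdate', name='machine'),  # drop the FK column first
    migrations.DeleteModel(name='InstalledUpdate'),                        # then the table itself
]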
66137a8710bf3b778c860af8d6278ee0c97bbab4
|
scripts/delete-unused-users.py
|
scripts/delete-unused-users.py
|
#!/usr/bin/env python3
"""
Delete unused users from a JupyterHub.
JupyterHub performance sometimes scales with *total* number
of users, rather than running number of users. While that should
be fixed, we can work around it by deleting unused users once in
a while. This script will delete anyone who hasn't registered
any activity in a given period of time, double checking to
make sure they aren't active right now. This will require users to
log in again the next time they use the hub, but that's probably
ok.
"""
import argparse
from jhub_client.api import JupyterHubAPI
from dateutil.parser import parse
import asyncio
from datetime import timedelta, datetime
async def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'hub_url',
help='Fully qualified URL to the JupyterHub'
)
args = argparser.parse_args()
to_delete = []
async with JupyterHubAPI(hub_url=args.hub_url) as hub:
users = await hub.list_users()
for user in users:
last_activity_str = user.get('last_activity', False)
if last_activity_str:
try:
last_activity = parse(user['last_activity'])
except:
print(user['last_activity'])
raise
if last_activity and datetime.now().astimezone() - last_activity < timedelta(hours=24) and user['server'] is not None:
print(f"Not deleting {user['name']}")
else:
to_delete.append(user['name'])
print(f"Deleting {user['name']}")
for i, username in enumerate(to_delete):
print(f'{i+1} of {len(to_delete)}: deleting {username}')
await hub.delete_user(username)
if __name__ == '__main__':
asyncio.run(main())
|
Add script to delete unused users on JupyterHub
|
Add script to delete unused users on JupyterHub
Note that this doesn't actually delete their home directories
or any data - just the entry in the JupyterHub DB. As soon as
they log in again, a new entry is created.
This is really just a performance optimization.
|
Python
|
bsd-3-clause
|
ryanlovett/datahub,berkeley-dsep-infra/datahub,ryanlovett/datahub,berkeley-dsep-infra/datahub,berkeley-dsep-infra/datahub,ryanlovett/datahub
|
Add script to delete unused users on JupyterHub
Note that this doesn't actually delete their home directories
or any data - just the entry in the JupyterHub DB. As soon as
they log in again, a new entry is created.
This is really just a performance optimization.
|
#!/usr/bin/env python3
"""
Delete unused users from a JupyterHub.
JupyterHub performance sometimes scales with *total* number
of users, rather than running number of users. While that should
be fixed, we can work around it by deleting unused users once in
a while. This script will delete anyone who hasn't registered
any activity in a given period of time, double checking to
make sure they aren't active right now. This will require users to
log in again the next time they use the hub, but that's probably
ok.
"""
import argparse
from jhub_client.api import JupyterHubAPI
from dateutil.parser import parse
import asyncio
from datetime import timedelta, datetime
async def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'hub_url',
help='Fully qualified URL to the JupyterHub'
)
args = argparser.parse_args()
to_delete = []
async with JupyterHubAPI(hub_url=args.hub_url) as hub:
users = await hub.list_users()
for user in users:
last_activity_str = user.get('last_activity', False)
if last_activity_str:
try:
last_activity = parse(user['last_activity'])
except:
print(user['last_activity'])
raise
if last_activity and datetime.now().astimezone() - last_activity < timedelta(hours=24) and user['server'] is not None:
print(f"Not deleting {user['name']}")
else:
to_delete.append(user['name'])
print(f"Deleting {user['name']}")
for i, username in enumerate(to_delete):
print(f'{i+1} of {len(to_delete)}: deleting {username}')
await hub.delete_user(username)
if __name__ == '__main__':
asyncio.run(main())
|
<commit_before><commit_msg>Add script to delete unused users on JupyterHub
Note that this doesn't actually delete their home directories
or any data - just the entry in the JupyterHub DB. As soon as
they log in again, a new entry is created.
This is really just a performance optimization.<commit_after>
|
#!/usr/bin/env python3
"""
Delete unused users from a JupyterHub.
JupyterHub performance sometimes scales with *total* number
of users, rather than running number of users. While that should
be fixed, we can work around it by deleting unused users once in
a while. This script will delete anyone who hasn't registered
any activity in a given period of time, double checking to
make sure they aren't active right now. This will require users to
log in again the next time they use the hub, but that's probably
ok.
"""
import argparse
from jhub_client.api import JupyterHubAPI
from dateutil.parser import parse
import asyncio
from datetime import timedelta, datetime
async def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'hub_url',
help='Fully qualified URL to the JupyterHub'
)
args = argparser.parse_args()
to_delete = []
async with JupyterHubAPI(hub_url=args.hub_url) as hub:
users = await hub.list_users()
for user in users:
last_activity_str = user.get('last_activity', False)
if last_activity_str:
try:
last_activity = parse(user['last_activity'])
except:
print(user['last_activity'])
raise
if last_activity and datetime.now().astimezone() - last_activity < timedelta(hours=24) and user['server'] is not None:
print(f"Not deleting {user['name']}")
else:
to_delete.append(user['name'])
print(f"Deleting {user['name']}")
for i, username in enumerate(to_delete):
print(f'{i+1} of {len(to_delete)}: deleting {username}')
await hub.delete_user(username)
if __name__ == '__main__':
asyncio.run(main())
|
Add script to delete unused users on JupyterHub
Note that this doesn't actually delete their home directories
or any data - just the entry in the JupyterHub DB. As soon as
they log in again, a new entry is created.
This is really just a performance optimization.#!/usr/bin/env python3
"""
Delete unused users from a JupyterHub.
JupyterHub performance sometimes scales with *total* number
of users, rather than running number of users. While that should
be fixed, we can work around it by deleting unused users once in
a while. This script will delete anyone who hasn't registered
any activity in a given period of time, double checking to
make sure they aren't active right now. This will require users to
log in again the next time they use the hub, but that's probably
ok.
"""
import argparse
from jhub_client.api import JupyterHubAPI
from dateutil.parser import parse
import asyncio
from datetime import timedelta, datetime
async def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'hub_url',
help='Fully qualified URL to the JupyterHub'
)
args = argparser.parse_args()
to_delete = []
async with JupyterHubAPI(hub_url=args.hub_url) as hub:
users = await hub.list_users()
for user in users:
last_activity_str = user.get('last_activity', False)
if last_activity_str:
try:
last_activity = parse(user['last_activity'])
except:
print(user['last_activity'])
raise
if last_activity and datetime.now().astimezone() - last_activity < timedelta(hours=24) and user['server'] is not None:
print(f"Not deleting {user['name']}")
else:
to_delete.append(user['name'])
print(f"Deleting {user['name']}")
for i, username in enumerate(to_delete):
print(f'{i+1} of {len(to_delete)}: deleting {username}')
await hub.delete_user(username)
if __name__ == '__main__':
asyncio.run(main())
|
<commit_before><commit_msg>Add script to delete unused users on JupyterHub
Note that this doesn't actually delete their home directories
or any data - just the entry in the JupyterHub DB. As soon as
they log in again, a new entry is created.
This is really just a performance optimization.<commit_after>#!/usr/bin/env python3
"""
Delete unused users from a JupyterHub.
JupyterHub performance sometimes scales with *total* number
of users, rather than running number of users. While that should
be fixed, we can work around it by deleting unused users once in
a while. This script will delete anyone who hasn't registered
any activity in a given period of time, double checking to
make sure they aren't active right now. This will require users to
log in again the next time they use the hub, but that's probably
ok.
"""
import argparse
from jhub_client.api import JupyterHubAPI
from dateutil.parser import parse
import asyncio
from datetime import timedelta, datetime
async def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'hub_url',
help='Fully qualified URL to the JupyterHub'
)
args = argparser.parse_args()
to_delete = []
async with JupyterHubAPI(hub_url=args.hub_url) as hub:
users = await hub.list_users()
for user in users:
last_activity_str = user.get('last_activity', False)
if last_activity_str:
try:
last_activity = parse(user['last_activity'])
except:
print(user['last_activity'])
raise
if last_activity and datetime.now().astimezone() - last_activity < timedelta(hours=24) and user['server'] is not None:
print(f"Not deleting {user['name']}")
else:
to_delete.append(user['name'])
print(f"Deleting {user['name']}")
for i, username in enumerate(to_delete):
print(f'{i+1} of {len(to_delete)}: deleting {username}')
await hub.delete_user(username)
if __name__ == '__main__':
asyncio.run(main())
|
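The deletion cutoff is the subtle part of the script above: timestamps must be parsed timezone-aware before comparing against local time. Isolated as a sketch (dateutil assumed available, as in the script; the function name is mine):

from datetime import datetime, timedelta
from dateutil.parser import parse

def is_recently_active(last_activity_str, hours=24):
    """True if an ISO-8601 timestamp falls inside the last `hours` hours."""
    if not last_activity_str:
        return False
    last_activity = parse(last_activity_str)
    return datetime.now().astimezone() - last_activity < timedelta(hours=hours)

print(is_recently_active('2020-01-01T00:00:00+00:00'))  # False: far outside the window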
|
5a77678a44ec9838e943b514a586dbd96b8bdfdc
|
modelview/migrations/0042_auto_20171215_0953.py
|
modelview/migrations/0042_auto_20171215_0953.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-12-15 08:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('modelview', '0041_merge_20171211_1420'),
]
operations = [
migrations.AlterField(
model_name='basicfactsheet',
name='license',
field=models.CharField(choices=[('MIT Licence', 'MIT Licence'), ('Apache Licence', 'Apache Licence'), ('GNU GPL Licence', 'GNU GPL Licence'), ('Other', 'Other'), ('Unknown', 'Unknown')], default='Unknown', max_length=20, verbose_name='License'))
]
|
Add migration for license change
|
Add migration for license change
|
Python
|
agpl-3.0
|
openego/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,openego/oeplatform,tom-heimbrodt/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform
|
Add migration for license change
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-12-15 08:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('modelview', '0041_merge_20171211_1420'),
]
operations = [
migrations.AlterField(
model_name='basicfactsheet',
name='license',
field=models.CharField(choices=[('MIT Licence', 'MIT Licence'), ('Apache Licence', 'Apache Licence'), ('GNU GPL Licence', 'GNU GPL Licence'), ('Other', 'Other'), ('Unknown', 'Unknown')], default='Unknown', max_length=20, verbose_name='License'))
]
|
<commit_before><commit_msg>Add migration for license change<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-12-15 08:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('modelview', '0041_merge_20171211_1420'),
]
operations = [
migrations.AlterField(
model_name='basicfactsheet',
name='license',
field=models.CharField(choices=[('MIT Licence', 'MIT Licence'), ('Apache Licence', 'Apache Licence'), ('GNU GPL Licence', 'GNU GPL Licence'), ('Other', 'Other'), ('Unknown', 'Unknown')], default='Unknown', max_length=20, verbose_name='License'))
]
|
Add migration for license change# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-12-15 08:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('modelview', '0041_merge_20171211_1420'),
]
operations = [
migrations.AlterField(
model_name='basicfactsheet',
name='license',
field=models.CharField(choices=[('MIT Licence', 'MIT Licence'), ('Apache Licence', 'Apache Licence'), ('GNU GPL Licence', 'GNU GPL Licence'), ('Other', 'Other'), ('Unknown', 'Unknown')], default='Unknown', max_length=20, verbose_name='License'))
]
|
<commit_before><commit_msg>Add migration for license change<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-12-15 08:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('modelview', '0041_merge_20171211_1420'),
]
operations = [
migrations.AlterField(
model_name='basicfactsheet',
name='license',
field=models.CharField(choices=[('MIT Licence', 'MIT Licence'), ('Apache Licence', 'Apache Licence'), ('GNU GPL Licence', 'GNU GPL Licence'), ('Other', 'Other'), ('Unknown', 'Unknown')], default='Unknown', max_length=20, verbose_name='License'))
]
|
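A usual refinement of the field above is to declare the choices once at module level and reuse them on the model, so the model and future migrations cannot drift apart. Sketch only; the class name is taken from the migration:

from django.db import models

LICENSE_CHOICES = [
    ('MIT Licence', 'MIT Licence'),
    ('Apache Licence', 'Apache Licence'),
    ('GNU GPL Licence', 'GNU GPL Licence'),
    ('Other', 'Other'),
    ('Unknown', 'Unknown'),
]

class BasicFactsheet(models.Model):
    license = models.CharField(max_length=20, choices=LICENSE_CHOICES,
                               default='Unknown', verbose_name='License')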
|
f5970d1488d28f27c5f20dd11619187d0c13c960
|
os/win_registry.py
|
os/win_registry.py
|
import _winreg
keyName = "myKey"
def write_to_registry():
try:
key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName)
_winreg.SetValueEx(key, "myVal", 0, _winreg.REG_SZ, "This is a value.")
print("value created")
except Exception as e:
print(e)
def read_from_registry():
try:
with _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName, 0, _winreg.KEY_READ) as key:
if key:
data = _winreg.QueryValueEx(key, "myVal")
print("Read from registry: ", data)
except Exception as e:
print(e)
if __name__ == '__main__':
write_to_registry()
read_from_registry()
|
Add simple windows registry read/write functions
|
Add simple windows registry read/write functions
|
Python
|
mit
|
ddubson/code-dojo-py
|
Add simple windows registry read/write functions
|
import _winreg
keyName = "myKey"
def write_to_registry():
try:
key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName)
_winreg.SetValueEx(key, "myVal", 0, _winreg.REG_SZ, "This is a value.")
print("value created")
except Exception as e:
print(e)
def read_from_registry():
try:
with _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName, 0, _winreg.KEY_READ) as key:
if key:
data = _winreg.QueryValueEx(key, "myVal")
print("Read from registry: ", data)
except Exception as e:
print(e)
if __name__ == '__main__':
write_to_registry()
read_from_registry()
|
<commit_before><commit_msg>Add simple windows registry read/write functions<commit_after>
|
import _winreg
keyName = "myKey"
def write_to_registry():
try:
key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName)
_winreg.SetValueEx(key, "myVal", 0, _winreg.REG_SZ, "This is a value.")
print("value created")
except Exception as e:
print(e)
def read_from_registry():
try:
with _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName, 0, _winreg.KEY_READ) as key:
if key:
data = _winreg.QueryValueEx(key, "myVal")
print("Read from registry: ", data)
except Exception as e:
print(e)
if __name__ == '__main__':
write_to_registry()
read_from_registry()
|
Add simple windows registry read/write functionsimport _winreg
keyName = "myKey"
def write_to_registry():
try:
key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName)
_winreg.SetValueEx(key, "myVal", 0, _winreg.REG_SZ, "This is a value.")
print("value created")
except Exception as e:
print(e)
def read_from_registry():
try:
with _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName, 0, _winreg.KEY_READ) as key:
if key:
data = _winreg.QueryValueEx(key, "myVal")
print("Read from registry: ", data)
except Exception as e:
print(e)
if __name__ == '__main__':
write_to_registry()
read_from_registry()
|
<commit_before><commit_msg>Add simple windows registry read/write functions<commit_after>import _winreg
keyName = "myKey"
def write_to_registry():
try:
key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName)
_winreg.SetValueEx(key, "myVal", 0, _winreg.REG_SZ, "This is a value.")
print("value created")
except Exception as e:
print(e)
def read_from_registry():
try:
with _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\" + keyName, 0, _winreg.KEY_READ) as key:
if key:
data = _winreg.QueryValueEx(key, "myVal")
print("Read from registry: ", data)
except Exception as e:
print(e)
if __name__ == '__main__':
write_to_registry()
read_from_registry()
|
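Note that `_winreg` is the Python 2 module name; Python 3 ships the same API as `winreg`, and the key handles work as context managers. A straight port of the script above:

import winreg  # Python 3 name for _winreg

KEY_PATH = r"Software\myKey"

def write_to_registry():
    with winreg.CreateKey(winreg.HKEY_CURRENT_USER, KEY_PATH) as key:
        winreg.SetValueEx(key, "myVal", 0, winreg.REG_SZ, "This is a value.")

def read_from_registry():
    with winreg.OpenKey(winreg.HKEY_CURRENT_USER, KEY_PATH, 0, winreg.KEY_READ) as key:
        print("Read from registry:", winreg.QueryValueEx(key, "myVal"))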
|
1de668219f618a0632fac80fd892a0a229b8fa05
|
CodeFights/additionWithoutCarrying.py
|
CodeFights/additionWithoutCarrying.py
|
#!/usr/local/bin/python
# Code Fights Addition Without Carrying Problem
def additionWithoutCarrying(param1, param2):
s1, s2 = str(param1), str(param2)
shorter = s1 if len(s1) < len(s2) else s2
longer = s2 if shorter == s1 else s1
if len(shorter) < len(longer):
shorter = shorter.zfill(len(longer))
return int(''.join([str(int(a) + int(b))[-1] for (a, b) in
zip(shorter, longer)]))
def main():
tests = [
[456, 1734, 1180],
[99999, 0, 99999],
[999, 999, 888],
[0, 0, 0],
[54321, 54321, 8642]
]
for t in tests:
res = additionWithoutCarrying(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: additionWithoutCarrying({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: additionWithoutCarrying({}, {}) returned {},"
"answer: {}".format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights addition without carrying problem
|
Solve Code Fights addition without carrying problem
|
Python
|
mit
|
HKuz/Test_Code
|
Solve Code Fights addition without carrying problem
|
#!/usr/local/bin/python
# Code Fights Addition Without Carrying Problem
def additionWithoutCarrying(param1, param2):
s1, s2 = str(param1), str(param2)
shorter = s1 if len(s1) < len(s2) else s2
longer = s2 if shorter == s1 else s1
if len(shorter) < len(longer):
shorter = shorter.zfill(len(longer))
return int(''.join([str(int(a) + int(b))[-1] for (a, b) in
zip(shorter, longer)]))
def main():
tests = [
[456, 1734, 1180],
[99999, 0, 99999],
[999, 999, 888],
[0, 0, 0],
[54321, 54321, 8642]
]
for t in tests:
res = additionWithoutCarrying(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: additionWithoutCarrying({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: additionWithoutCarrying({}, {}) returned {},"
"answer: {}".format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights addition without carrying problem<commit_after>
|
#!/usr/local/bin/python
# Code Fights Addition Without Carrying Problem
def additionWithoutCarrying(param1, param2):
s1, s2 = str(param1), str(param2)
shorter = s1 if len(s1) < len(s2) else s2
longer = s2 if shorter == s1 else s1
if len(shorter) < len(longer):
shorter = shorter.zfill(len(longer))
return int(''.join([str(int(a) + int(b))[-1] for (a, b) in
zip(shorter, longer)]))
def main():
tests = [
[456, 1734, 1180],
[99999, 0, 99999],
[999, 999, 888],
[0, 0, 0],
[54321, 54321, 8642]
]
for t in tests:
res = additionWithoutCarrying(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: additionWithoutCarrying({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: additionWithoutCarrying({}, {}) returned {},"
"answer: {}".format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights addition without carrying problem#!/usr/local/bin/python
# Code Fights Addition Without Carrying Problem
def additionWithoutCarrying(param1, param2):
s1, s2 = str(param1), str(param2)
shorter = s1 if len(s1) < len(s2) else s2
longer = s2 if shorter == s1 else s1
if len(shorter) < len(longer):
shorter = shorter.zfill(len(longer))
return int(''.join([str(int(a) + int(b))[-1] for (a, b) in
zip(shorter, longer)]))
def main():
tests = [
[456, 1734, 1180],
[99999, 0, 99999],
[999, 999, 888],
[0, 0, 0],
[54321, 54321, 8642]
]
for t in tests:
res = additionWithoutCarrying(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: additionWithoutCarrying({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: additionWithoutCarrying({}, {}) returned {},"
"answer: {}".format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights addition without carrying problem<commit_after>#!/usr/local/bin/python
# Code Fights Addition Without Carrying Problem
def additionWithoutCarrying(param1, param2):
s1, s2 = str(param1), str(param2)
shorter = s1 if len(s1) < len(s2) else s2
longer = s2 if shorter == s1 else s1
if len(shorter) < len(longer):
shorter = shorter.zfill(len(longer))
return int(''.join([str(int(a) + int(b))[-1] for (a, b) in
zip(shorter, longer)]))
def main():
tests = [
[456, 1734, 1180],
[99999, 0, 99999],
[999, 999, 888],
[0, 0, 0],
[54321, 54321, 8642]
]
for t in tests:
res = additionWithoutCarrying(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: additionWithoutCarrying({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: additionWithoutCarrying({}, {}) returned {},"
"answer: {}".format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
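The same no-carry sum also falls out of plain digit arithmetic, with no string padding. A sketch that passes the record's listed test cases:

def addition_without_carrying(a, b):
    result, place = 0, 1
    while a or b:
        result += ((a % 10 + b % 10) % 10) * place  # keep only the ones digit
        a, b, place = a // 10, b // 10, place * 10
    return result

assert addition_without_carrying(456, 1734) == 1180
assert addition_without_carrying(54321, 54321) == 8642
assert addition_without_carrying(0, 0) == 0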
|
7f4bd900d1e647fe017ce4c01e279dd41a71a349
|
lms/djangoapps/verify_student/management/commands/set_software_secure_status.py
|
lms/djangoapps/verify_student/management/commands/set_software_secure_status.py
|
"""
Manually set Software Secure verification status.
"""
import sys
from django.core.management.base import BaseCommand
from verify_student.models import (
SoftwareSecurePhotoVerification, VerificationCheckpoint, VerificationStatus
)
class Command(BaseCommand):
"""
Command to trigger the actions that would normally follow Software Secure
returning with the results of a photo verification.
"""
args = "<{approved, denied}, SoftwareSecurePhotoVerification id, [reason_for_denial]>"
def handle(self, *args, **kwargs): # pylint: disable=unused-argument
from verify_student.views import _set_user_requirement_status
status_to_set = args[0]
receipt_id = args[1]
try:
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
except SoftwareSecurePhotoVerification.DoesNotExist:
self.stderr.write(
'SoftwareSecurePhotoVerification with id {id} could not be found.\n'.format(id=receipt_id)
)
sys.exit(1)
if status_to_set == 'approved':
self.stdout.write('Approving verification for {id}.\n'.format(id=receipt_id))
attempt.approve()
_set_user_requirement_status(attempt, 'reverification', 'satisfied')
elif status_to_set == 'denied':
self.stdout.write('Denying verification for {id}.\n'.format(id=receipt_id))
if len(args) >= 3:
reason_for_denial = args[2]
else:
reason_for_denial = 'Denied via management command.'
attempt.deny(reason_for_denial)
_set_user_requirement_status(attempt, 'reverification', 'failed', reason_for_denial)
else:
self.stdout.write('Cannot set id {id} to unrecognized status {status}'.format(
id=receipt_id, status=status_to_set
))
sys.exit(1)
checkpoints = VerificationCheckpoint.objects.filter(photo_verification=attempt).all()
VerificationStatus.add_status_from_checkpoints(
checkpoints=checkpoints,
user=attempt.user,
status=status_to_set
)
|
Add management command to set SoftwareSecure verification status.
|
Add management command to set SoftwareSecure verification status.
|
Python
|
agpl-3.0
|
procangroup/edx-platform,fintech-circle/edx-platform,devs1991/test_edx_docmode,a-parhom/edx-platform,pomegranited/edx-platform,zubair-arbi/edx-platform,JCBarahona/edX,naresh21/synergetics-edx-platform,jjmiranda/edx-platform,a-parhom/edx-platform,deepsrijit1105/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,philanthropy-u/edx-platform,mitocw/edx-platform,samples-agnostic/edx-platform,Lektorium-LLC/edx-platform,BehavioralInsightsTeam/edx-platform,lduarte1991/edx-platform,fluvaccine/edx-platform,waheedahmed/edx-platform,nttks/edx-platform,halvertoluke/edx-platform,caesar2164/edx-platform,eduNEXT/edunext-platform,waheedahmed/edx-platform,nttks/edx-platform,Edraak/edx-platform,MakeHer/edx-platform,shabab12/edx-platform,alu042/edx-platform,halvertoluke/edx-platform,Stanford-Online/edx-platform,Lektorium-LLC/edx-platform,pepeportela/edx-platform,JCBarahona/edX,Edraak/circleci-edx-platform,prarthitm/edxplatform,iivic/BoiseStateX,franosincic/edx-platform,appsembler/edx-platform,mbareta/edx-platform-ft,gsehub/edx-platform,ahmedaljazzar/edx-platform
|
Add management command to set SoftwareSecure verification status.
|
"""
Manually set Software Secure verification status.
"""
import sys
from django.core.management.base import BaseCommand
from verify_student.models import (
SoftwareSecurePhotoVerification, VerificationCheckpoint, VerificationStatus
)
class Command(BaseCommand):
"""
Command to trigger the actions that would normally follow Software Secure
returning with the results of a photo verification.
"""
args = "<{approved, denied}, SoftwareSecurePhotoVerification id, [reason_for_denial]>"
def handle(self, *args, **kwargs): # pylint: disable=unused-argument
from verify_student.views import _set_user_requirement_status
status_to_set = args[0]
receipt_id = args[1]
try:
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
except SoftwareSecurePhotoVerification.DoesNotExist:
self.stderr.write(
'SoftwareSecurePhotoVerification with id {id} could not be found.\n'.format(id=receipt_id)
)
sys.exit(1)
if status_to_set == 'approved':
self.stdout.write('Approving verification for {id}.\n'.format(id=receipt_id))
attempt.approve()
_set_user_requirement_status(attempt, 'reverification', 'satisfied')
elif status_to_set == 'denied':
self.stdout.write('Denying verification for {id}.\n'.format(id=receipt_id))
if len(args) >= 3:
reason_for_denial = args[2]
else:
reason_for_denial = 'Denied via management command.'
attempt.deny(reason_for_denial)
_set_user_requirement_status(attempt, 'reverification', 'failed', reason_for_denial)
else:
self.stdout.write('Cannot set id {id} to unrecognized status {status}'.format(
id=receipt_id, status=status_to_set
))
sys.exit(1)
checkpoints = VerificationCheckpoint.objects.filter(photo_verification=attempt).all()
VerificationStatus.add_status_from_checkpoints(
checkpoints=checkpoints,
user=attempt.user,
status=status_to_set
)
|
<commit_before><commit_msg>Add management command to set SoftwareSecure verification status.<commit_after>
|
"""
Manually set Software Secure verification status.
"""
import sys
from django.core.management.base import BaseCommand
from verify_student.models import (
SoftwareSecurePhotoVerification, VerificationCheckpoint, VerificationStatus
)
class Command(BaseCommand):
"""
Command to trigger the actions that would normally follow Software Secure
returning with the results of a photo verification.
"""
args = "<{approved, denied}, SoftwareSecurePhotoVerification id, [reason_for_denial]>"
def handle(self, *args, **kwargs): # pylint: disable=unused-argument
from verify_student.views import _set_user_requirement_status
status_to_set = args[0]
receipt_id = args[1]
try:
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
except SoftwareSecurePhotoVerification.DoesNotExist:
self.stderr.write(
'SoftwareSecurePhotoVerification with id {id} could not be found.\n'.format(id=receipt_id)
)
sys.exit(1)
if status_to_set == 'approved':
self.stdout.write('Approving verification for {id}.\n'.format(id=receipt_id))
attempt.approve()
_set_user_requirement_status(attempt, 'reverification', 'satisfied')
elif status_to_set == 'denied':
self.stdout.write('Denying verification for {id}.\n'.format(id=receipt_id))
if len(args) >= 3:
reason_for_denial = args[2]
else:
reason_for_denial = 'Denied via management command.'
attempt.deny(reason_for_denial)
_set_user_requirement_status(attempt, 'reverification', 'failed', reason_for_denial)
else:
self.stdout.write('Cannot set id {id} to unrecognized status {status}'.format(
id=receipt_id, status=status_to_set
))
sys.exit(1)
checkpoints = VerificationCheckpoint.objects.filter(photo_verification=attempt).all()
VerificationStatus.add_status_from_checkpoints(
checkpoints=checkpoints,
user=attempt.user,
status=status_to_set
)
|
Add management command to set SoftwareSecure verification status."""
Manually set Software Secure verification status.
"""
import sys
from django.core.management.base import BaseCommand
from verify_student.models import (
SoftwareSecurePhotoVerification, VerificationCheckpoint, VerificationStatus
)
class Command(BaseCommand):
"""
Command to trigger the actions that would normally follow Software Secure
returning with the results of a photo verification.
"""
args = "<{approved, denied}, SoftwareSecurePhotoVerification id, [reason_for_denial]>"
def handle(self, *args, **kwargs): # pylint: disable=unused-argument
from verify_student.views import _set_user_requirement_status
status_to_set = args[0]
receipt_id = args[1]
try:
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
except SoftwareSecurePhotoVerification.DoesNotExist:
self.stderr.write(
'SoftwareSecurePhotoVerification with id {id} could not be found.\n'.format(id=receipt_id)
)
sys.exit(1)
if status_to_set == 'approved':
self.stdout.write('Approving verification for {id}.\n'.format(id=receipt_id))
attempt.approve()
_set_user_requirement_status(attempt, 'reverification', 'satisfied')
elif status_to_set == 'denied':
self.stdout.write('Denying verification for {id}.\n'.format(id=receipt_id))
if len(args) >= 3:
reason_for_denial = args[2]
else:
reason_for_denial = 'Denied via management command.'
attempt.deny(reason_for_denial)
_set_user_requirement_status(attempt, 'reverification', 'failed', reason_for_denial)
else:
self.stdout.write('Cannot set id {id} to unrecognized status {status}'.format(
id=receipt_id, status=status_to_set
))
sys.exit(1)
checkpoints = VerificationCheckpoint.objects.filter(photo_verification=attempt).all()
VerificationStatus.add_status_from_checkpoints(
checkpoints=checkpoints,
user=attempt.user,
status=status_to_set
)
|
<commit_before><commit_msg>Add management command to set SoftwareSecure verification status.<commit_after>"""
Manually set Software Secure verification status.
"""
import sys
from django.core.management.base import BaseCommand
from verify_student.models import (
SoftwareSecurePhotoVerification, VerificationCheckpoint, VerificationStatus
)
class Command(BaseCommand):
"""
Command to trigger the actions that would normally follow Software Secure
returning with the results of a photo verification.
"""
args = "<{approved, denied}, SoftwareSecurePhotoVerification id, [reason_for_denial]>"
def handle(self, *args, **kwargs): # pylint: disable=unused-argument
from verify_student.views import _set_user_requirement_status
status_to_set = args[0]
receipt_id = args[1]
try:
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
except SoftwareSecurePhotoVerification.DoesNotExist:
self.stderr.write(
'SoftwareSecurePhotoVerification with id {id} could not be found.\n'.format(id=receipt_id)
)
sys.exit(1)
if status_to_set == 'approved':
self.stdout.write('Approving verification for {id}.\n'.format(id=receipt_id))
attempt.approve()
_set_user_requirement_status(attempt, 'reverification', 'satisfied')
elif status_to_set == 'denied':
self.stdout.write('Denying verification for {id}.\n'.format(id=receipt_id))
if len(args) >= 3:
reason_for_denial = args[2]
else:
reason_for_denial = 'Denied via management command.'
attempt.deny(reason_for_denial)
_set_user_requirement_status(attempt, 'reverification', 'failed', reason_for_denial)
else:
self.stdout.write('Cannot set id {id} to unrecognized status {status}'.format(
id=receipt_id, status=status_to_set
))
sys.exit(1)
checkpoints = VerificationCheckpoint.objects.filter(photo_verification=attempt).all()
VerificationStatus.add_status_from_checkpoints(
checkpoints=checkpoints,
user=attempt.user,
status=status_to_set
)
|
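The positional `args` string in the command above is the pre-Django-1.8 style; on 1.8+ the same arguments would be declared through `add_arguments`, which also validates the status value for free. Sketch only:

from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = 'Manually set Software Secure verification status.'

    def add_arguments(self, parser):
        parser.add_argument('status', choices=['approved', 'denied'])
        parser.add_argument('receipt_id')
        parser.add_argument('reason_for_denial', nargs='?',
                            default='Denied via management command.')
        # handle() then reads options['status'], options['receipt_id'], ...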
|
4dd66150c922e1c700fad74727955ef72c045f37
|
minecraft/FindCommand.py
|
minecraft/FindCommand.py
|
# MCEdit filter
from albow import alert
displayName = "Find Command"
inputs = (
("Command:", ("string", "value=")),
)
def perform(level, box, options):
command = options["Command:"]
n = 0
result = ""
for (chunk, slices, point) in level.getChunkSlices(box):
for e in chunk.TileEntities:
x = e["x"].value
y = e["y"].value
z = e["z"].value
if (x, y, z) in box:
t = e["id"].value
if t == "Control":
c = e["Command"].value
if c.find(command) >= 0:
n += 1
result += "(%d, %d, %d) %s\n" % (x, y, z, c)
result += "(%d)" % n
alert(result)
|
Add Find Command MCEdit filter
|
Add Find Command MCEdit filter
|
Python
|
mit
|
satgo1546/dot-product,satgo1546/dot-product,satgo1546/dot-product,satgo1546/dot-product,satgo1546/dot-product
|
Add Find Command MCEdit filter
|
# MCEdit filter
from albow import alert
displayName = "Find Command"
inputs = (
("Command:", ("string", "value=")),
)
def perform(level, box, options):
command = options["Command:"]
n = 0
result = ""
for (chunk, slices, point) in level.getChunkSlices(box):
for e in chunk.TileEntities:
x = e["x"].value
y = e["y"].value
z = e["z"].value
if (x, y, z) in box:
t = e["id"].value
if t == "Control":
c = e["Command"].value
if c.find(command) >= 0:
n += 1
result += "(%d, %d, %d) %s\n" % (x, y, z, c)
result += "(%d)" % n
alert(result)
|
<commit_before><commit_msg>Add Find Command MCEdit filter<commit_after>
|
# MCEdit filter
from albow import alert
displayName = "Find Command"
inputs = (
("Command:", ("string", "value=")),
)
def perform(level, box, options):
command = options["Command:"]
n = 0
result = ""
for (chunk, slices, point) in level.getChunkSlices(box):
for e in chunk.TileEntities:
x = e["x"].value
y = e["y"].value
z = e["z"].value
if (x, y, z) in box:
t = e["id"].value
if t == "Control":
c = e["Command"].value
if c.find(command) >= 0:
n += 1
result += "(%d, %d, %d) %s\n" % (x, y, z, c)
result += "(%d)" % n
alert(result)
|
Add Find Command MCEdit filter# MCEdit filter
from albow import alert
displayName = "Find Command"
inputs = (
("Command:", ("string", "value=")),
)
def perform(level, box, options):
command = options["Command:"]
n = 0
result = ""
for (chunk, slices, point) in level.getChunkSlices(box):
for e in chunk.TileEntities:
x = e["x"].value
y = e["y"].value
z = e["z"].value
if (x, y, z) in box:
t = e["id"].value
if t == "Control":
c = e["Command"].value
if c.find(command) >= 0:
n += 1
result += "(%d, %d, %d) %s\n" % (x, y, z, c)
result += "(%d)" % n
alert(result)
|
<commit_before><commit_msg>Add Find Command MCEdit filter<commit_after># MCEdit filter
from albow import alert
displayName = "Find Command"
inputs = (
("Command:", ("string", "value=")),
)
def perform(level, box, options):
command = options["Command:"]
n = 0
result = ""
for (chunk, slices, point) in level.getChunkSlices(box):
for e in chunk.TileEntities:
x = e["x"].value
y = e["y"].value
z = e["z"].value
if (x, y, z) in box:
t = e["id"].value
if t == "Control":
c = e["Command"].value
if c.find(command) >= 0:
n += 1
result += "(%d, %d, %d) %s\n" % (x, y, z, c)
result += "(%d)" % n
alert(result)
|
|
eea33e6207da7446e1713eb4d78b76d37ae5eaf2
|
with_celery.py
|
with_celery.py
|
from celery import Celery
# The host in which RabbitMQ is running
HOST = 'amqp://guest@localhost'
app = Celery('pages_celery', broker=HOST)
@app.task
def work(msg):
print msg
# To execute the task:
#
# $ python
# >>> from with_celery import work
# >>> work.delay('Hi there!!')
|
Add sample of scheduler using celery
|
Add sample of scheduler using celery
|
Python
|
apache-2.0
|
jovannypcg/python_scheduler
|
Add sample of scheduler using celery
|
from celery import Celery
# The host in which RabbitMQ is running
HOST = 'amqp://guest@localhost'
app = Celery('pages_celery', broker=HOST)
@app.task
def work(msg):
print msg
# To execute the task:
#
# $ python
# >>> from with_celery import work
# >>> work.delay('Hi there!!')
|
<commit_before><commit_msg>Add sample of scheduler using celery<commit_after>
|
from celery import Celery
# The host in which RabbitMQ is running
HOST = 'amqp://guest@localhost'
app = Celery('pages_celery', broker=HOST)
@app.task
def work(msg):
print msg
# To execute the task:
#
# $ python
# >>> from with_celery import work
# >>> work.delay('Hi there!!')
|
Add sample of scheduler using celeryfrom celery import Celery
# The host in which RabbitMQ is running
HOST = 'amqp://guest@localhost'
app = Celery('pages_celery', broker=HOST)
@app.task
def work(msg):
print msg
# To execute the task:
#
# $ python
# >>> from with_celery import work
# >>> work.delay('Hi there!!')
|
<commit_before><commit_msg>Add sample of scheduler using celery<commit_after>from celery import Celery
# The host in which RabbitMQ is running
HOST = 'amqp://guest@localhost'
app = Celery('pages_celery', broker=HOST)
@app.task
def work(msg):
print msg
# To execute the task:
#
# $ python
# >>> from with_celery import work
# >>> work.delay('Hi there!!')
|
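The sample above is Python 2 (`print msg` is a statement). On Python 3 and current Celery the task body needs the print function, and a worker is started from the CLI; a sketch of the ported module plus invocation, assuming the module keeps the name with_celery:

from celery import Celery

app = Celery('pages_celery', broker='amqp://guest@localhost')

@app.task
def work(msg):
    print(msg)

# $ celery -A with_celery worker --loglevel=info
# >>> from with_celery import work
# >>> work.delay('Hi there!!')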
|
b4b2b80cb1d0c0729e8e98085c2cfc3bc55ddda3
|
LongestLines.py
|
LongestLines.py
|
# Longest Lines
#
# https://www.codeeval.com/open_challenges/2/
#
# Challenge Description: Write a program which reads a file and prints to
# stdout the specified number of the longest lines that are sorted based on
# their length in descending order.
import sys
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
number_of_lines_to_print = int(test_cases.readline().rstrip())
lines = []
for case in test_cases:
lines.append(case.rstrip())
lines.sort(key=len, reverse=True)
for l in lines[:number_of_lines_to_print]:
print(l)
|
Solve the Longest Lines challenge using Python3
|
Solve the Longest Lines challenge using Python3
|
Python
|
mit
|
TommyN94/CodeEvalSolutions,TommyN94/CodeEvalSolutions
|
Solve the Longest Lines challenge using Python3
|
# Longest Lines
#
# https://www.codeeval.com/open_challenges/2/
#
# Challenge Description: Write a program which reads a file and prints to
# stdout the specified number of the longest lines that are sorted based on
# their length in descending order.
import sys
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
number_of_lines_to_print = int(test_cases.readline().rstrip())
lines = []
for case in test_cases:
lines.append(case.rstrip())
lines.sort(key=len, reverse=True)
for l in lines[:number_of_lines_to_print]:
print(l)
|
<commit_before><commit_msg>Solve the Longest Lines challenge using Python3<commit_after>
|
# Longest Lines
#
# https://www.codeeval.com/open_challenges/2/
#
# Challenge Description: Write a program which reads a file and prints to
# stdout the specified number of the longest lines that are sorted based on
# their length in descending order.
import sys
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
number_of_lines_to_print = int(test_cases.readline().rstrip())
lines = []
for case in test_cases:
lines.append(case.rstrip())
lines.sort(key=len, reverse=True)
for l in lines[:number_of_lines_to_print]:
print(l)
|
Solve the Longest Lines challenge using Python3# Longest Lines
#
# https://www.codeeval.com/open_challenges/2/
#
# Challenge Description: Write a program which reads a file and prints to
# stdout the specified number of the longest lines that are sorted based on
# their length in descending order.
import sys
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
number_of_lines_to_print = int(test_cases.readline().rstrip())
lines = []
for case in test_cases:
lines.append(case.rstrip())
lines.sort(key=len, reverse=True)
for l in lines[:number_of_lines_to_print]:
print(l)
|
<commit_before><commit_msg>Solve the Longest Lines challenge using Python3<commit_after># Longest Lines
#
# https://www.codeeval.com/open_challenges/2/
#
# Challenge Description: Write a program which reads a file and prints to
# stdout the specified number of the longest lines that are sorted based on
# their length in descending order.
import sys
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
number_of_lines_to_print = int(test_cases.readline().rstrip())
lines = []
for case in test_cases:
lines.append(case.rstrip())
lines.sort(key=len, reverse=True)
for l in lines[:number_of_lines_to_print]:
print(l)
|
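Sorting every line costs O(n log n) over the whole file; `heapq.nlargest` keeps only the top N while streaming the rest, which matters for big inputs. An equivalent sketch of the same challenge:

import heapq
import sys

with open(sys.argv[1]) as test_cases:
    n = int(test_cases.readline())
    longest = heapq.nlargest(n, (line.rstrip() for line in test_cases), key=len)

for line in longest:
    print(line)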
|
37e674f05547c7b6b93f447477443644865975d1
|
urls.py
|
urls.py
|
__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
Bring back the Root URL config
|
Bring back the Root URL config
The file was probably deleted by a mistake, we need it, so took it back.
|
Python
|
bsd-2-clause
|
ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark
|
Bring back the Root URL config
The file was probably deleted by a mistake, we need it, so took it back.
|
__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
<commit_before><commit_msg>Bring back the Root URL config
The file was probably deleted by a mistake, we need it, so took it back.<commit_after>
|
__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
Bring back the Root URL config
The file was probably deleted by a mistake, we need it, so took it back.__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
<commit_before><commit_msg>Bring back the Root URL config
The file was probably deleted by a mistake, we need it, so took it back.<commit_after>__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
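`patterns()` was removed in Django 1.10, so the config above only runs on older Django. The modern equivalent is a plain list; sketch only, with the app URLconfs assumed unchanged:

from django.contrib import admin
from django.urls import include, path
from django.views.generic import RedirectView

urlpatterns = [
    path('', RedirectView.as_view(url='/upload/basic/plus/')),
    path('upload/', include('fileupload.urls')),
    path('plots/', include('plots.urls')),
    path('admin/', admin.site.urls),
]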
|
90399f50a3f50d9193ae1e6b2042215fb388230f
|
VideoStream.py
|
VideoStream.py
|
import cv2
import numpy as np
cap = cv2.VideoCapture(0)
print('Beginning Capture Device opening...\n')
print('Capture device opened?', cap.isOpened())
while True:
ret, frame = cap.read()
gray_image = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
cv2.imshow('frame', gray_image)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release the capture
cap.release()
cv2.destroyAllWindows()
|
Create Video Stream program for webcam
|
Create Video Stream program for webcam
|
Python
|
apache-2.0
|
SentientCNC/Sentient-CNC
|
Create Video Stream program for webcam
|
import cv2
import numpy as np
cap = cv2.VideoCapture(0)
print('Beginning Capture Device opening...\n')
print('Capture device opened?', cap.isOpened())
while True:
ret, frame = cap.read()
gray_image = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
cv2.imshow('frame', gray_image)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release the capture
cap.release()
cv2.destroyAllWindows()
|
<commit_before><commit_msg>Create Video Stream program for webcam<commit_after>
|
import cv2
import numpy as np
cap = cv2.VideoCapture(0)
print('Beginning Capture Device opening...\n')
print('Capture device opened?', cap.isOpened())
while True:
ret, frame = cap.read()
gray_image = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
cv2.imshow('frame', gray_image)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release the capture
cap.release()
cv2.destroyAllWindows()
|
Create Video Stream program for webcamimport cv2
import numpy as np
cap = cv2.VideoCapture(0)
print('Beginning Capture Device opening...\n')
print('Capture device opened?', cap.isOpened())
while True:
ret, frame = cap.read()
gray_image = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
cv2.imshow('frame', gray_image)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release the capture
cap.release()
cv2.destroyAllWindows()
|
<commit_before><commit_msg>Create Video Stream program for webcam<commit_after>import cv2
import numpy as np
cap = cv2.VideoCapture(0)
print('Beginning Capture Device opening...\n')
print('Capture device opened?', cap.isOpened())
while True:
ret, frame = cap.read()
gray_image = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
cv2.imshow('frame', gray_image)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release the capture
cap.release()
cv2.destroyAllWindows()
|
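One robustness gap in the loop above: `cap.read()` returns `ret == False` when no frame arrives (camera unplugged, device busy), and `cvtColor` then raises on the `None` frame. A guarded version of the read loop:

import cv2

cap = cv2.VideoCapture(0)
while True:
    ret, frame = cap.read()
    if not ret:  # no frame delivered, stop instead of crashing
        break
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    cv2.imshow('frame', gray)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()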
|
1437bb868844731d3fdb13c6dd52dfd706df6f63
|
bin/ext_service/clean_habitica_user.py
|
bin/ext_service/clean_habitica_user.py
|
import argparse
import sys
import logging
import emission.core.get_database as edb
import emission.net.ext_service.habitica.proxy as proxy
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument("user_email",
help="the email address of the user whose habitica account you want to clean up")
args = parser.parse_args()
del_uuid = edb.get_uuid_db().find_one({'user_email': args.user_email})['uuid']
logging.debug("Found uuid %s" % del_uuid)
del_habitica_creds = edb.get_habitica_db().find_one({'user_id': del_uuid})
logging.debug("del_habitica_creds = %s" % del_habitica_creds)
del_result = proxy.habiticaProxy(del_uuid, "DELETE",
"/api/v3/user",
{'password': del_habitica_creds['habitica_password']})
logging.debug("delete result = %s" % del_result)
|
Add a new script to clean up a habitica user given user email
|
Add a new script to clean up a habitica user given user email
- Looks up uuid
- uses that to lookup password
- calls delete method
Simple!
|
Python
|
bsd-3-clause
|
sunil07t/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,yw374cornell/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server
|
Add a new script to clean up a habitica user given user email
- Looks up uuid
- uses that to lookup password
- calls delete method
Simple!
|
import argparse
import sys
import logging
import emission.core.get_database as edb
import emission.net.ext_service.habitica.proxy as proxy
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument("user_email",
help="the email address of the user whose habitica account you want to clean up")
args = parser.parse_args()
del_uuid = edb.get_uuid_db().find_one({'user_email': args.user_email})['uuid']
logging.debug("Found uuid %s" % del_uuid)
del_habitica_creds = edb.get_habitica_db().find_one({'user_id': del_uuid})
logging.debug("del_habitica_creds = %s" % del_habitica_creds)
del_result = proxy.habiticaProxy(del_uuid, "DELETE",
"/api/v3/user",
{'password': del_habitica_creds['habitica_password']})
logging.debug("delete result = %s" % del_result)
|
<commit_before><commit_msg>Add a new script to clean up a habitica user given user email
- Looks up uuid
- uses that to lookup password
- calls delete method
Simple!<commit_after>
|
import argparse
import sys
import logging
import emission.core.get_database as edb
import emission.net.ext_service.habitica.proxy as proxy
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument("user_email",
help="the email address of the user whose habitica account you want to clean up")
args = parser.parse_args()
del_uuid = edb.get_uuid_db().find_one({'user_email': args.user_email})['uuid']
logging.debug("Found uuid %s" % del_uuid)
del_habitica_creds = edb.get_habitica_db().find_one({'user_id': del_uuid})
logging.debug("del_habitica_creds = %s" % del_habitica_creds)
del_result = proxy.habiticaProxy(del_uuid, "DELETE",
"/api/v3/user",
{'password': del_habitica_creds['habitica_password']})
logging.debug("delete result = %s" % del_result)
|
Add a new script to clean up a habitica user given user email
- Looks up uuid
- uses that to lookup password
- calls delete method
Simple!import argparse
import sys
import logging
import emission.core.get_database as edb
import emission.net.ext_service.habitica.proxy as proxy
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument("user_email",
help="the email address of the user whose habitica account you want to clean up")
args = parser.parse_args()
del_uuid = edb.get_uuid_db().find_one({'user_email': args.user_email})['uuid']
logging.debug("Found uuid %s" % del_uuid)
del_habitica_creds = edb.get_habitica_db().find_one({'user_id': del_uuid})
logging.debug("del_habitica_creds = %s" % del_habitica_creds)
del_result = proxy.habiticaProxy(del_uuid, "DELETE",
"/api/v3/user",
{'password': del_habitica_creds['habitica_password']})
logging.debug("delete result = %s" % del_result)
|
<commit_before><commit_msg>Add a new script to clean up a habitica user given user email
- Looks up uuid
- uses that to lookup password
- calls delete method
Simple!<commit_after>import argparse
import sys
import logging
import emission.core.get_database as edb
import emission.net.ext_service.habitica.proxy as proxy
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument("user_email",
help="the email address of the user whose habitica account you want to clean up")
args = parser.parse_args()
del_uuid = edb.get_uuid_db().find_one({'user_email': args.user_email})['uuid']
logging.debug("Found uuid %s" % del_uuid)
del_habitica_creds = edb.get_habitica_db().find_one({'user_id': del_uuid})
logging.debug("del_habitica_creds = %s" % del_habitica_creds)
del_result = proxy.habiticaProxy(del_uuid, "DELETE",
"/api/v3/user",
{'password': del_habitica_creds['habitica_password']})
logging.debug("delete result = %s" % del_result)
|
|
83d4ac6c3565044727c9b3fcbada9966d529a80e
|
lib/font_loader.py
|
lib/font_loader.py
|
import os
import sys
import logging
FONT_FILE_NAME_LIST = (
"fontawesome-webfont.ttf",
)
FONT_DIRECTORY = "share"
FONT_DIRECTORY_SYSTEM = "/usr/share/fonts"
FONT_DIRECTORY_USER = os.path.join(os.environ['HOME'], ".local/share/fonts")
class FontLoader:
def __init__(self):
self.fonts_loaded = []
self.logger = logging.getLogger('FontLoader')
def load(self):
for font_file_name in FONT_FILE_NAME_LIST:
# check if font is in the project font directory
font_source_path = os.path.join(FONT_DIRECTORY, font_file_name)
if not os.path.isfile(font_source_path):
raise IOError("Font '{}' not found in project directories".format(
font_file_name
))
# check if the font is installed at system level
if os.path.isfile(os.path.join(FONT_DIRECTORY_SYSTEM, font_file_name)):
self.logger.debug("Font '{}' found in system directory".format(
font_file_name
))
continue
# check if the font is installed at user level
if os.path.isfile(os.path.join(FONT_DIRECTORY_USER, font_file_name)):
self.logger.debug("Font '{}' found in user directory".format(
font_file_name
))
continue
# if the font is not installed
font_target_path = os.path.join(FONT_DIRECTORY_USER, font_file_name)
os.symlink(
os.path.join(os.getcwd(), font_source_path),
font_target_path
)
self.fonts_loaded.append(font_target_path)
self.logger.debug("Font '{}' loaded in user directory: '{}'".format(
font_file_name,
font_target_path
))
def unload(self):
for font_path in self.fonts_loaded:
os.unlink(font_path)
self.logger.debug("Font '{}' unloaded".format(
font_path
))
self.fonts_loaded = []
|
Add forgotten font loader lib
|
Add forgotten font loader lib
|
Python
|
mit
|
Nadeflore/dakara-player-vlc
|
Add forgotten font loader lib
|
import os
import sys
import logging
FONT_FILE_NAME_LIST = (
"fontawesome-webfont.ttf",
)
FONT_DIRECTORY = "share"
FONT_DIRECTORY_SYSTEM = "/usr/share/fonts"
FONT_DIRECTORY_USER = os.path.join(os.environ['HOME'], ".local/share/fonts")
class FontLoader:
def __init__(self):
self.fonts_loaded = []
self.logger = logging.getLogger('FontLoader')
def load(self):
for font_file_name in FONT_FILE_NAME_LIST:
# check if font is in the project font directory
font_source_path = os.path.join(FONT_DIRECTORY, font_file_name)
if not os.path.isfile(font_source_path):
raise IOError("Font '{}' not found in project directories".format(
font_file_name
))
# check if the font is installed at system level
if os.path.isfile(os.path.join(FONT_DIRECTORY_SYSTEM, font_file_name)):
self.logger.debug("Font '{}' found in system directory".format(
font_file_name
))
continue
# check if the font is installed at user level
if os.path.isfile(os.path.join(FONT_DIRECTORY_USER, font_file_name)):
self.logger.debug("Font '{}' found in user directory".format(
font_file_name
))
continue
# if the font is not installed
font_target_path = os.path.join(FONT_DIRECTORY_USER, font_file_name)
os.symlink(
os.path.join(os.getcwd(), font_source_path),
font_target_path
)
self.fonts_loaded.append(font_target_path)
self.logger.debug("Font '{}' loaded in user directory: '{}'".format(
font_file_name,
font_target_path
))
def unload(self):
for font_path in self.fonts_loaded:
os.unlink(font_path)
self.logger.debug("Font '{}' unloaded".format(
font_path
))
self.fonts_loaded = []
|
<commit_before><commit_msg>Add forgotten font loader lib<commit_after>
|
import os
import sys
import logging
FONT_FILE_NAME_LIST = (
"fontawesome-webfont.ttf",
)
FONT_DIRECTORY = "share"
FONT_DIRECTORY_SYSTEM = "/usr/share/fonts"
FONT_DIRECTORY_USER = os.path.join(os.environ['HOME'], ".local/share/fonts")
class FontLoader:
def __init__(self):
self.fonts_loaded = []
self.logger = logging.getLogger('FontLoader')
def load(self):
for font_file_name in FONT_FILE_NAME_LIST:
# check if font is in the project font directory
font_source_path = os.path.join(FONT_DIRECTORY, font_file_name)
if not os.path.isfile(font_source_path):
raise IOError("Font '{}' not found in project directories".format(
font_file_name
))
# check if the font is installed at system level
if os.path.isfile(os.path.join(FONT_DIRECTORY_SYSTEM, font_file_name)):
self.logger.debug("Font '{}' found in system directory".format(
font_file_name
))
continue
# check if the font is installed at user level
if os.path.isfile(os.path.join(FONT_DIRECTORY_USER, font_file_name)):
self.logger.debug("Font '{}' found in user directory".format(
font_file_name
))
continue
# if the font is not installed
font_target_path = os.path.join(FONT_DIRECTORY_USER, font_file_name)
os.symlink(
os.path.join(os.getcwd(), font_source_path),
font_target_path
)
self.fonts_loaded.append(font_target_path)
self.logger.debug("Font '{}' loaded in user directory: '{}'".format(
font_file_name,
font_target_path
))
def unload(self):
for font_path in self.fonts_loaded:
os.unlink(font_path)
self.logger.debug("Font '{}' unloaded".format(
font_path
))
self.fonts_loaded = []
|
Add forgotten font loader libimport os
import sys
import logging
FONT_FILE_NAME_LIST = (
"fontawesome-webfont.ttf",
)
FONT_DIRECTORY = "share"
FONT_DIRECTORY_SYSTEM = "/usr/share/fonts"
FONT_DIRECTORY_USER = os.path.join(os.environ['HOME'], ".local/share/fonts")
class FontLoader:
def __init__(self):
self.fonts_loaded = []
self.logger = logging.getLogger('FontLoader')
def load(self):
for font_file_name in FONT_FILE_NAME_LIST:
# check if font is in the project font directory
font_source_path = os.path.join(FONT_DIRECTORY, font_file_name)
if not os.path.isfile(font_source_path):
raise IOError("Font '{}' not found in project directories".format(
font_file_name
))
# check if the font is installed at system level
if os.path.isfile(os.path.join(FONT_DIRECTORY_SYSTEM, font_file_name)):
self.logger.debug("Font '{}' found in system directory".format(
font_file_name
))
continue
# check if the font is installed at user level
if os.path.isfile(os.path.join(FONT_DIRECTORY_USER, font_file_name)):
self.logger.debug("Font '{}' found in user directory".format(
font_file_name
))
continue
# if the font is not installed
font_target_path = os.path.join(FONT_DIRECTORY_USER, font_file_name)
os.symlink(
os.path.join(os.getcwd(), font_source_path),
font_target_path
)
self.fonts_loaded.append(font_target_path)
self.logger.debug("Font '{}' loaded in user directory: '{}'".format(
font_file_name,
font_target_path
))
def unload(self):
for font_path in self.fonts_loaded:
os.unlink(font_path)
self.logger.debug("Font '{}' unloaded".format(
font_path
))
self.fonts_loaded = []
|
<commit_before><commit_msg>Add forgotten font loader lib<commit_after>import os
import sys
import logging
FONT_FILE_NAME_LIST = (
"fontawesome-webfont.ttf",
)
FONT_DIRECTORY = "share"
FONT_DIRECTORY_SYSTEM = "/usr/share/fonts"
FONT_DIRECTORY_USER = os.path.join(os.environ['HOME'], ".local/share/fonts")
class FontLoader:
def __init__(self):
self.fonts_loaded = []
self.logger = logging.getLogger('FontLoader')
def load(self):
for font_file_name in FONT_FILE_NAME_LIST:
# check if font is in the project font directory
font_source_path = os.path.join(FONT_DIRECTORY, font_file_name)
if not os.path.isfile(font_source_path):
raise IOError("Font '{}' not found in project directories".format(
font_file_name
))
# check if the font is installed at system level
if os.path.isfile(os.path.join(FONT_DIRECTORY_SYSTEM, font_file_name)):
self.logger.debug("Font '{}' found in system directory".format(
font_file_name
))
continue
# check if the font is installed at user level
if os.path.isfile(os.path.join(FONT_DIRECTORY_USER, font_file_name)):
self.logger.debug("Font '{}' found in user directory".format(
font_file_name
))
continue
# if the font is not installed
font_target_path = os.path.join(FONT_DIRECTORY_USER, font_file_name)
os.symlink(
os.path.join(os.getcwd(), font_source_path),
font_target_path
)
self.fonts_loaded.append(font_target_path)
self.logger.debug("Font '{}' loaded in user directory: '{}'".format(
font_file_name,
font_target_path
))
def unload(self):
for font_path in self.fonts_loaded:
os.unlink(font_path)
self.logger.debug("Font '{}' unloaded".format(
font_path
))
self.fonts_loaded = []
|
|
151e8fc71e5ef2e31db13730bff57bc8fd915c30
|
paystackapi/tests/test_invoice.py
|
paystackapi/tests/test_invoice.py
|
import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.invoice import Invoice
class TestInvoice(BaseTestCase):
@httpretty.activate
def test_create_invoice(self):
"""Method defined to test create Invoice."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice created"}',
status=201,
)
response = Invoice.create(
customer="CUS_je02lbimlqixzax",
amount=42000,
due_date="2019-05-08T00:00:00.000Z"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_list_invoice(self):
"""Method defined to test list Invoice."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice retrieved"}',
status=201,
)
response = Invoice.list(
customer="CUS_je02lbimlqixzax",
status="pending",
currency="NGN",
paid="false",
include_archive="true"
)
self.assertTrue(response['status'])
|
Add test case for list invoice
|
Add test case for list invoice
|
Python
|
mit
|
andela-sjames/paystack-python
|
Add test case for list invoice
|
import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.invoice import Invoice
class TestInvoice(BaseTestCase):
@httpretty.activate
def test_create_invoice(self):
"""Method defined to test create Invoice."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice created"}',
status=201,
)
response = Invoice.create(
customer="CUS_je02lbimlqixzax",
amount=42000,
due_date="2019-05-08T00:00:00.000Z"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_list_invoice(self):
"""Method defined to test list Invoice."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice retrieved"}',
status=201,
)
response = Invoice.list(
customer="CUS_je02lbimlqixzax",
status="pending",
currency="NGN",
paid="false",
include_archive="true"
)
self.assertTrue(response['status'])
|
<commit_before><commit_msg>Add test case for list invoice<commit_after>
|
import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.invoice import Invoice
class TestInvoice(BaseTestCase):
@httpretty.activate
def test_create_invoice(self):
"""Method defined to test create Invoice."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice created"}',
status=201,
)
response = Invoice.create(
customer="CUS_je02lbimlqixzax",
amount=42000,
due_date="2019-05-08T00:00:00.000Z"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_list_invoice(self):
"""Method defined to test list Invoice."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice retrieved"}',
status=201,
)
response = Invoice.list(
customer="CUS_je02lbimlqixzax",
status="pending",
currency="NGN",
paid="false",
include_archive="true"
)
self.assertTrue(response['status'])
|
Add test case for list invoiceimport httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.invoice import Invoice
class TestInvoice(BaseTestCase):
@httpretty.activate
def test_create_invoice(self):
"""Method defined to test create Invoice."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice created"}',
status=201,
)
response = Invoice.create(
customer="CUS_je02lbimlqixzax",
amount=42000,
due_date="2019-05-08T00:00:00.000Z"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_list_invoice(self):
"""Method defined to test list Invoice."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice retrieved"}',
status=201,
)
response = Invoice.list(
customer="CUS_je02lbimlqixzax",
status="pending",
currency="NGN",
paid="false",
include_archive="true"
)
self.assertTrue(response['status'])
|
<commit_before><commit_msg>Add test case for list invoice<commit_after>import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.invoice import Invoice
class TestInvoice(BaseTestCase):
@httpretty.activate
def test_create_invoice(self):
"""Method defined to test create Invoice."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice created"}',
status=201,
)
response = Invoice.create(
customer="CUS_je02lbimlqixzax",
amount=42000,
due_date="2019-05-08T00:00:00.000Z"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_list_invoice(self):
"""Method defined to test list Invoice."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice retrieved"}',
status=201,
)
response = Invoice.list(
customer="CUS_je02lbimlqixzax",
status="pending",
currency="NGN",
paid="false",
include_archive="true"
)
self.assertTrue(response['status'])
|
|
dcd1d962feec4f3cd914677545f74924ad9e6351
|
testing/test_direct_wrapper.py
|
testing/test_direct_wrapper.py
|
import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
|
Add test for file creation of low level library
|
Add test for file creation of low level library
|
Python
|
mit
|
mindriot101/fitsio-cffi
|
Add test for file creation of low level library
|
import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
|
<commit_before><commit_msg>Add test for file creation of low level library<commit_after>
|
import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
|
Add test for file creation of low level libraryimport os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
|
<commit_before><commit_msg>Add test for file creation of low level library<commit_after>import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
|
|
e426afbe9ccbc72a1aa0d00032144e8b9b2b8cdc
|
gusset/colortable.py
|
gusset/colortable.py
|
"""
Pretty table generation.
"""
from itertools import cycle
from string import capwords
from fabric.colors import red, green, blue, magenta, white, yellow
class ColorRow(dict):
"""
Ordered collection of column values.
"""
def __init__(self, table, **kwargs):
super(ColorRow, self).__init__(self)
self.table = table
for column in self.table.columns:
self[column] = kwargs.get(column)
def __str__(self):
"""
Generate a formatted and colored string for this row.
"""
def format_cell(color, item):
column, value = item
return color(" {}".format(value).ljust(1 + self.table.column_widths[column]))
# get items in column order
items = [(column, self[column]) for column in self.table.columns]
# format cells with color and length
cells = [format_cell(color, item) for color, item in zip(cycle(self.table.colors), items)]
return " ".join(cells)
class ColorTable(object):
"""
Simple row/column table.
"""
def __init__(self, *columns, **kwargs):
"""
Create a table with fixed columns.
:param columns: *args style list of column names
:param kwargs: additional options, including `sort_key` and `colors`
"""
self.columns = columns
self.sort_key = kwargs.get("sort_key")
self.colors = kwargs.get("colors", [red, green, blue, magenta, white, yellow])
self.header = ColorRow(self, **dict([(column, capwords(column)) for column in self.columns]))
# initialize column widths based on header
self.column_widths = dict([(column, len(self.header[column])) for column in self.columns])
self.rows = []
@property
def separator(self):
"""
Generate a separator row using current column widths.
"""
cells = dict([(column, "-" * self.column_widths[column]) for column in self.columns])
return ColorRow(self, **cells)
def add(self, **kwargs):
row = ColorRow(self, **kwargs)
# update column widths
for column in self.columns:
self.column_widths[column] = max(self.column_widths[column], len(row[column]))
self.rows.append(row)
def __str__(self):
"""
Generate a colored table.
"""
rows = sorted(self.rows, key=self.sort_key) if self.sort_key else self.rows
return "\n".join(map(str, [self.header, self.separator] + rows))
if __name__ == '__main__':
table = ColorTable("first", "last", sort_key=lambda row: (row["last"], row["first"]))
table.add(first="George", last="Washington")
table.add(first="John", last="Adams")
table.add(first="Thomas", last="Jefferson")
print table
|
Implement utility for colored, tabular output using fabric's color controls.
|
Implement utility for colored, tabular output using fabric's color controls.
|
Python
|
apache-2.0
|
locationlabs/gusset
|
Implement utility for colored, tabular output using fabric's color controls.
|
"""
Pretty table generation.
"""
from itertools import cycle
from string import capwords
from fabric.colors import red, green, blue, magenta, white, yellow
class ColorRow(dict):
"""
Ordered collection of column values.
"""
def __init__(self, table, **kwargs):
super(ColorRow, self).__init__(self)
self.table = table
for column in self.table.columns:
self[column] = kwargs.get(column)
def __str__(self):
"""
Generate a formatted and colored string for this row.
"""
def format_cell(color, item):
column, value = item
return color(" {}".format(value).ljust(1 + self.table.column_widths[column]))
# get items in column order
items = [(column, self[column]) for column in self.table.columns]
# format cells with color and length
cells = [format_cell(color, item) for color, item in zip(cycle(self.table.colors), items)]
return " ".join(cells)
class ColorTable(object):
"""
Simple row/column table.
"""
def __init__(self, *columns, **kwargs):
"""
Create a table with fixed columns.
:param columns: *args style list of column names
:param kwargs: additional options, including `sort_key` and `colors`
"""
self.columns = columns
self.sort_key = kwargs.get("sort_key")
self.colors = kwargs.get("colors", [red, green, blue, magenta, white, yellow])
self.header = ColorRow(self, **dict([(column, capwords(column)) for column in self.columns]))
# initialize column widths based on header
self.column_widths = dict([(column, len(self.header[column])) for column in self.columns])
self.rows = []
@property
def separator(self):
"""
Generate a separator row using current column widths.
"""
cells = dict([(column, "-" * self.column_widths[column]) for column in self.columns])
return ColorRow(self, **cells)
def add(self, **kwargs):
row = ColorRow(self, **kwargs)
# update column widths
for column in self.columns:
self.column_widths[column] = max(self.column_widths[column], len(row[column]))
self.rows.append(row)
def __str__(self):
"""
Generate a colored table.
"""
rows = sorted(self.rows, key=self.sort_key) if self.sort_key else self.rows
return "\n".join(map(str, [self.header, self.separator] + rows))
if __name__ == '__main__':
table = ColorTable("first", "last", sort_key=lambda row: (row["last"], row["first"]))
table.add(first="George", last="Washington")
table.add(first="John", last="Adams")
table.add(first="Thomas", last="Jefferson")
print table
|
<commit_before><commit_msg>Implement utility for colored, tabular output using fabric's color controls.<commit_after>
|
"""
Pretty table generation.
"""
from itertools import cycle
from string import capwords
from fabric.colors import red, green, blue, magenta, white, yellow
class ColorRow(dict):
"""
Ordered collection of column values.
"""
def __init__(self, table, **kwargs):
super(ColorRow, self).__init__(self)
self.table = table
for column in self.table.columns:
self[column] = kwargs.get(column)
def __str__(self):
"""
Generate a formatted and colored string for this row.
"""
def format_cell(color, item):
column, value = item
return color(" {}".format(value).ljust(1 + self.table.column_widths[column]))
# get items in column order
items = [(column, self[column]) for column in self.table.columns]
# format cells with color and length
cells = [format_cell(color, item) for color, item in zip(cycle(self.table.colors), items)]
return " ".join(cells)
class ColorTable(object):
"""
Simple row/column table.
"""
def __init__(self, *columns, **kwargs):
"""
Create a table with fixed columns.
:param columns: *args style list of column names
:param kwargs: additional options, including `sort_key` and `colors`
"""
self.columns = columns
self.sort_key = kwargs.get("sort_key")
self.colors = kwargs.get("colors", [red, green, blue, magenta, white, yellow])
self.header = ColorRow(self, **dict([(column, capwords(column)) for column in self.columns]))
# initialize column widths based on header
self.column_widths = dict([(column, len(self.header[column])) for column in self.columns])
self.rows = []
@property
def separator(self):
"""
Generate a separator row using current column widths.
"""
cells = dict([(column, "-" * self.column_widths[column]) for column in self.columns])
return ColorRow(self, **cells)
def add(self, **kwargs):
row = ColorRow(self, **kwargs)
# update column widths
for column in self.columns:
self.column_widths[column] = max(self.column_widths[column], len(row[column]))
self.rows.append(row)
def __str__(self):
"""
Generate a colored table.
"""
rows = sorted(self.rows, key=self.sort_key) if self.sort_key else self.rows
return "\n".join(map(str, [self.header, self.separator] + rows))
if __name__ == '__main__':
table = ColorTable("first", "last", sort_key=lambda row: (row["last"], row["first"]))
table.add(first="George", last="Washington")
table.add(first="John", last="Adams")
table.add(first="Thomas", last="Jefferson")
print table
|
Implement utility for colored, tabular output using fabric's color controls."""
Pretty table generation.
"""
from itertools import cycle
from string import capwords
from fabric.colors import red, green, blue, magenta, white, yellow
class ColorRow(dict):
"""
Ordered collection of column values.
"""
def __init__(self, table, **kwargs):
super(ColorRow, self).__init__(self)
self.table = table
for column in self.table.columns:
self[column] = kwargs.get(column)
def __str__(self):
"""
Generate a formatted and colored string for this row.
"""
def format_cell(color, item):
column, value = item
return color(" {}".format(value).ljust(1 + self.table.column_widths[column]))
# get items in column order
items = [(column, self[column]) for column in self.table.columns]
# format cells with color and length
cells = [format_cell(color, item) for color, item in zip(cycle(self.table.colors), items)]
return " ".join(cells)
class ColorTable(object):
"""
Simple row/column table.
"""
def __init__(self, *columns, **kwargs):
"""
Create a table with fixed columns.
:param columns: *args style list of column names
:param kwargs: additional options, including `sort_key` and `colors`
"""
self.columns = columns
self.sort_key = kwargs.get("sort_key")
self.colors = kwargs.get("colors", [red, green, blue, magenta, white, yellow])
self.header = ColorRow(self, **dict([(column, capwords(column)) for column in self.columns]))
# initialize column widths based on header
self.column_widths = dict([(column, len(self.header[column])) for column in self.columns])
self.rows = []
@property
def separator(self):
"""
Generate a separator row using current column widths.
"""
cells = dict([(column, "-" * self.column_widths[column]) for column in self.columns])
return ColorRow(self, **cells)
def add(self, **kwargs):
row = ColorRow(self, **kwargs)
# update column widths
for column in self.columns:
self.column_widths[column] = max(self.column_widths[column], len(row[column]))
self.rows.append(row)
def __str__(self):
"""
Generate a colored table.
"""
rows = sorted(self.rows, key=self.sort_key) if self.sort_key else self.rows
return "\n".join(map(str, [self.header, self.separator] + rows))
if __name__ == '__main__':
table = ColorTable("first", "last", sort_key=lambda row: (row["last"], row["first"]))
table.add(first="George", last="Washington")
table.add(first="John", last="Adams")
table.add(first="Thomas", last="Jefferson")
print table
|
<commit_before><commit_msg>Implement utility for colored, tabular output using fabric's color controls.<commit_after>"""
Pretty table generation.
"""
from itertools import cycle
from string import capwords
from fabric.colors import red, green, blue, magenta, white, yellow
class ColorRow(dict):
"""
Ordered collection of column values.
"""
def __init__(self, table, **kwargs):
super(ColorRow, self).__init__(self)
self.table = table
for column in self.table.columns:
self[column] = kwargs.get(column)
def __str__(self):
"""
Generate a formatted and colored string for this row.
"""
def format_cell(color, item):
column, value = item
return color(" {}".format(value).ljust(1 + self.table.column_widths[column]))
# get items in column order
items = [(column, self[column]) for column in self.table.columns]
# format cells with color and length
cells = [format_cell(color, item) for color, item in zip(cycle(self.table.colors), items)]
return " ".join(cells)
class ColorTable(object):
"""
Simple row/column table.
"""
def __init__(self, *columns, **kwargs):
"""
Create a table with fixed columns.
:param columns: *args style list of column names
:param kwargs: additional options, including `sort_key` and `colors`
"""
self.columns = columns
self.sort_key = kwargs.get("sort_key")
self.colors = kwargs.get("colors", [red, green, blue, magenta, white, yellow])
self.header = ColorRow(self, **dict([(column, capwords(column)) for column in self.columns]))
# initialize column widths based on header
self.column_widths = dict([(column, len(self.header[column])) for column in self.columns])
self.rows = []
@property
def separator(self):
"""
Generate a separator row using current column widths.
"""
cells = dict([(column, "-" * self.column_widths[column]) for column in self.columns])
return ColorRow(self, **cells)
def add(self, **kwargs):
row = ColorRow(self, **kwargs)
# update column widths
for column in self.columns:
self.column_widths[column] = max(self.column_widths[column], len(row[column]))
self.rows.append(row)
def __str__(self):
"""
Generate a colored table.
"""
rows = sorted(self.rows, key=self.sort_key) if self.sort_key else self.rows
return "\n".join(map(str, [self.header, self.separator] + rows))
if __name__ == '__main__':
table = ColorTable("first", "last", sort_key=lambda row: (row["last"], row["first"]))
table.add(first="George", last="Washington")
table.add(first="John", last="Adams")
table.add(first="Thomas", last="Jefferson")
print table
|
|
0882c8885b88618ea55b97ace256cdf833a1547d
|
tests/test_pylama_isort.py
|
tests/test_pylama_isort.py
|
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "isort.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
|
Add tests for pylama isort
|
Add tests for pylama isort
|
Python
|
mit
|
PyCQA/isort,PyCQA/isort
|
Add tests for pylama isort
|
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "isort.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
|
<commit_before><commit_msg>Add tests for pylama isort<commit_after>
|
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "isort.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
|
Add tests for pylama isortimport os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "isort.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
|
<commit_before><commit_msg>Add tests for pylama isort<commit_after>import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "isort.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
|
|
280e72331d99a8c49783196951287627a933a659
|
py/repeated-substring-pattern.py
|
py/repeated-substring-pattern.py
|
class Solution(object):
def repeatedSubstringPattern(self, s):
"""
:type s: str
:rtype: bool
"""
for i in xrange(1, len(s) / 2 + 1):
if len(s) % i == 0 and len(set(s[j:j+i] for j in xrange(0, len(s), i))) == 1:
return True
return False
|
Add py solution for 459. Repeated Substring Pattern
|
Add py solution for 459. Repeated Substring Pattern
459. Repeated Substring Pattern: https://leetcode.com/problems/repeated-substring-pattern/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 459. Repeated Substring Pattern
459. Repeated Substring Pattern: https://leetcode.com/problems/repeated-substring-pattern/
|
class Solution(object):
def repeatedSubstringPattern(self, s):
"""
:type s: str
:rtype: bool
"""
for i in xrange(1, len(s) / 2 + 1):
if len(s) % i == 0 and len(set(s[j:j+i] for j in xrange(0, len(s), i))) == 1:
return True
return False
|
<commit_before><commit_msg>Add py solution for 459. Repeated Substring Pattern
459. Repeated Substring Pattern: https://leetcode.com/problems/repeated-substring-pattern/<commit_after>
|
class Solution(object):
def repeatedSubstringPattern(self, s):
"""
:type s: str
:rtype: bool
"""
for i in xrange(1, len(s) / 2 + 1):
if len(s) % i == 0 and len(set(s[j:j+i] for j in xrange(0, len(s), i))) == 1:
return True
return False
|
Add py solution for 459. Repeated Substring Pattern
459. Repeated Substring Pattern: https://leetcode.com/problems/repeated-substring-pattern/class Solution(object):
def repeatedSubstringPattern(self, s):
"""
:type s: str
:rtype: bool
"""
for i in xrange(1, len(s) / 2 + 1):
if len(s) % i == 0 and len(set(s[j:j+i] for j in xrange(0, len(s), i))) == 1:
return True
return False
|
<commit_before><commit_msg>Add py solution for 459. Repeated Substring Pattern
459. Repeated Substring Pattern: https://leetcode.com/problems/repeated-substring-pattern/<commit_after>class Solution(object):
def repeatedSubstringPattern(self, s):
"""
:type s: str
:rtype: bool
"""
for i in xrange(1, len(s) / 2 + 1):
if len(s) % i == 0 and len(set(s[j:j+i] for j in xrange(0, len(s), i))) == 1:
return True
return False
|
|
75dc32ef71fd32c7728269b01a74faf840690473
|
examples/too_slow_bot.py
|
examples/too_slow_bot.py
|
import random
import asyncio
import sc2
from sc2 import Race, Difficulty
from sc2.constants import *
from sc2.player import Bot, Computer
from proxy_rax import ProxyRaxBot
class SlowBot(ProxyRaxBot):
async def on_step(self, state, iteration):
await asyncio.sleep(random.random())
await super().on_step(state, iteration)
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, SlowBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False, step_time_limit=0.2)
if __name__ == '__main__':
main()
|
Add a slow bot to test timeout feature
|
Add a slow bot to test timeout feature
|
Python
|
mit
|
Dentosal/python-sc2
|
Add a slow bot to test timeout feature
|
import random
import asyncio
import sc2
from sc2 import Race, Difficulty
from sc2.constants import *
from sc2.player import Bot, Computer
from proxy_rax import ProxyRaxBot
class SlowBot(ProxyRaxBot):
async def on_step(self, state, iteration):
await asyncio.sleep(random.random())
await super().on_step(state, iteration)
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, SlowBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False, step_time_limit=0.2)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a slow bot to test timeout feature<commit_after>
|
import random
import asyncio
import sc2
from sc2 import Race, Difficulty
from sc2.constants import *
from sc2.player import Bot, Computer
from proxy_rax import ProxyRaxBot
class SlowBot(ProxyRaxBot):
async def on_step(self, state, iteration):
await asyncio.sleep(random.random())
await super().on_step(state, iteration)
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, SlowBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False, step_time_limit=0.2)
if __name__ == '__main__':
main()
|
Add a slow bot to test timeout featureimport random
import asyncio
import sc2
from sc2 import Race, Difficulty
from sc2.constants import *
from sc2.player import Bot, Computer
from proxy_rax import ProxyRaxBot
class SlowBot(ProxyRaxBot):
async def on_step(self, state, iteration):
await asyncio.sleep(random.random())
await super().on_step(state, iteration)
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, SlowBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False, step_time_limit=0.2)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a slow bot to test timeout feature<commit_after>import random
import asyncio
import sc2
from sc2 import Race, Difficulty
from sc2.constants import *
from sc2.player import Bot, Computer
from proxy_rax import ProxyRaxBot
class SlowBot(ProxyRaxBot):
async def on_step(self, state, iteration):
await asyncio.sleep(random.random())
await super().on_step(state, iteration)
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, SlowBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False, step_time_limit=0.2)
if __name__ == '__main__':
main()
|