commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0472c1cabdfdf0f8a193552dac3370ae93bbdaed
|
scripts/get_top_hashtags.py
|
scripts/get_top_hashtags.py
|
import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtags = []
for line in f:
if line.startswith('{'):
hashtags.extend(json.loads(line)['hashtags'])
hashtagCounter = Counter([hashtag.lower() for hashtag in hashtags])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
|
import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
|
Use a more compact functional style for instantiating hashtagCounter
|
Use a more compact functional style for instantiating hashtagCounter
|
Python
|
mpl-2.0
|
aDataAlchemist/election-tweets
|
import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtags = []
for line in f:
if line.startswith('{'):
hashtags.extend(json.loads(line)['hashtags'])
hashtagCounter = Counter([hashtag.lower() for hashtag in hashtags])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
Use a more compact functional style for instantiating hashtagCounter
|
import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
|
<commit_before>import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtags = []
for line in f:
if line.startswith('{'):
hashtags.extend(json.loads(line)['hashtags'])
hashtagCounter = Counter([hashtag.lower() for hashtag in hashtags])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
<commit_msg>Use a more compact functional style for instantiating hashtagCounter<commit_after>
|
import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
|
import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtags = []
for line in f:
if line.startswith('{'):
hashtags.extend(json.loads(line)['hashtags'])
hashtagCounter = Counter([hashtag.lower() for hashtag in hashtags])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
Use a more compact functional style for instantiating hashtagCounterimport json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
|
<commit_before>import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtags = []
for line in f:
if line.startswith('{'):
hashtags.extend(json.loads(line)['hashtags'])
hashtagCounter = Counter([hashtag.lower() for hashtag in hashtags])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
<commit_msg>Use a more compact functional style for instantiating hashtagCounter<commit_after>import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
|
cd300ebffd8974b5c9fe98e8368f26dc029ae41b
|
tests/schemas.py
|
tests/schemas.py
|
from marshmallow import Schema, fields
class PetSchema(Schema):
id = fields.Int(dump_only=True)
name = fields.Str()
class SampleSchema(Schema):
runs = fields.Nested('RunSchema', many=True, exclude=('sample',))
count = fields.Int()
class RunSchema(Schema):
sample = fields.Nested(SampleSchema, exclude=('runs',))
class AnalysisSchema(Schema):
sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
samples = fields.List(fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
count = fields.Int(dump_only=True, **{'x-count': 1})
count2 = fields.Int(dump_only=True, x_count2=2)
class SelfReferencingSchema(Schema):
id = fields.Int()
single = fields.Nested('self')
single_with_ref = fields.Nested('self', ref='#/definitions/Self')
many = fields.Nested('self', many=True)
many_with_ref = fields.Nested('self', many=True, ref='#/definitions/Selves')
class OrderedSchema(Schema):
field1 = fields.Int()
field2 = fields.Int()
field3 = fields.Int()
field4 = fields.Int()
field5 = fields.Int()
class Meta:
ordered = True
class DefaultCallableSchema(Schema):
numbers = fields.List(fields.Int, missing=list)
|
from marshmallow import Schema, fields
class PetSchema(Schema):
id = fields.Int(dump_only=True)
name = fields.Str()
class SampleSchema(Schema):
runs = fields.Nested('RunSchema', many=True, exclude=('sample',))
count = fields.Int()
class RunSchema(Schema):
sample = fields.Nested(SampleSchema, exclude=('runs',))
class AnalysisSchema(Schema):
sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
samples = fields.List(fields.Nested(SampleSchema))
class AnalysisWithDictSchema(Schema):
samplesDict = fields.Dict(values=fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
count = fields.Int(dump_only=True, **{'x-count': 1})
count2 = fields.Int(dump_only=True, x_count2=2)
class SelfReferencingSchema(Schema):
id = fields.Int()
single = fields.Nested('self')
single_with_ref = fields.Nested('self', ref='#/definitions/Self')
many = fields.Nested('self', many=True)
many_with_ref = fields.Nested('self', many=True, ref='#/definitions/Selves')
class OrderedSchema(Schema):
field1 = fields.Int()
field2 = fields.Int()
field3 = fields.Int()
field4 = fields.Int()
field5 = fields.Int()
class Meta:
ordered = True
class DefaultCallableSchema(Schema):
numbers = fields.List(fields.Int, missing=list)
|
Add schema with Dict field
|
Add schema with Dict field
|
Python
|
mit
|
marshmallow-code/smore,marshmallow-code/apispec
|
from marshmallow import Schema, fields
class PetSchema(Schema):
id = fields.Int(dump_only=True)
name = fields.Str()
class SampleSchema(Schema):
runs = fields.Nested('RunSchema', many=True, exclude=('sample',))
count = fields.Int()
class RunSchema(Schema):
sample = fields.Nested(SampleSchema, exclude=('runs',))
class AnalysisSchema(Schema):
sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
samples = fields.List(fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
count = fields.Int(dump_only=True, **{'x-count': 1})
count2 = fields.Int(dump_only=True, x_count2=2)
class SelfReferencingSchema(Schema):
id = fields.Int()
single = fields.Nested('self')
single_with_ref = fields.Nested('self', ref='#/definitions/Self')
many = fields.Nested('self', many=True)
many_with_ref = fields.Nested('self', many=True, ref='#/definitions/Selves')
class OrderedSchema(Schema):
field1 = fields.Int()
field2 = fields.Int()
field3 = fields.Int()
field4 = fields.Int()
field5 = fields.Int()
class Meta:
ordered = True
class DefaultCallableSchema(Schema):
numbers = fields.List(fields.Int, missing=list)
Add schema with Dict field
|
from marshmallow import Schema, fields
class PetSchema(Schema):
id = fields.Int(dump_only=True)
name = fields.Str()
class SampleSchema(Schema):
runs = fields.Nested('RunSchema', many=True, exclude=('sample',))
count = fields.Int()
class RunSchema(Schema):
sample = fields.Nested(SampleSchema, exclude=('runs',))
class AnalysisSchema(Schema):
sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
samples = fields.List(fields.Nested(SampleSchema))
class AnalysisWithDictSchema(Schema):
samplesDict = fields.Dict(values=fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
count = fields.Int(dump_only=True, **{'x-count': 1})
count2 = fields.Int(dump_only=True, x_count2=2)
class SelfReferencingSchema(Schema):
id = fields.Int()
single = fields.Nested('self')
single_with_ref = fields.Nested('self', ref='#/definitions/Self')
many = fields.Nested('self', many=True)
many_with_ref = fields.Nested('self', many=True, ref='#/definitions/Selves')
class OrderedSchema(Schema):
field1 = fields.Int()
field2 = fields.Int()
field3 = fields.Int()
field4 = fields.Int()
field5 = fields.Int()
class Meta:
ordered = True
class DefaultCallableSchema(Schema):
numbers = fields.List(fields.Int, missing=list)
|
<commit_before>from marshmallow import Schema, fields
class PetSchema(Schema):
id = fields.Int(dump_only=True)
name = fields.Str()
class SampleSchema(Schema):
runs = fields.Nested('RunSchema', many=True, exclude=('sample',))
count = fields.Int()
class RunSchema(Schema):
sample = fields.Nested(SampleSchema, exclude=('runs',))
class AnalysisSchema(Schema):
sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
samples = fields.List(fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
count = fields.Int(dump_only=True, **{'x-count': 1})
count2 = fields.Int(dump_only=True, x_count2=2)
class SelfReferencingSchema(Schema):
id = fields.Int()
single = fields.Nested('self')
single_with_ref = fields.Nested('self', ref='#/definitions/Self')
many = fields.Nested('self', many=True)
many_with_ref = fields.Nested('self', many=True, ref='#/definitions/Selves')
class OrderedSchema(Schema):
field1 = fields.Int()
field2 = fields.Int()
field3 = fields.Int()
field4 = fields.Int()
field5 = fields.Int()
class Meta:
ordered = True
class DefaultCallableSchema(Schema):
numbers = fields.List(fields.Int, missing=list)
<commit_msg>Add schema with Dict field<commit_after>
|
from marshmallow import Schema, fields
class PetSchema(Schema):
id = fields.Int(dump_only=True)
name = fields.Str()
class SampleSchema(Schema):
runs = fields.Nested('RunSchema', many=True, exclude=('sample',))
count = fields.Int()
class RunSchema(Schema):
sample = fields.Nested(SampleSchema, exclude=('runs',))
class AnalysisSchema(Schema):
sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
samples = fields.List(fields.Nested(SampleSchema))
class AnalysisWithDictSchema(Schema):
samplesDict = fields.Dict(values=fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
count = fields.Int(dump_only=True, **{'x-count': 1})
count2 = fields.Int(dump_only=True, x_count2=2)
class SelfReferencingSchema(Schema):
id = fields.Int()
single = fields.Nested('self')
single_with_ref = fields.Nested('self', ref='#/definitions/Self')
many = fields.Nested('self', many=True)
many_with_ref = fields.Nested('self', many=True, ref='#/definitions/Selves')
class OrderedSchema(Schema):
field1 = fields.Int()
field2 = fields.Int()
field3 = fields.Int()
field4 = fields.Int()
field5 = fields.Int()
class Meta:
ordered = True
class DefaultCallableSchema(Schema):
numbers = fields.List(fields.Int, missing=list)
|
from marshmallow import Schema, fields
class PetSchema(Schema):
id = fields.Int(dump_only=True)
name = fields.Str()
class SampleSchema(Schema):
runs = fields.Nested('RunSchema', many=True, exclude=('sample',))
count = fields.Int()
class RunSchema(Schema):
sample = fields.Nested(SampleSchema, exclude=('runs',))
class AnalysisSchema(Schema):
sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
samples = fields.List(fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
count = fields.Int(dump_only=True, **{'x-count': 1})
count2 = fields.Int(dump_only=True, x_count2=2)
class SelfReferencingSchema(Schema):
id = fields.Int()
single = fields.Nested('self')
single_with_ref = fields.Nested('self', ref='#/definitions/Self')
many = fields.Nested('self', many=True)
many_with_ref = fields.Nested('self', many=True, ref='#/definitions/Selves')
class OrderedSchema(Schema):
field1 = fields.Int()
field2 = fields.Int()
field3 = fields.Int()
field4 = fields.Int()
field5 = fields.Int()
class Meta:
ordered = True
class DefaultCallableSchema(Schema):
numbers = fields.List(fields.Int, missing=list)
Add schema with Dict fieldfrom marshmallow import Schema, fields
class PetSchema(Schema):
id = fields.Int(dump_only=True)
name = fields.Str()
class SampleSchema(Schema):
runs = fields.Nested('RunSchema', many=True, exclude=('sample',))
count = fields.Int()
class RunSchema(Schema):
sample = fields.Nested(SampleSchema, exclude=('runs',))
class AnalysisSchema(Schema):
sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
samples = fields.List(fields.Nested(SampleSchema))
class AnalysisWithDictSchema(Schema):
samplesDict = fields.Dict(values=fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
count = fields.Int(dump_only=True, **{'x-count': 1})
count2 = fields.Int(dump_only=True, x_count2=2)
class SelfReferencingSchema(Schema):
id = fields.Int()
single = fields.Nested('self')
single_with_ref = fields.Nested('self', ref='#/definitions/Self')
many = fields.Nested('self', many=True)
many_with_ref = fields.Nested('self', many=True, ref='#/definitions/Selves')
class OrderedSchema(Schema):
field1 = fields.Int()
field2 = fields.Int()
field3 = fields.Int()
field4 = fields.Int()
field5 = fields.Int()
class Meta:
ordered = True
class DefaultCallableSchema(Schema):
numbers = fields.List(fields.Int, missing=list)
|
<commit_before>from marshmallow import Schema, fields
class PetSchema(Schema):
id = fields.Int(dump_only=True)
name = fields.Str()
class SampleSchema(Schema):
runs = fields.Nested('RunSchema', many=True, exclude=('sample',))
count = fields.Int()
class RunSchema(Schema):
sample = fields.Nested(SampleSchema, exclude=('runs',))
class AnalysisSchema(Schema):
sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
samples = fields.List(fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
count = fields.Int(dump_only=True, **{'x-count': 1})
count2 = fields.Int(dump_only=True, x_count2=2)
class SelfReferencingSchema(Schema):
id = fields.Int()
single = fields.Nested('self')
single_with_ref = fields.Nested('self', ref='#/definitions/Self')
many = fields.Nested('self', many=True)
many_with_ref = fields.Nested('self', many=True, ref='#/definitions/Selves')
class OrderedSchema(Schema):
field1 = fields.Int()
field2 = fields.Int()
field3 = fields.Int()
field4 = fields.Int()
field5 = fields.Int()
class Meta:
ordered = True
class DefaultCallableSchema(Schema):
numbers = fields.List(fields.Int, missing=list)
<commit_msg>Add schema with Dict field<commit_after>from marshmallow import Schema, fields
class PetSchema(Schema):
id = fields.Int(dump_only=True)
name = fields.Str()
class SampleSchema(Schema):
runs = fields.Nested('RunSchema', many=True, exclude=('sample',))
count = fields.Int()
class RunSchema(Schema):
sample = fields.Nested(SampleSchema, exclude=('runs',))
class AnalysisSchema(Schema):
sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
samples = fields.List(fields.Nested(SampleSchema))
class AnalysisWithDictSchema(Schema):
samplesDict = fields.Dict(values=fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
count = fields.Int(dump_only=True, **{'x-count': 1})
count2 = fields.Int(dump_only=True, x_count2=2)
class SelfReferencingSchema(Schema):
id = fields.Int()
single = fields.Nested('self')
single_with_ref = fields.Nested('self', ref='#/definitions/Self')
many = fields.Nested('self', many=True)
many_with_ref = fields.Nested('self', many=True, ref='#/definitions/Selves')
class OrderedSchema(Schema):
field1 = fields.Int()
field2 = fields.Int()
field3 = fields.Int()
field4 = fields.Int()
field5 = fields.Int()
class Meta:
ordered = True
class DefaultCallableSchema(Schema):
numbers = fields.List(fields.Int, missing=list)
|
e1988a1a696aa86aa7cd1bf305a8f893e9225f79
|
src/manage.py
|
src/manage.py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onepageblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onepageblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Use python3 in the shebang
|
Use python3 in the shebang
|
Python
|
agpl-3.0
|
kaapstorm/onepageblog,kaapstorm/onepageblog,kaapstorm/onepageblog
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onepageblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Use python3 in the shebang
|
#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onepageblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onepageblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Use python3 in the shebang<commit_after>
|
#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onepageblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onepageblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Use python3 in the shebang#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onepageblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onepageblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Use python3 in the shebang<commit_after>#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "onepageblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
70e104c8270385050842bd0c61ce4a92b78754fd
|
conda_verify/errors.py
|
conda_verify/errors.py
|
from collections import namedtuple
class PackageError(Exception):
"""Exception to be raised when user wants to exit on error."""
class RecipeError(Exception):
"""Exception to be raised when user wants to exit on error."""
class Error(namedtuple('Error', ['file', 'code', 'message'])):
"""Error class creates error codes to be shown to the user."""
def __repr__(self):
"""Override namedtuple's __repr__ so that error codes are readable."""
return '{}: {} {}' .format(self.file, self.code, self.message)
|
from collections import namedtuple
class PackageError(Exception):
"""Exception to be raised when user wants to exit on error."""
class RecipeError(Exception):
"""Exception to be raised when user wants to exit on error."""
class Error(namedtuple('Error', ['file', 'code', 'message'])):
"""Error class creates error codes to be shown to the user."""
def __str__(self):
"""Override namedtuple's __str__ so that error codes are readable."""
return '{}: {} {}' .format(self.file, self.code, self.message)
|
Use __str__ instead of __repr__ so that debugging output remains the same
|
Use __str__ instead of __repr__ so that debugging output remains the same
|
Python
|
bsd-3-clause
|
mandeep/conda-verify
|
from collections import namedtuple
class PackageError(Exception):
"""Exception to be raised when user wants to exit on error."""
class RecipeError(Exception):
"""Exception to be raised when user wants to exit on error."""
class Error(namedtuple('Error', ['file', 'code', 'message'])):
"""Error class creates error codes to be shown to the user."""
def __repr__(self):
"""Override namedtuple's __repr__ so that error codes are readable."""
return '{}: {} {}' .format(self.file, self.code, self.message)
Use __str__ instead of __repr__ so that debugging output remains the same
|
from collections import namedtuple
class PackageError(Exception):
"""Exception to be raised when user wants to exit on error."""
class RecipeError(Exception):
"""Exception to be raised when user wants to exit on error."""
class Error(namedtuple('Error', ['file', 'code', 'message'])):
"""Error class creates error codes to be shown to the user."""
def __str__(self):
"""Override namedtuple's __str__ so that error codes are readable."""
return '{}: {} {}' .format(self.file, self.code, self.message)
|
<commit_before>from collections import namedtuple
class PackageError(Exception):
"""Exception to be raised when user wants to exit on error."""
class RecipeError(Exception):
"""Exception to be raised when user wants to exit on error."""
class Error(namedtuple('Error', ['file', 'code', 'message'])):
"""Error class creates error codes to be shown to the user."""
def __repr__(self):
"""Override namedtuple's __repr__ so that error codes are readable."""
return '{}: {} {}' .format(self.file, self.code, self.message)
<commit_msg>Use __str__ instead of __repr__ so that debugging output remains the same<commit_after>
|
from collections import namedtuple
class PackageError(Exception):
"""Exception to be raised when user wants to exit on error."""
class RecipeError(Exception):
"""Exception to be raised when user wants to exit on error."""
class Error(namedtuple('Error', ['file', 'code', 'message'])):
"""Error class creates error codes to be shown to the user."""
def __str__(self):
"""Override namedtuple's __str__ so that error codes are readable."""
return '{}: {} {}' .format(self.file, self.code, self.message)
|
from collections import namedtuple
class PackageError(Exception):
"""Exception to be raised when user wants to exit on error."""
class RecipeError(Exception):
"""Exception to be raised when user wants to exit on error."""
class Error(namedtuple('Error', ['file', 'code', 'message'])):
"""Error class creates error codes to be shown to the user."""
def __repr__(self):
"""Override namedtuple's __repr__ so that error codes are readable."""
return '{}: {} {}' .format(self.file, self.code, self.message)
Use __str__ instead of __repr__ so that debugging output remains the samefrom collections import namedtuple
class PackageError(Exception):
"""Exception to be raised when user wants to exit on error."""
class RecipeError(Exception):
"""Exception to be raised when user wants to exit on error."""
class Error(namedtuple('Error', ['file', 'code', 'message'])):
"""Error class creates error codes to be shown to the user."""
def __str__(self):
"""Override namedtuple's __str__ so that error codes are readable."""
return '{}: {} {}' .format(self.file, self.code, self.message)
|
<commit_before>from collections import namedtuple
class PackageError(Exception):
"""Exception to be raised when user wants to exit on error."""
class RecipeError(Exception):
"""Exception to be raised when user wants to exit on error."""
class Error(namedtuple('Error', ['file', 'code', 'message'])):
"""Error class creates error codes to be shown to the user."""
def __repr__(self):
"""Override namedtuple's __repr__ so that error codes are readable."""
return '{}: {} {}' .format(self.file, self.code, self.message)
<commit_msg>Use __str__ instead of __repr__ so that debugging output remains the same<commit_after>from collections import namedtuple
class PackageError(Exception):
"""Exception to be raised when user wants to exit on error."""
class RecipeError(Exception):
"""Exception to be raised when user wants to exit on error."""
class Error(namedtuple('Error', ['file', 'code', 'message'])):
"""Error class creates error codes to be shown to the user."""
def __str__(self):
"""Override namedtuple's __str__ so that error codes are readable."""
return '{}: {} {}' .format(self.file, self.code, self.message)
|
7064916ddd2913856b9493670ca2d525fd412b06
|
crmapp/urls.py
|
crmapp/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
# Account related URLs
# Contact related URLS
# Communication related URLs
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
# Contact related URLS
# Communication related URLs
)
|
Create the Login Page > Create the Login & Logout URLs
|
Create the Login Page > Create the Login & Logout URLs
|
Python
|
mit
|
deenaariff/Django,tabdon/crmeasyapp,tabdon/crmeasyapp
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
# Account related URLs
# Contact related URLS
# Communication related URLs
)Create the Login Page > Create the Login & Logout URLs
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
# Contact related URLS
# Communication related URLs
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
# Account related URLs
# Contact related URLS
# Communication related URLs
)<commit_msg>Create the Login Page > Create the Login & Logout URLs<commit_after>
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
# Contact related URLS
# Communication related URLs
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
# Account related URLs
# Contact related URLS
# Communication related URLs
)Create the Login Page > Create the Login & Logout URLsfrom django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
# Contact related URLS
# Communication related URLs
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
# Account related URLs
# Contact related URLS
# Communication related URLs
)<commit_msg>Create the Login Page > Create the Login & Logout URLs<commit_after>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
# Contact related URLS
# Communication related URLs
)
|
8a0c17f39fd63a90b24ed79bd5bde4d52622e41d
|
irc/message.py
|
irc/message.py
|
class Tag(object):
"""
An IRC message tag ircv3.net/specs/core/message-tags-3.2.html
"""
@staticmethod
def parse(item):
key, sep, value = item.partition('=')
value = value.replace('\\:', ';')
value = value.replace('\\s', ' ')
value = value.replace('\\n', '\n')
value = value.replace('\\r', '\r')
value = value.replace('\\\\', '\\')
value = value or None
return {
'key': key,
'value': value,
}
|
from __future__ import print_function
class Tag(object):
"""
An IRC message tag ircv3.net/specs/core/message-tags-3.2.html
"""
@staticmethod
def parse(item):
r"""
>>> Tag.parse('x') == {'key': 'x', 'value': None}
True
>>> Tag.parse('x=yes') == {'key': 'x', 'value': 'yes'}
True
>>> Tag.parse('x=3')['value']
'3'
>>> Tag.parse('x=red fox\\:green eggs')['value']
'red fox;green eggs'
>>> Tag.parse('x=red fox:green eggs')['value']
'red fox:green eggs'
>>> print(Tag.parse('x=a\\nb\\nc')['value'])
a
b
c
"""
key, sep, value = item.partition('=')
value = value.replace('\\:', ';')
value = value.replace('\\s', ' ')
value = value.replace('\\n', '\n')
value = value.replace('\\r', '\r')
value = value.replace('\\\\', '\\')
value = value or None
return {
'key': key,
'value': value,
}
|
Add tests for tag parsing
|
Add tests for tag parsing
|
Python
|
mit
|
jaraco/irc
|
class Tag(object):
"""
An IRC message tag ircv3.net/specs/core/message-tags-3.2.html
"""
@staticmethod
def parse(item):
key, sep, value = item.partition('=')
value = value.replace('\\:', ';')
value = value.replace('\\s', ' ')
value = value.replace('\\n', '\n')
value = value.replace('\\r', '\r')
value = value.replace('\\\\', '\\')
value = value or None
return {
'key': key,
'value': value,
}
Add tests for tag parsing
|
from __future__ import print_function
class Tag(object):
"""
An IRC message tag ircv3.net/specs/core/message-tags-3.2.html
"""
@staticmethod
def parse(item):
r"""
>>> Tag.parse('x') == {'key': 'x', 'value': None}
True
>>> Tag.parse('x=yes') == {'key': 'x', 'value': 'yes'}
True
>>> Tag.parse('x=3')['value']
'3'
>>> Tag.parse('x=red fox\\:green eggs')['value']
'red fox;green eggs'
>>> Tag.parse('x=red fox:green eggs')['value']
'red fox:green eggs'
>>> print(Tag.parse('x=a\\nb\\nc')['value'])
a
b
c
"""
key, sep, value = item.partition('=')
value = value.replace('\\:', ';')
value = value.replace('\\s', ' ')
value = value.replace('\\n', '\n')
value = value.replace('\\r', '\r')
value = value.replace('\\\\', '\\')
value = value or None
return {
'key': key,
'value': value,
}
|
<commit_before>
class Tag(object):
"""
An IRC message tag ircv3.net/specs/core/message-tags-3.2.html
"""
@staticmethod
def parse(item):
key, sep, value = item.partition('=')
value = value.replace('\\:', ';')
value = value.replace('\\s', ' ')
value = value.replace('\\n', '\n')
value = value.replace('\\r', '\r')
value = value.replace('\\\\', '\\')
value = value or None
return {
'key': key,
'value': value,
}
<commit_msg>Add tests for tag parsing<commit_after>
|
from __future__ import print_function
class Tag(object):
"""
An IRC message tag ircv3.net/specs/core/message-tags-3.2.html
"""
@staticmethod
def parse(item):
r"""
>>> Tag.parse('x') == {'key': 'x', 'value': None}
True
>>> Tag.parse('x=yes') == {'key': 'x', 'value': 'yes'}
True
>>> Tag.parse('x=3')['value']
'3'
>>> Tag.parse('x=red fox\\:green eggs')['value']
'red fox;green eggs'
>>> Tag.parse('x=red fox:green eggs')['value']
'red fox:green eggs'
>>> print(Tag.parse('x=a\\nb\\nc')['value'])
a
b
c
"""
key, sep, value = item.partition('=')
value = value.replace('\\:', ';')
value = value.replace('\\s', ' ')
value = value.replace('\\n', '\n')
value = value.replace('\\r', '\r')
value = value.replace('\\\\', '\\')
value = value or None
return {
'key': key,
'value': value,
}
|
class Tag(object):
"""
An IRC message tag ircv3.net/specs/core/message-tags-3.2.html
"""
@staticmethod
def parse(item):
key, sep, value = item.partition('=')
value = value.replace('\\:', ';')
value = value.replace('\\s', ' ')
value = value.replace('\\n', '\n')
value = value.replace('\\r', '\r')
value = value.replace('\\\\', '\\')
value = value or None
return {
'key': key,
'value': value,
}
Add tests for tag parsingfrom __future__ import print_function
class Tag(object):
"""
An IRC message tag ircv3.net/specs/core/message-tags-3.2.html
"""
@staticmethod
def parse(item):
r"""
>>> Tag.parse('x') == {'key': 'x', 'value': None}
True
>>> Tag.parse('x=yes') == {'key': 'x', 'value': 'yes'}
True
>>> Tag.parse('x=3')['value']
'3'
>>> Tag.parse('x=red fox\\:green eggs')['value']
'red fox;green eggs'
>>> Tag.parse('x=red fox:green eggs')['value']
'red fox:green eggs'
>>> print(Tag.parse('x=a\\nb\\nc')['value'])
a
b
c
"""
key, sep, value = item.partition('=')
value = value.replace('\\:', ';')
value = value.replace('\\s', ' ')
value = value.replace('\\n', '\n')
value = value.replace('\\r', '\r')
value = value.replace('\\\\', '\\')
value = value or None
return {
'key': key,
'value': value,
}
|
<commit_before>
class Tag(object):
"""
An IRC message tag ircv3.net/specs/core/message-tags-3.2.html
"""
@staticmethod
def parse(item):
key, sep, value = item.partition('=')
value = value.replace('\\:', ';')
value = value.replace('\\s', ' ')
value = value.replace('\\n', '\n')
value = value.replace('\\r', '\r')
value = value.replace('\\\\', '\\')
value = value or None
return {
'key': key,
'value': value,
}
<commit_msg>Add tests for tag parsing<commit_after>from __future__ import print_function
class Tag(object):
"""
An IRC message tag ircv3.net/specs/core/message-tags-3.2.html
"""
@staticmethod
def parse(item):
r"""
>>> Tag.parse('x') == {'key': 'x', 'value': None}
True
>>> Tag.parse('x=yes') == {'key': 'x', 'value': 'yes'}
True
>>> Tag.parse('x=3')['value']
'3'
>>> Tag.parse('x=red fox\\:green eggs')['value']
'red fox;green eggs'
>>> Tag.parse('x=red fox:green eggs')['value']
'red fox:green eggs'
>>> print(Tag.parse('x=a\\nb\\nc')['value'])
a
b
c
"""
key, sep, value = item.partition('=')
value = value.replace('\\:', ';')
value = value.replace('\\s', ' ')
value = value.replace('\\n', '\n')
value = value.replace('\\r', '\r')
value = value.replace('\\\\', '\\')
value = value or None
return {
'key': key,
'value': value,
}
|
6e6bffc19873260696822bb3f4a821ce4ea6f4a3
|
consulrest/keyvalue.py
|
consulrest/keyvalue.py
|
import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
if recurse is not None:
url += '?recurse'
if keys is not None:
url += '?keys'
r = requests.get(url)
if r.status_code == 200:
return json.loads(r.text)
else:
return None
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value):
r = requests.put(self._url + '/' + key, data=value)
if r.status_code == 200 and re.match(r"true", r.text) is not None:
return True
else:
return False
def delete(self, key, recurse=None):
url = self._url + '/' + key
if recurse is not None:
url += '?recurse'
requests.delete(url)
|
import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.status_code == 200:
return json.loads(r.text)
else:
return None
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value):
r = requests.put(self._url + '/' + key, data=value)
if r.status_code == 200 and re.match(r"true", r.text) is not None:
return True
else:
return False
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
requests.delete(url, params=params)
|
Use params dictionary instead of appending to the end of URL string
|
Use params dictionary instead of appending to the end of URL string
|
Python
|
mit
|
vcoque/consul-ri
|
import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
if recurse is not None:
url += '?recurse'
if keys is not None:
url += '?keys'
r = requests.get(url)
if r.status_code == 200:
return json.loads(r.text)
else:
return None
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value):
r = requests.put(self._url + '/' + key, data=value)
if r.status_code == 200 and re.match(r"true", r.text) is not None:
return True
else:
return False
def delete(self, key, recurse=None):
url = self._url + '/' + key
if recurse is not None:
url += '?recurse'
requests.delete(url)
Use params dictionary instead of appending to the end of URL string
|
import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.status_code == 200:
return json.loads(r.text)
else:
return None
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value):
r = requests.put(self._url + '/' + key, data=value)
if r.status_code == 200 and re.match(r"true", r.text) is not None:
return True
else:
return False
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
requests.delete(url, params=params)
|
<commit_before>import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
if recurse is not None:
url += '?recurse'
if keys is not None:
url += '?keys'
r = requests.get(url)
if r.status_code == 200:
return json.loads(r.text)
else:
return None
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value):
r = requests.put(self._url + '/' + key, data=value)
if r.status_code == 200 and re.match(r"true", r.text) is not None:
return True
else:
return False
def delete(self, key, recurse=None):
url = self._url + '/' + key
if recurse is not None:
url += '?recurse'
requests.delete(url)
<commit_msg>Use params dictionary instead of appending to the end of URL string<commit_after>
|
import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.status_code == 200:
return json.loads(r.text)
else:
return None
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value):
r = requests.put(self._url + '/' + key, data=value)
if r.status_code == 200 and re.match(r"true", r.text) is not None:
return True
else:
return False
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
requests.delete(url, params=params)
|
import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
if recurse is not None:
url += '?recurse'
if keys is not None:
url += '?keys'
r = requests.get(url)
if r.status_code == 200:
return json.loads(r.text)
else:
return None
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value):
r = requests.put(self._url + '/' + key, data=value)
if r.status_code == 200 and re.match(r"true", r.text) is not None:
return True
else:
return False
def delete(self, key, recurse=None):
url = self._url + '/' + key
if recurse is not None:
url += '?recurse'
requests.delete(url)
Use params dictionary instead of appending to the end of URL stringimport json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.status_code == 200:
return json.loads(r.text)
else:
return None
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value):
r = requests.put(self._url + '/' + key, data=value)
if r.status_code == 200 and re.match(r"true", r.text) is not None:
return True
else:
return False
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
requests.delete(url, params=params)
|
<commit_before>import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
if recurse is not None:
url += '?recurse'
if keys is not None:
url += '?keys'
r = requests.get(url)
if r.status_code == 200:
return json.loads(r.text)
else:
return None
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value):
r = requests.put(self._url + '/' + key, data=value)
if r.status_code == 200 and re.match(r"true", r.text) is not None:
return True
else:
return False
def delete(self, key, recurse=None):
url = self._url + '/' + key
if recurse is not None:
url += '?recurse'
requests.delete(url)
<commit_msg>Use params dictionary instead of appending to the end of URL string<commit_after>import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.status_code == 200:
return json.loads(r.text)
else:
return None
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value):
r = requests.put(self._url + '/' + key, data=value)
if r.status_code == 200 and re.match(r"true", r.text) is not None:
return True
else:
return False
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
requests.delete(url, params=params)
|
c4826775915d53800262feac0109a4e1ad15e78b
|
site_scons/DefaultConfig.py
|
site_scons/DefaultConfig.py
|
import Config
dbg = Config.Config('dbg', default=True, CCFLAGS=['-g', '-O0'], suffix='_DEBUG')
optdbg = Config.Config('optdbg', CCFLAGS=['-g', '-02'], suffix='_OPTIMIZED')
release = Config.Config('release', CCFLAGS=['-02'])
|
import Config
from SCons.Script import *
dbgFlags = {}
optdbgFlags = {}
releaseFlags = {}
env = Environment()
if env['CC'] == 'gcc':
dbgFlags['CCFLAGS'] = ['-g', '-O0']
optdbgFlags['CCFLAGS'] = ['-g', '-O2']
releaseFlags['CCFLAGS'] = ['-O2']
elif env['CC'] == 'cl':
dbgFlags['CCFLAGS'] = ['/Zi', '/Od']
optdbgFlags['CCFLAGS'] = ['/Zi', '/O2']
releaseFlags['CCFLAGS'] = ['/O2']
dbg = Config.Config('dbg', default=True, suffix='_DEBUG', **dbgFlags)
optdbg = Config.Config('optdbg', suffix='_OPTIMIZED', **optdbgFlags)
release = Config.Config('release', **releaseFlags)
|
Use msvc flags when building with msvc, and gcc flags when building with gcc
|
Use msvc flags when building with msvc, and gcc flags when building with gcc
|
Python
|
mit
|
AlexSc/HelloWorld,AlexSc/HelloWorld,AlexSc/HelloWorld
|
import Config
dbg = Config.Config('dbg', default=True, CCFLAGS=['-g', '-O0'], suffix='_DEBUG')
optdbg = Config.Config('optdbg', CCFLAGS=['-g', '-02'], suffix='_OPTIMIZED')
release = Config.Config('release', CCFLAGS=['-02'])
Use msvc flags when building with msvc, and gcc flags when building with gcc
|
import Config
from SCons.Script import *
dbgFlags = {}
optdbgFlags = {}
releaseFlags = {}
env = Environment()
if env['CC'] == 'gcc':
dbgFlags['CCFLAGS'] = ['-g', '-O0']
optdbgFlags['CCFLAGS'] = ['-g', '-O2']
releaseFlags['CCFLAGS'] = ['-O2']
elif env['CC'] == 'cl':
dbgFlags['CCFLAGS'] = ['/Zi', '/Od']
optdbgFlags['CCFLAGS'] = ['/Zi', '/O2']
releaseFlags['CCFLAGS'] = ['/O2']
dbg = Config.Config('dbg', default=True, suffix='_DEBUG', **dbgFlags)
optdbg = Config.Config('optdbg', suffix='_OPTIMIZED', **optdbgFlags)
release = Config.Config('release', **releaseFlags)
|
<commit_before>import Config
dbg = Config.Config('dbg', default=True, CCFLAGS=['-g', '-O0'], suffix='_DEBUG')
optdbg = Config.Config('optdbg', CCFLAGS=['-g', '-02'], suffix='_OPTIMIZED')
release = Config.Config('release', CCFLAGS=['-02'])
<commit_msg>Use msvc flags when building with msvc, and gcc flags when building with gcc<commit_after>
|
import Config
from SCons.Script import *
dbgFlags = {}
optdbgFlags = {}
releaseFlags = {}
env = Environment()
if env['CC'] == 'gcc':
dbgFlags['CCFLAGS'] = ['-g', '-O0']
optdbgFlags['CCFLAGS'] = ['-g', '-O2']
releaseFlags['CCFLAGS'] = ['-O2']
elif env['CC'] == 'cl':
dbgFlags['CCFLAGS'] = ['/Zi', '/Od']
optdbgFlags['CCFLAGS'] = ['/Zi', '/O2']
releaseFlags['CCFLAGS'] = ['/O2']
dbg = Config.Config('dbg', default=True, suffix='_DEBUG', **dbgFlags)
optdbg = Config.Config('optdbg', suffix='_OPTIMIZED', **optdbgFlags)
release = Config.Config('release', **releaseFlags)
|
import Config
dbg = Config.Config('dbg', default=True, CCFLAGS=['-g', '-O0'], suffix='_DEBUG')
optdbg = Config.Config('optdbg', CCFLAGS=['-g', '-02'], suffix='_OPTIMIZED')
release = Config.Config('release', CCFLAGS=['-02'])
Use msvc flags when building with msvc, and gcc flags when building with gccimport Config
from SCons.Script import *
dbgFlags = {}
optdbgFlags = {}
releaseFlags = {}
env = Environment()
if env['CC'] == 'gcc':
dbgFlags['CCFLAGS'] = ['-g', '-O0']
optdbgFlags['CCFLAGS'] = ['-g', '-O2']
releaseFlags['CCFLAGS'] = ['-O2']
elif env['CC'] == 'cl':
dbgFlags['CCFLAGS'] = ['/Zi', '/Od']
optdbgFlags['CCFLAGS'] = ['/Zi', '/O2']
releaseFlags['CCFLAGS'] = ['/O2']
dbg = Config.Config('dbg', default=True, suffix='_DEBUG', **dbgFlags)
optdbg = Config.Config('optdbg', suffix='_OPTIMIZED', **optdbgFlags)
release = Config.Config('release', **releaseFlags)
|
<commit_before>import Config
dbg = Config.Config('dbg', default=True, CCFLAGS=['-g', '-O0'], suffix='_DEBUG')
optdbg = Config.Config('optdbg', CCFLAGS=['-g', '-02'], suffix='_OPTIMIZED')
release = Config.Config('release', CCFLAGS=['-02'])
<commit_msg>Use msvc flags when building with msvc, and gcc flags when building with gcc<commit_after>import Config
from SCons.Script import *
dbgFlags = {}
optdbgFlags = {}
releaseFlags = {}
env = Environment()
if env['CC'] == 'gcc':
dbgFlags['CCFLAGS'] = ['-g', '-O0']
optdbgFlags['CCFLAGS'] = ['-g', '-O2']
releaseFlags['CCFLAGS'] = ['-O2']
elif env['CC'] == 'cl':
dbgFlags['CCFLAGS'] = ['/Zi', '/Od']
optdbgFlags['CCFLAGS'] = ['/Zi', '/O2']
releaseFlags['CCFLAGS'] = ['/O2']
dbg = Config.Config('dbg', default=True, suffix='_DEBUG', **dbgFlags)
optdbg = Config.Config('optdbg', suffix='_OPTIMIZED', **optdbgFlags)
release = Config.Config('release', **releaseFlags)
|
8dc265ac0c2bbea683d900f64c5080a23879c9da
|
spacy/tests/lang/da/test_exceptions.py
|
spacy/tests/lang/da/test_exceptions.py
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
def test_da_tokenizer_handles_custom_base_exc(da_tokenizer):
text = "Her er noget du kan kigge i."
tokens = da_tokenizer(text)
assert len(tokens) == 8
assert tokens[6].text == "i"
assert tokens[7].text == "."
|
Add test for tokenization of 'i.' for Danish.
|
Add test for tokenization of 'i.' for Danish.
|
Python
|
mit
|
explosion/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,recognai/spaCy,honnibal/spaCy,recognai/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
Add test for tokenization of 'i.' for Danish.
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
def test_da_tokenizer_handles_custom_base_exc(da_tokenizer):
text = "Her er noget du kan kigge i."
tokens = da_tokenizer(text)
assert len(tokens) == 8
assert tokens[6].text == "i"
assert tokens[7].text == "."
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
<commit_msg>Add test for tokenization of 'i.' for Danish.<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
def test_da_tokenizer_handles_custom_base_exc(da_tokenizer):
text = "Her er noget du kan kigge i."
tokens = da_tokenizer(text)
assert len(tokens) == 8
assert tokens[6].text == "i"
assert tokens[7].text == "."
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
Add test for tokenization of 'i.' for Danish.# coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
def test_da_tokenizer_handles_custom_base_exc(da_tokenizer):
text = "Her er noget du kan kigge i."
tokens = da_tokenizer(text)
assert len(tokens) == 8
assert tokens[6].text == "i"
assert tokens[7].text == "."
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
<commit_msg>Add test for tokenization of 'i.' for Danish.<commit_after># coding: utf-8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
def test_da_tokenizer_handles_custom_base_exc(da_tokenizer):
text = "Her er noget du kan kigge i."
tokens = da_tokenizer(text)
assert len(tokens) == 8
assert tokens[6].text == "i"
assert tokens[7].text == "."
|
4687e306797e96c85165fabea3ad1fc005469aa1
|
tools/telemetry/telemetry/internal/platform/profiler/android_screen_recorder_profiler.py
|
tools/telemetry/telemetry/internal/platform/profiler/android_screen_recorder_profiler.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.internal.platform import profiler
from telemetry.internal import util
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.device.adb.GetDeviceSerial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.internal.platform import profiler
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.device.adb.GetDeviceSerial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
Fix an import path in the Android screen recorder
|
telemetry: Fix an import path in the Android screen recorder
Review URL: https://codereview.chromium.org/1301613004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#343960}
|
Python
|
bsd-3-clause
|
ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.internal.platform import profiler
from telemetry.internal import util
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.device.adb.GetDeviceSerial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
telemetry: Fix an import path in the Android screen recorder
Review URL: https://codereview.chromium.org/1301613004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#343960}
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.internal.platform import profiler
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.device.adb.GetDeviceSerial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.internal.platform import profiler
from telemetry.internal import util
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.device.adb.GetDeviceSerial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
<commit_msg>telemetry: Fix an import path in the Android screen recorder
Review URL: https://codereview.chromium.org/1301613004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#343960}<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.internal.platform import profiler
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.device.adb.GetDeviceSerial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.internal.platform import profiler
from telemetry.internal import util
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.device.adb.GetDeviceSerial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
telemetry: Fix an import path in the Android screen recorder
Review URL: https://codereview.chromium.org/1301613004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#343960}# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.internal.platform import profiler
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.device.adb.GetDeviceSerial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.internal.platform import profiler
from telemetry.internal import util
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.device.adb.GetDeviceSerial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
<commit_msg>telemetry: Fix an import path in the Android screen recorder
Review URL: https://codereview.chromium.org/1301613004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#343960}<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.internal.platform import profiler
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.device.adb.GetDeviceSerial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
a00687f0bf11f40bf65e429caeb4fd9439557fbb
|
contrib/gunicorn_config.py
|
contrib/gunicorn_config.py
|
# Gunicorn config file
import os
flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
reload = True
bind = '%s:%s' % (
os.environ.get('REGISTRY_HOST', '0.0.0.0'),
os.environ.get('REGISTRY_PORT', '5000')
)
graceful_timeout = int(os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', 3600))
timeout = int(os.environ.get('GUNICORN_SILENT_TIMEOUT', 3600))
worker_class = 'gevent'
max_requests = 100
workers = int(os.environ.get('GUNICORN_WORKERS', 4))
log_level = 'debug'
debug = True
accesslog = os.environ.get('GUNICORN_ACCESS_LOG_FILE', '-')
errorlog = os.environ.get('GUNICORN_ERROR_LOG_FILE', '-')
access_log_format = ('%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" '
'"%(a)s" %(D)s %({X-Docker-Size}o)s')
if flavor == 'prod' or flavor == 'staging':
reload = False
workers = 8
debug = False
log_level = 'info'
|
# Gunicorn config file
import os
flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
reload = True
bind = '%s:%s' % (
os.environ.get('REGISTRY_HOST', '0.0.0.0'),
os.environ.get('REGISTRY_PORT', '5000')
)
graceful_timeout = int(os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', 3600))
timeout = int(os.environ.get('GUNICORN_SILENT_TIMEOUT', 3600))
worker_class = 'gevent'
max_requests = int(os.environ.get('GUNICORN_MAX_REQUESTS', 100))
workers = int(os.environ.get('GUNICORN_WORKERS', 4))
log_level = 'debug'
debug = True
accesslog = os.environ.get('GUNICORN_ACCESS_LOG_FILE', '-')
errorlog = os.environ.get('GUNICORN_ERROR_LOG_FILE', '-')
access_log_format = ('%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" '
'"%(a)s" %(D)s %({X-Docker-Size}o)s')
if flavor == 'prod' or flavor == 'staging':
reload = False
workers = 8
debug = False
log_level = 'info'
|
Enable Gunicorn max_requests config through env
|
Enable Gunicorn max_requests config through env
This commit enables a user to alter the `max_requests` parameter
passed through to Gunicorn without having to resort to the "bare"
CMD-specification hinted at in the Advanced documentation.
|
Python
|
apache-2.0
|
deis/docker-registry,deis/docker-registry,docker/docker-registry,whuwxl/docker-registry,viljaste/docker-registry-1,dalvikchen/docker-registry,Haitianisgood/docker-registry,depay/docker-registry,tangkun75/docker-registry,pombredanne/docker-registry,docker/docker-registry,OnePaaS/docker-registry,mboersma/docker-registry,kireal/docker-registry,mboersma/docker-registry,ptisserand/docker-registry,ken-saka/docker-registry,stormltf/docker-registry,hex108/docker-registry,viljaste/docker-registry-1,pombredanne/docker-registry,atyenoria/docker-registry,viljaste/docker-registry-1,pombredanne/docker-registry,stormltf/docker-registry,Haitianisgood/docker-registry,tangkun75/docker-registry,hex108/docker-registry,Carrotzpc/docker-registry,mboersma/docker-registry,wakermahmud/docker-registry,yuriyf/docker-registry,nunogt/docker-registry,yuriyf/docker-registry,dedalusdev/docker-registry,dalvikchen/docker-registry,dedalusdev/docker-registry,HubSpot/docker-registry,ptisserand/docker-registry,HubSpot/docker-registry,kireal/docker-registry,stormltf/docker-registry,HubSpot/docker-registry,depay/docker-registry,dedalusdev/docker-registry,whuwxl/docker-registry,ptisserand/docker-registry,atyenoria/docker-registry,wakermahmud/docker-registry,deis/docker-registry,OnePaaS/docker-registry,wakermahmud/docker-registry,Carrotzpc/docker-registry,depay/docker-registry,ken-saka/docker-registry,yuriyf/docker-registry,whuwxl/docker-registry,Carrotzpc/docker-registry,hex108/docker-registry,OnePaaS/docker-registry,atyenoria/docker-registry,ken-saka/docker-registry,dhiltgen/docker-registry,tangkun75/docker-registry,dhiltgen/docker-registry,kireal/docker-registry,dhiltgen/docker-registry,nunogt/docker-registry,docker/docker-registry,Haitianisgood/docker-registry,dalvikchen/docker-registry,nunogt/docker-registry
|
# Gunicorn config file
import os
flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
reload = True
bind = '%s:%s' % (
os.environ.get('REGISTRY_HOST', '0.0.0.0'),
os.environ.get('REGISTRY_PORT', '5000')
)
graceful_timeout = int(os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', 3600))
timeout = int(os.environ.get('GUNICORN_SILENT_TIMEOUT', 3600))
worker_class = 'gevent'
max_requests = 100
workers = int(os.environ.get('GUNICORN_WORKERS', 4))
log_level = 'debug'
debug = True
accesslog = os.environ.get('GUNICORN_ACCESS_LOG_FILE', '-')
errorlog = os.environ.get('GUNICORN_ERROR_LOG_FILE', '-')
access_log_format = ('%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" '
'"%(a)s" %(D)s %({X-Docker-Size}o)s')
if flavor == 'prod' or flavor == 'staging':
reload = False
workers = 8
debug = False
log_level = 'info'
Enable Gunicorn max_requests config through env
This commit enables a user to alter the `max_requests` parameter
passed through to Gunicorn without having to resort to the "bare"
CMD-specification hinted at in the Advanced documentation.
|
# Gunicorn config file
import os
flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
reload = True
bind = '%s:%s' % (
os.environ.get('REGISTRY_HOST', '0.0.0.0'),
os.environ.get('REGISTRY_PORT', '5000')
)
graceful_timeout = int(os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', 3600))
timeout = int(os.environ.get('GUNICORN_SILENT_TIMEOUT', 3600))
worker_class = 'gevent'
max_requests = int(os.environ.get('GUNICORN_MAX_REQUESTS', 100))
workers = int(os.environ.get('GUNICORN_WORKERS', 4))
log_level = 'debug'
debug = True
accesslog = os.environ.get('GUNICORN_ACCESS_LOG_FILE', '-')
errorlog = os.environ.get('GUNICORN_ERROR_LOG_FILE', '-')
access_log_format = ('%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" '
'"%(a)s" %(D)s %({X-Docker-Size}o)s')
if flavor == 'prod' or flavor == 'staging':
reload = False
workers = 8
debug = False
log_level = 'info'
|
<commit_before># Gunicorn config file
import os
flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
reload = True
bind = '%s:%s' % (
os.environ.get('REGISTRY_HOST', '0.0.0.0'),
os.environ.get('REGISTRY_PORT', '5000')
)
graceful_timeout = int(os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', 3600))
timeout = int(os.environ.get('GUNICORN_SILENT_TIMEOUT', 3600))
worker_class = 'gevent'
max_requests = 100
workers = int(os.environ.get('GUNICORN_WORKERS', 4))
log_level = 'debug'
debug = True
accesslog = os.environ.get('GUNICORN_ACCESS_LOG_FILE', '-')
errorlog = os.environ.get('GUNICORN_ERROR_LOG_FILE', '-')
access_log_format = ('%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" '
'"%(a)s" %(D)s %({X-Docker-Size}o)s')
if flavor == 'prod' or flavor == 'staging':
reload = False
workers = 8
debug = False
log_level = 'info'
<commit_msg>Enable Gunicorn max_requests config through env
This commit enables a user to alter the `max_requests` parameter
passed through to Gunicorn without having to resort to the "bare"
CMD-specification hinted at in the Advanced documentation.<commit_after>
|
# Gunicorn config file
import os
flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
reload = True
bind = '%s:%s' % (
os.environ.get('REGISTRY_HOST', '0.0.0.0'),
os.environ.get('REGISTRY_PORT', '5000')
)
graceful_timeout = int(os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', 3600))
timeout = int(os.environ.get('GUNICORN_SILENT_TIMEOUT', 3600))
worker_class = 'gevent'
max_requests = int(os.environ.get('GUNICORN_MAX_REQUESTS', 100))
workers = int(os.environ.get('GUNICORN_WORKERS', 4))
log_level = 'debug'
debug = True
accesslog = os.environ.get('GUNICORN_ACCESS_LOG_FILE', '-')
errorlog = os.environ.get('GUNICORN_ERROR_LOG_FILE', '-')
access_log_format = ('%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" '
'"%(a)s" %(D)s %({X-Docker-Size}o)s')
if flavor == 'prod' or flavor == 'staging':
reload = False
workers = 8
debug = False
log_level = 'info'
|
# Gunicorn config file
import os
flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
reload = True
bind = '%s:%s' % (
os.environ.get('REGISTRY_HOST', '0.0.0.0'),
os.environ.get('REGISTRY_PORT', '5000')
)
graceful_timeout = int(os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', 3600))
timeout = int(os.environ.get('GUNICORN_SILENT_TIMEOUT', 3600))
worker_class = 'gevent'
max_requests = 100
workers = int(os.environ.get('GUNICORN_WORKERS', 4))
log_level = 'debug'
debug = True
accesslog = os.environ.get('GUNICORN_ACCESS_LOG_FILE', '-')
errorlog = os.environ.get('GUNICORN_ERROR_LOG_FILE', '-')
access_log_format = ('%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" '
'"%(a)s" %(D)s %({X-Docker-Size}o)s')
if flavor == 'prod' or flavor == 'staging':
reload = False
workers = 8
debug = False
log_level = 'info'
Enable Gunicorn max_requests config through env
This commit enables a user to alter the `max_requests` parameter
passed through to Gunicorn without having to resort to the "bare"
CMD-specification hinted at in the Advanced documentation.# Gunicorn config file
import os
flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
reload = True
bind = '%s:%s' % (
os.environ.get('REGISTRY_HOST', '0.0.0.0'),
os.environ.get('REGISTRY_PORT', '5000')
)
graceful_timeout = int(os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', 3600))
timeout = int(os.environ.get('GUNICORN_SILENT_TIMEOUT', 3600))
worker_class = 'gevent'
max_requests = int(os.environ.get('GUNICORN_MAX_REQUESTS', 100))
workers = int(os.environ.get('GUNICORN_WORKERS', 4))
log_level = 'debug'
debug = True
accesslog = os.environ.get('GUNICORN_ACCESS_LOG_FILE', '-')
errorlog = os.environ.get('GUNICORN_ERROR_LOG_FILE', '-')
access_log_format = ('%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" '
'"%(a)s" %(D)s %({X-Docker-Size}o)s')
if flavor == 'prod' or flavor == 'staging':
reload = False
workers = 8
debug = False
log_level = 'info'
|
<commit_before># Gunicorn config file
import os
flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
reload = True
bind = '%s:%s' % (
os.environ.get('REGISTRY_HOST', '0.0.0.0'),
os.environ.get('REGISTRY_PORT', '5000')
)
graceful_timeout = int(os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', 3600))
timeout = int(os.environ.get('GUNICORN_SILENT_TIMEOUT', 3600))
worker_class = 'gevent'
max_requests = 100
workers = int(os.environ.get('GUNICORN_WORKERS', 4))
log_level = 'debug'
debug = True
accesslog = os.environ.get('GUNICORN_ACCESS_LOG_FILE', '-')
errorlog = os.environ.get('GUNICORN_ERROR_LOG_FILE', '-')
access_log_format = ('%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" '
'"%(a)s" %(D)s %({X-Docker-Size}o)s')
if flavor == 'prod' or flavor == 'staging':
reload = False
workers = 8
debug = False
log_level = 'info'
<commit_msg>Enable Gunicorn max_requests config through env
This commit enables a user to alter the `max_requests` parameter
passed through to Gunicorn without having to resort to the "bare"
CMD-specification hinted at in the Advanced documentation.<commit_after># Gunicorn config file
import os
flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
reload = True
bind = '%s:%s' % (
os.environ.get('REGISTRY_HOST', '0.0.0.0'),
os.environ.get('REGISTRY_PORT', '5000')
)
graceful_timeout = int(os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', 3600))
timeout = int(os.environ.get('GUNICORN_SILENT_TIMEOUT', 3600))
worker_class = 'gevent'
max_requests = int(os.environ.get('GUNICORN_MAX_REQUESTS', 100))
workers = int(os.environ.get('GUNICORN_WORKERS', 4))
log_level = 'debug'
debug = True
accesslog = os.environ.get('GUNICORN_ACCESS_LOG_FILE', '-')
errorlog = os.environ.get('GUNICORN_ERROR_LOG_FILE', '-')
access_log_format = ('%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" '
'"%(a)s" %(D)s %({X-Docker-Size}o)s')
if flavor == 'prod' or flavor == 'staging':
reload = False
workers = 8
debug = False
log_level = 'info'
|
7e19c3058615f4599ed7339e2bd157b72cd51018
|
test_dimuon.py
|
test_dimuon.py
|
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
|
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
def test_inv_mass_nonzero_mass_particles():
# shouldn't actually make any difference if masses are non-zero
pos = Particle(1.0, +0.5, 0, pi/2)
neg = Particle(1.0, -0.5, pi, pi/2)
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
|
Test pair mass for non-zero mass particles
|
Test pair mass for non-zero mass particles
|
Python
|
mit
|
benwaugh/dimuon
|
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
Test pair mass for non-zero mass particles
|
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
def test_inv_mass_nonzero_mass_particles():
# shouldn't actually make any difference if masses are non-zero
pos = Particle(1.0, +0.5, 0, pi/2)
neg = Particle(1.0, -0.5, pi, pi/2)
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
|
<commit_before>from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
<commit_msg>Test pair mass for non-zero mass particles<commit_after>
|
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
def test_inv_mass_nonzero_mass_particles():
# shouldn't actually make any difference if masses are non-zero
pos = Particle(1.0, +0.5, 0, pi/2)
neg = Particle(1.0, -0.5, pi, pi/2)
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
|
from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
Test pair mass for non-zero mass particlesfrom dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
def test_inv_mass_nonzero_mass_particles():
# shouldn't actually make any difference if masses are non-zero
pos = Particle(1.0, +0.5, 0, pi/2)
neg = Particle(1.0, -0.5, pi, pi/2)
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
|
<commit_before>from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
<commit_msg>Test pair mass for non-zero mass particles<commit_after>from dimuon import *
from nose.tools import *
from math import pi
class DummyParticle:
def __init__(self, q):
self.q = q
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
pos = DummyParticle(+1)
particles = [pos]
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_two_particles_unlike_sign():
pos = DummyParticle(+1)
neg = DummyParticle(-1)
particles = [pos,neg]
pairs = find_pairs(particles)
assert_equal(pairs, [(pos,neg)] )
def test_two_particles_like_sign():
pos1 = DummyParticle(+1)
pos2 = DummyParticle(+1)
particles = [pos1,pos2]
pairs = find_pairs(particles)
assert_equal(len(pairs), 0)
def test_inv_mass_zero_mass_particles():
pos = Particle(1.0, +1.0, 0, pi/2) # massless particle with pt = 1 GeV
neg = Particle(1.0, -1.0, pi, pi/2) # massless, pt = 1 GeV, opposite direction
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
def test_inv_mass_nonzero_mass_particles():
# shouldn't actually make any difference if masses are non-zero
pos = Particle(1.0, +0.5, 0, pi/2)
neg = Particle(1.0, -0.5, pi, pi/2)
assert_equal(inv_mass_from_pair((pos,neg)), 2.0)
|
59fa966d43e4fd66669c3390464f60f323cf2865
|
tests/changes/api/serializer/models/test_command.py
|
tests/changes/api/serializer/models/test_command.py
|
from datetime import datetime
from changes.api.serializer import serialize
from changes.config import db
from changes.models import Command
from changes.testutils import TestCase
class CommandSerializerTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(project)
job = self.create_job(build)
jobphase = self.create_jobphase(job)
jobstep = self.create_jobstep(jobphase)
command = Command(
label='echo 1',
jobstep_id=jobstep.id,
cwd='/home/foobar',
script='echo 1',
date_created=datetime(2013, 9, 19, 22, 15, 22),
artifacts=['junit.xml'],
)
db.session.add(command)
db.session.flush()
result = serialize(command)
assert result['id'] == command.id.hex
assert result['dateCreated'] == '2013-09-19T22:15:22'
assert result['cwd'] == command.cwd
assert result['script'] == command.script
|
from datetime import datetime
from changes.api.serializer import serialize
from changes.config import db
from changes.models import Command
from changes.testutils import TestCase
class CommandSerializerTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(project)
job = self.create_job(build)
jobphase = self.create_jobphase(job)
jobstep = self.create_jobstep(jobphase)
command = Command(
label='echo 1',
jobstep_id=jobstep.id,
cwd='/home/foobar',
env={'foo': 'bar'},
script='echo 1',
date_created=datetime(2013, 9, 19, 22, 15, 22),
artifacts=['junit.xml'],
)
db.session.add(command)
db.session.flush()
result = serialize(command)
assert result['id'] == command.id.hex
assert result['dateCreated'] == '2013-09-19T22:15:22'
assert result['cwd'] == command.cwd
assert result['env'] == {'foo': 'bar'}
assert result['script'] == command.script
|
Add tests for env serialization
|
Add tests for env serialization
|
Python
|
apache-2.0
|
dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes
|
from datetime import datetime
from changes.api.serializer import serialize
from changes.config import db
from changes.models import Command
from changes.testutils import TestCase
class CommandSerializerTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(project)
job = self.create_job(build)
jobphase = self.create_jobphase(job)
jobstep = self.create_jobstep(jobphase)
command = Command(
label='echo 1',
jobstep_id=jobstep.id,
cwd='/home/foobar',
script='echo 1',
date_created=datetime(2013, 9, 19, 22, 15, 22),
artifacts=['junit.xml'],
)
db.session.add(command)
db.session.flush()
result = serialize(command)
assert result['id'] == command.id.hex
assert result['dateCreated'] == '2013-09-19T22:15:22'
assert result['cwd'] == command.cwd
assert result['script'] == command.script
Add tests for env serialization
|
from datetime import datetime
from changes.api.serializer import serialize
from changes.config import db
from changes.models import Command
from changes.testutils import TestCase
class CommandSerializerTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(project)
job = self.create_job(build)
jobphase = self.create_jobphase(job)
jobstep = self.create_jobstep(jobphase)
command = Command(
label='echo 1',
jobstep_id=jobstep.id,
cwd='/home/foobar',
env={'foo': 'bar'},
script='echo 1',
date_created=datetime(2013, 9, 19, 22, 15, 22),
artifacts=['junit.xml'],
)
db.session.add(command)
db.session.flush()
result = serialize(command)
assert result['id'] == command.id.hex
assert result['dateCreated'] == '2013-09-19T22:15:22'
assert result['cwd'] == command.cwd
assert result['env'] == {'foo': 'bar'}
assert result['script'] == command.script
|
<commit_before>from datetime import datetime
from changes.api.serializer import serialize
from changes.config import db
from changes.models import Command
from changes.testutils import TestCase
class CommandSerializerTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(project)
job = self.create_job(build)
jobphase = self.create_jobphase(job)
jobstep = self.create_jobstep(jobphase)
command = Command(
label='echo 1',
jobstep_id=jobstep.id,
cwd='/home/foobar',
script='echo 1',
date_created=datetime(2013, 9, 19, 22, 15, 22),
artifacts=['junit.xml'],
)
db.session.add(command)
db.session.flush()
result = serialize(command)
assert result['id'] == command.id.hex
assert result['dateCreated'] == '2013-09-19T22:15:22'
assert result['cwd'] == command.cwd
assert result['script'] == command.script
<commit_msg>Add tests for env serialization<commit_after>
|
from datetime import datetime
from changes.api.serializer import serialize
from changes.config import db
from changes.models import Command
from changes.testutils import TestCase
class CommandSerializerTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(project)
job = self.create_job(build)
jobphase = self.create_jobphase(job)
jobstep = self.create_jobstep(jobphase)
command = Command(
label='echo 1',
jobstep_id=jobstep.id,
cwd='/home/foobar',
env={'foo': 'bar'},
script='echo 1',
date_created=datetime(2013, 9, 19, 22, 15, 22),
artifacts=['junit.xml'],
)
db.session.add(command)
db.session.flush()
result = serialize(command)
assert result['id'] == command.id.hex
assert result['dateCreated'] == '2013-09-19T22:15:22'
assert result['cwd'] == command.cwd
assert result['env'] == {'foo': 'bar'}
assert result['script'] == command.script
|
from datetime import datetime
from changes.api.serializer import serialize
from changes.config import db
from changes.models import Command
from changes.testutils import TestCase
class CommandSerializerTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(project)
job = self.create_job(build)
jobphase = self.create_jobphase(job)
jobstep = self.create_jobstep(jobphase)
command = Command(
label='echo 1',
jobstep_id=jobstep.id,
cwd='/home/foobar',
script='echo 1',
date_created=datetime(2013, 9, 19, 22, 15, 22),
artifacts=['junit.xml'],
)
db.session.add(command)
db.session.flush()
result = serialize(command)
assert result['id'] == command.id.hex
assert result['dateCreated'] == '2013-09-19T22:15:22'
assert result['cwd'] == command.cwd
assert result['script'] == command.script
Add tests for env serializationfrom datetime import datetime
from changes.api.serializer import serialize
from changes.config import db
from changes.models import Command
from changes.testutils import TestCase
class CommandSerializerTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(project)
job = self.create_job(build)
jobphase = self.create_jobphase(job)
jobstep = self.create_jobstep(jobphase)
command = Command(
label='echo 1',
jobstep_id=jobstep.id,
cwd='/home/foobar',
env={'foo': 'bar'},
script='echo 1',
date_created=datetime(2013, 9, 19, 22, 15, 22),
artifacts=['junit.xml'],
)
db.session.add(command)
db.session.flush()
result = serialize(command)
assert result['id'] == command.id.hex
assert result['dateCreated'] == '2013-09-19T22:15:22'
assert result['cwd'] == command.cwd
assert result['env'] == {'foo': 'bar'}
assert result['script'] == command.script
|
<commit_before>from datetime import datetime
from changes.api.serializer import serialize
from changes.config import db
from changes.models import Command
from changes.testutils import TestCase
class CommandSerializerTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(project)
job = self.create_job(build)
jobphase = self.create_jobphase(job)
jobstep = self.create_jobstep(jobphase)
command = Command(
label='echo 1',
jobstep_id=jobstep.id,
cwd='/home/foobar',
script='echo 1',
date_created=datetime(2013, 9, 19, 22, 15, 22),
artifacts=['junit.xml'],
)
db.session.add(command)
db.session.flush()
result = serialize(command)
assert result['id'] == command.id.hex
assert result['dateCreated'] == '2013-09-19T22:15:22'
assert result['cwd'] == command.cwd
assert result['script'] == command.script
<commit_msg>Add tests for env serialization<commit_after>from datetime import datetime
from changes.api.serializer import serialize
from changes.config import db
from changes.models import Command
from changes.testutils import TestCase
class CommandSerializerTest(TestCase):
def test_simple(self):
project = self.create_project()
build = self.create_build(project)
job = self.create_job(build)
jobphase = self.create_jobphase(job)
jobstep = self.create_jobstep(jobphase)
command = Command(
label='echo 1',
jobstep_id=jobstep.id,
cwd='/home/foobar',
env={'foo': 'bar'},
script='echo 1',
date_created=datetime(2013, 9, 19, 22, 15, 22),
artifacts=['junit.xml'],
)
db.session.add(command)
db.session.flush()
result = serialize(command)
assert result['id'] == command.id.hex
assert result['dateCreated'] == '2013-09-19T22:15:22'
assert result['cwd'] == command.cwd
assert result['env'] == {'foo': 'bar'}
assert result['script'] == command.script
|
03eb110d1c71c1ad6e40f856b5eb5c8ff96090ff
|
tests/dotnetexample/conf.py
|
tests/dotnetexample/conf.py
|
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/mvc/src/'
autoapi_file_pattern = 'project.json'
autoapi_ignore = ['toc.yml', 'index.yml']
|
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/corefx/src'
autoapi_ignore = ['*toc.yml', '*index.yml', '*tests*tests*']
|
Remove outdated test deps from dotnet
|
Remove outdated test deps from dotnet
|
Python
|
mit
|
rtfd/sphinx-autoapi,rtfd/sphinx-autoapi,rtfd/sphinx-autoapi,rtfd/sphinx-autoapi
|
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/mvc/src/'
autoapi_file_pattern = 'project.json'
autoapi_ignore = ['toc.yml', 'index.yml']
Remove outdated test deps from dotnet
|
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/corefx/src'
autoapi_ignore = ['*toc.yml', '*index.yml', '*tests*tests*']
|
<commit_before># -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/mvc/src/'
autoapi_file_pattern = 'project.json'
autoapi_ignore = ['toc.yml', 'index.yml']
<commit_msg>Remove outdated test deps from dotnet<commit_after>
|
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/corefx/src'
autoapi_ignore = ['*toc.yml', '*index.yml', '*tests*tests*']
|
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/mvc/src/'
autoapi_file_pattern = 'project.json'
autoapi_ignore = ['toc.yml', 'index.yml']
Remove outdated test deps from dotnet# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/corefx/src'
autoapi_ignore = ['*toc.yml', '*index.yml', '*tests*tests*']
|
<commit_before># -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/mvc/src/'
autoapi_file_pattern = 'project.json'
autoapi_ignore = ['toc.yml', 'index.yml']
<commit_msg>Remove outdated test deps from dotnet<commit_after># -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/corefx/src'
autoapi_ignore = ['*toc.yml', '*index.yml', '*tests*tests*']
|
3507a4c4efc21e701970a5bc33ddeae0b0c20943
|
unzip/utils.py
|
unzip/utils.py
|
# coding=utf-8
import logging
import subprocess
from subprocess import Popen, PIPE
class GitCmdException(Exception):
def __init__(self, stdout, stderr, message):
self.stdout = stdout
self.stderr = stderr
self.message = message
class RebaseAndTagException(GitCmdException):
def __init__(self, tag, stdout, stderr, message):
self.tag = tag
super(RebaseAndTagException, self).__init__(stdout, stderr, message)
def run_cmd(cmd, debug=True, multi=False):
if debug:
logger = logging.getLogger('unzip')
logger.debug('Running: %s' % cmd)
if multi:
cmd_parts = cmd.split(' | ')
first_cmd = cmd_parts.pop(0)
process = Popen(first_cmd.split(), stdout=PIPE)
for cmd in cmd_parts:
process = Popen(cmd.split(), stdin=process.stdout, stdout=PIPE)
else:
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if process.returncode:
raise GitCmdException(stdout=stdout, stderr=stderr, message='Problem running %s: %s' % (cmd, stderr.strip()))
return stdout.strip()
|
# coding=utf-8
import logging
import subprocess
from subprocess import Popen, PIPE
class GitCmdException(Exception):
def __init__(self, stdout, stderr, message):
self.stdout = stdout
self.stderr = stderr
self.message = message
class RebaseAndTagException(GitCmdException):
def __init__(self, tag, stdout, stderr, message):
self.tag = tag
super(RebaseAndTagException, self).__init__(stdout, stderr, message)
def run_cmd(cmd, debug=True, multi=False):
if debug:
logger = logging.getLogger('unzip')
logger.debug('Running: %s' % cmd)
if multi:
cmd_parts = cmd.split(' | ')
first_cmd = cmd_parts.pop(0)
process = Popen(first_cmd.split(), stdout=PIPE)
for cmd in cmd_parts:
process = Popen(cmd.split(), stdin=process.stdout, stdout=PIPE)
else:
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
stdout = stdout or ''
stderr = stderr or ''
if process.returncode:
raise GitCmdException(stdout=stdout, stderr=stderr, message='Problem running %s: %s' % (cmd, stderr.strip()))
return stdout.strip()
|
Make sure we always have a string to work with
|
Make sure we always have a string to work with
|
Python
|
mit
|
xiian/git-unzip
|
# coding=utf-8
import logging
import subprocess
from subprocess import Popen, PIPE
class GitCmdException(Exception):
def __init__(self, stdout, stderr, message):
self.stdout = stdout
self.stderr = stderr
self.message = message
class RebaseAndTagException(GitCmdException):
def __init__(self, tag, stdout, stderr, message):
self.tag = tag
super(RebaseAndTagException, self).__init__(stdout, stderr, message)
def run_cmd(cmd, debug=True, multi=False):
if debug:
logger = logging.getLogger('unzip')
logger.debug('Running: %s' % cmd)
if multi:
cmd_parts = cmd.split(' | ')
first_cmd = cmd_parts.pop(0)
process = Popen(first_cmd.split(), stdout=PIPE)
for cmd in cmd_parts:
process = Popen(cmd.split(), stdin=process.stdout, stdout=PIPE)
else:
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if process.returncode:
raise GitCmdException(stdout=stdout, stderr=stderr, message='Problem running %s: %s' % (cmd, stderr.strip()))
return stdout.strip()
Make sure we always have a string to work with
|
# coding=utf-8
import logging
import subprocess
from subprocess import Popen, PIPE
class GitCmdException(Exception):
def __init__(self, stdout, stderr, message):
self.stdout = stdout
self.stderr = stderr
self.message = message
class RebaseAndTagException(GitCmdException):
def __init__(self, tag, stdout, stderr, message):
self.tag = tag
super(RebaseAndTagException, self).__init__(stdout, stderr, message)
def run_cmd(cmd, debug=True, multi=False):
if debug:
logger = logging.getLogger('unzip')
logger.debug('Running: %s' % cmd)
if multi:
cmd_parts = cmd.split(' | ')
first_cmd = cmd_parts.pop(0)
process = Popen(first_cmd.split(), stdout=PIPE)
for cmd in cmd_parts:
process = Popen(cmd.split(), stdin=process.stdout, stdout=PIPE)
else:
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
stdout = stdout or ''
stderr = stderr or ''
if process.returncode:
raise GitCmdException(stdout=stdout, stderr=stderr, message='Problem running %s: %s' % (cmd, stderr.strip()))
return stdout.strip()
|
<commit_before># coding=utf-8
import logging
import subprocess
from subprocess import Popen, PIPE
class GitCmdException(Exception):
def __init__(self, stdout, stderr, message):
self.stdout = stdout
self.stderr = stderr
self.message = message
class RebaseAndTagException(GitCmdException):
def __init__(self, tag, stdout, stderr, message):
self.tag = tag
super(RebaseAndTagException, self).__init__(stdout, stderr, message)
def run_cmd(cmd, debug=True, multi=False):
if debug:
logger = logging.getLogger('unzip')
logger.debug('Running: %s' % cmd)
if multi:
cmd_parts = cmd.split(' | ')
first_cmd = cmd_parts.pop(0)
process = Popen(first_cmd.split(), stdout=PIPE)
for cmd in cmd_parts:
process = Popen(cmd.split(), stdin=process.stdout, stdout=PIPE)
else:
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if process.returncode:
raise GitCmdException(stdout=stdout, stderr=stderr, message='Problem running %s: %s' % (cmd, stderr.strip()))
return stdout.strip()
<commit_msg>Make sure we always have a string to work with<commit_after>
|
# coding=utf-8
import logging
import subprocess
from subprocess import Popen, PIPE
class GitCmdException(Exception):
def __init__(self, stdout, stderr, message):
self.stdout = stdout
self.stderr = stderr
self.message = message
class RebaseAndTagException(GitCmdException):
def __init__(self, tag, stdout, stderr, message):
self.tag = tag
super(RebaseAndTagException, self).__init__(stdout, stderr, message)
def run_cmd(cmd, debug=True, multi=False):
if debug:
logger = logging.getLogger('unzip')
logger.debug('Running: %s' % cmd)
if multi:
cmd_parts = cmd.split(' | ')
first_cmd = cmd_parts.pop(0)
process = Popen(first_cmd.split(), stdout=PIPE)
for cmd in cmd_parts:
process = Popen(cmd.split(), stdin=process.stdout, stdout=PIPE)
else:
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
stdout = stdout or ''
stderr = stderr or ''
if process.returncode:
raise GitCmdException(stdout=stdout, stderr=stderr, message='Problem running %s: %s' % (cmd, stderr.strip()))
return stdout.strip()
|
# coding=utf-8
import logging
import subprocess
from subprocess import Popen, PIPE
class GitCmdException(Exception):
def __init__(self, stdout, stderr, message):
self.stdout = stdout
self.stderr = stderr
self.message = message
class RebaseAndTagException(GitCmdException):
def __init__(self, tag, stdout, stderr, message):
self.tag = tag
super(RebaseAndTagException, self).__init__(stdout, stderr, message)
def run_cmd(cmd, debug=True, multi=False):
if debug:
logger = logging.getLogger('unzip')
logger.debug('Running: %s' % cmd)
if multi:
cmd_parts = cmd.split(' | ')
first_cmd = cmd_parts.pop(0)
process = Popen(first_cmd.split(), stdout=PIPE)
for cmd in cmd_parts:
process = Popen(cmd.split(), stdin=process.stdout, stdout=PIPE)
else:
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if process.returncode:
raise GitCmdException(stdout=stdout, stderr=stderr, message='Problem running %s: %s' % (cmd, stderr.strip()))
return stdout.strip()
Make sure we always have a string to work with# coding=utf-8
import logging
import subprocess
from subprocess import Popen, PIPE
class GitCmdException(Exception):
def __init__(self, stdout, stderr, message):
self.stdout = stdout
self.stderr = stderr
self.message = message
class RebaseAndTagException(GitCmdException):
def __init__(self, tag, stdout, stderr, message):
self.tag = tag
super(RebaseAndTagException, self).__init__(stdout, stderr, message)
def run_cmd(cmd, debug=True, multi=False):
if debug:
logger = logging.getLogger('unzip')
logger.debug('Running: %s' % cmd)
if multi:
cmd_parts = cmd.split(' | ')
first_cmd = cmd_parts.pop(0)
process = Popen(first_cmd.split(), stdout=PIPE)
for cmd in cmd_parts:
process = Popen(cmd.split(), stdin=process.stdout, stdout=PIPE)
else:
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
stdout = stdout or ''
stderr = stderr or ''
if process.returncode:
raise GitCmdException(stdout=stdout, stderr=stderr, message='Problem running %s: %s' % (cmd, stderr.strip()))
return stdout.strip()
|
<commit_before># coding=utf-8
import logging
import subprocess
from subprocess import Popen, PIPE
class GitCmdException(Exception):
def __init__(self, stdout, stderr, message):
self.stdout = stdout
self.stderr = stderr
self.message = message
class RebaseAndTagException(GitCmdException):
def __init__(self, tag, stdout, stderr, message):
self.tag = tag
super(RebaseAndTagException, self).__init__(stdout, stderr, message)
def run_cmd(cmd, debug=True, multi=False):
if debug:
logger = logging.getLogger('unzip')
logger.debug('Running: %s' % cmd)
if multi:
cmd_parts = cmd.split(' | ')
first_cmd = cmd_parts.pop(0)
process = Popen(first_cmd.split(), stdout=PIPE)
for cmd in cmd_parts:
process = Popen(cmd.split(), stdin=process.stdout, stdout=PIPE)
else:
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if process.returncode:
raise GitCmdException(stdout=stdout, stderr=stderr, message='Problem running %s: %s' % (cmd, stderr.strip()))
return stdout.strip()
<commit_msg>Make sure we always have a string to work with<commit_after># coding=utf-8
import logging
import subprocess
from subprocess import Popen, PIPE
class GitCmdException(Exception):
def __init__(self, stdout, stderr, message):
self.stdout = stdout
self.stderr = stderr
self.message = message
class RebaseAndTagException(GitCmdException):
def __init__(self, tag, stdout, stderr, message):
self.tag = tag
super(RebaseAndTagException, self).__init__(stdout, stderr, message)
def run_cmd(cmd, debug=True, multi=False):
if debug:
logger = logging.getLogger('unzip')
logger.debug('Running: %s' % cmd)
if multi:
cmd_parts = cmd.split(' | ')
first_cmd = cmd_parts.pop(0)
process = Popen(first_cmd.split(), stdout=PIPE)
for cmd in cmd_parts:
process = Popen(cmd.split(), stdin=process.stdout, stdout=PIPE)
else:
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
stdout = stdout or ''
stderr = stderr or ''
if process.returncode:
raise GitCmdException(stdout=stdout, stderr=stderr, message='Problem running %s: %s' % (cmd, stderr.strip()))
return stdout.strip()
|
7ac3e48d1934e7a749590d875a3f5e4423fa6c72
|
linked_list.py
|
linked_list.py
|
#!/usr/bin/env python
class SinglyLinked(object):
def __init__(self):
pass
def insert(self, val):
# insert val at beginning of list
pass
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def print_(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
pass
|
#!/usr/bin/env python
class Node(object):
def __init__(self, data, nextNode=None):
self.data = data
self.nextNode = nextNode
class LinkedList(object):
def __init__(self, firstNode=None):
self.firstNode = firstNode
def insert(self, newNode):
# insert newNode at beginning of list
if not self.firstNode:
self.firstNode = newNode
else:
newNode.nextNode = self.firstNode
self.firstNode = newNode
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def display(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
for node in self:
print self.firstNode
|
Create Node class; construct insert method
|
Create Node class; construct insert method
|
Python
|
mit
|
jwarren116/data-structures
|
#!/usr/bin/env python
class SinglyLinked(object):
def __init__(self):
pass
def insert(self, val):
# insert val at beginning of list
pass
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def print_(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
pass
Create Node class; construct insert method
|
#!/usr/bin/env python
class Node(object):
def __init__(self, data, nextNode=None):
self.data = data
self.nextNode = nextNode
class LinkedList(object):
def __init__(self, firstNode=None):
self.firstNode = firstNode
def insert(self, newNode):
# insert newNode at beginning of list
if not self.firstNode:
self.firstNode = newNode
else:
newNode.nextNode = self.firstNode
self.firstNode = newNode
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def display(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
for node in self:
print self.firstNode
|
<commit_before>#!/usr/bin/env python
class SinglyLinked(object):
def __init__(self):
pass
def insert(self, val):
# insert val at beginning of list
pass
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def print_(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
pass
<commit_msg>Create Node class; construct insert method<commit_after>
|
#!/usr/bin/env python
class Node(object):
def __init__(self, data, nextNode=None):
self.data = data
self.nextNode = nextNode
class LinkedList(object):
def __init__(self, firstNode=None):
self.firstNode = firstNode
def insert(self, newNode):
# insert newNode at beginning of list
if not self.firstNode:
self.firstNode = newNode
else:
newNode.nextNode = self.firstNode
self.firstNode = newNode
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def display(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
for node in self:
print self.firstNode
|
#!/usr/bin/env python
class SinglyLinked(object):
def __init__(self):
pass
def insert(self, val):
# insert val at beginning of list
pass
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def print_(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
pass
Create Node class; construct insert method#!/usr/bin/env python
class Node(object):
def __init__(self, data, nextNode=None):
self.data = data
self.nextNode = nextNode
class LinkedList(object):
def __init__(self, firstNode=None):
self.firstNode = firstNode
def insert(self, newNode):
# insert newNode at beginning of list
if not self.firstNode:
self.firstNode = newNode
else:
newNode.nextNode = self.firstNode
self.firstNode = newNode
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def display(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
for node in self:
print self.firstNode
|
<commit_before>#!/usr/bin/env python
class SinglyLinked(object):
def __init__(self):
pass
def insert(self, val):
# insert val at beginning of list
pass
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def print_(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
pass
<commit_msg>Create Node class; construct insert method<commit_after>#!/usr/bin/env python
class Node(object):
def __init__(self, data, nextNode=None):
self.data = data
self.nextNode = nextNode
class LinkedList(object):
def __init__(self, firstNode=None):
self.firstNode = firstNode
def insert(self, newNode):
# insert newNode at beginning of list
if not self.firstNode:
self.firstNode = newNode
else:
newNode.nextNode = self.firstNode
self.firstNode = newNode
def pop(self):
# pops first value from list and returns it
pass
def size(self):
# returns length of list
pass
def search(self, val):
# return node containing 'val' in list, if present (else None)
pass
def remove(self, node):
# remove node from list, wherever it might be
pass
def display(self):
# print list as python tuple literal
# (bonus points if you make it appear like "(12, 'sam', 32, 'fred')")
for node in self:
print self.firstNode
|
51931a0ba263cd16f14780df664c093764d0bad7
|
tests/integrations/test_urls.py
|
tests/integrations/test_urls.py
|
import pytest
pytestmark = pytest.mark.django_db
def test_admin_interface(client):
response = client.get('/admin/login/')
assert response.status_code == 200
|
import pytest
pytestmark = pytest.mark.django_db
def test_admin_interface(client):
public_urls = [
'/admin/login/',
'/',
'/about/',
'/privacy/',
]
for url in public_urls:
response = client.get('/admin/login/')
assert response.status_code == 200
|
Add test to check for public urls
|
Add test to check for public urls
|
Python
|
mit
|
fossevents/fossevents.in,fossevents/fossevents.in,vipul-sharma20/fossevents.in,vipul-sharma20/fossevents.in,vipul-sharma20/fossevents.in,aniketmaithani/fossevents.in,fossevents/fossevents.in,aniketmaithani/fossevents.in,vipul-sharma20/fossevents.in,aniketmaithani/fossevents.in,fossevents/fossevents.in,aniketmaithani/fossevents.in
|
import pytest
pytestmark = pytest.mark.django_db
def test_admin_interface(client):
response = client.get('/admin/login/')
assert response.status_code == 200
Add test to check for public urls
|
import pytest
pytestmark = pytest.mark.django_db
def test_admin_interface(client):
public_urls = [
'/admin/login/',
'/',
'/about/',
'/privacy/',
]
for url in public_urls:
response = client.get('/admin/login/')
assert response.status_code == 200
|
<commit_before>import pytest
pytestmark = pytest.mark.django_db
def test_admin_interface(client):
response = client.get('/admin/login/')
assert response.status_code == 200
<commit_msg>Add test to check for public urls<commit_after>
|
import pytest
pytestmark = pytest.mark.django_db
def test_admin_interface(client):
public_urls = [
'/admin/login/',
'/',
'/about/',
'/privacy/',
]
for url in public_urls:
response = client.get('/admin/login/')
assert response.status_code == 200
|
import pytest
pytestmark = pytest.mark.django_db
def test_admin_interface(client):
response = client.get('/admin/login/')
assert response.status_code == 200
Add test to check for public urlsimport pytest
pytestmark = pytest.mark.django_db
def test_admin_interface(client):
public_urls = [
'/admin/login/',
'/',
'/about/',
'/privacy/',
]
for url in public_urls:
response = client.get('/admin/login/')
assert response.status_code == 200
|
<commit_before>import pytest
pytestmark = pytest.mark.django_db
def test_admin_interface(client):
response = client.get('/admin/login/')
assert response.status_code == 200
<commit_msg>Add test to check for public urls<commit_after>import pytest
pytestmark = pytest.mark.django_db
def test_admin_interface(client):
public_urls = [
'/admin/login/',
'/',
'/about/',
'/privacy/',
]
for url in public_urls:
response = client.get('/admin/login/')
assert response.status_code == 200
|
765d34819d90781e62c64f5a7d32b480483b44e8
|
great_expectations/data_context/types/configurations.py
|
great_expectations/data_context/types/configurations.py
|
from six import string_types
from great_expectations.types import Config
class DataContextConfig(Config):
_allowed_keys = set([
"ge_config_version",
# TODO temporary fix
# TODO warn if this is detected
"result_callback",
"config_variables_file_path",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs", # TODO: Rename this to sites, to remove a layer of extraneous nesting
"validation_operators",
])
_required_keys = set([
"ge_config_version",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs",
# "validation_operators", # TODO: Activate!
])
_key_types = {
"config_variables_file_path": string_types,
"plugins_directory": string_types,
"expectations_store": dict,
"evaluation_parameter_store_name": string_types,
"datasources": dict,
"stores": dict,
"data_docs": dict,
"validation_operators": dict,
}
|
from six import string_types
from great_expectations.types import Config
class DataContextConfig(Config):
_allowed_keys = set([
"config_variables_file_path",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs", # TODO: Rename this to sites, to remove a layer of extraneous nesting
"validation_operators",
])
_required_keys = set([
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs",
# "validation_operators", # TODO: Activate!
])
_key_types = {
"config_variables_file_path": string_types,
"plugins_directory": string_types,
"expectations_store": dict,
"evaluation_parameter_store_name": string_types,
"datasources": dict,
"stores": dict,
"data_docs": dict,
"validation_operators": dict,
}
|
Revert "* added a few keys to the configs"
|
Revert "* added a few keys to the configs"
This reverts commit 66f083e386a94ad558d50fa2dd8120c01e9580a7.
|
Python
|
apache-2.0
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
from six import string_types
from great_expectations.types import Config
class DataContextConfig(Config):
_allowed_keys = set([
"ge_config_version",
# TODO temporary fix
# TODO warn if this is detected
"result_callback",
"config_variables_file_path",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs", # TODO: Rename this to sites, to remove a layer of extraneous nesting
"validation_operators",
])
_required_keys = set([
"ge_config_version",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs",
# "validation_operators", # TODO: Activate!
])
_key_types = {
"config_variables_file_path": string_types,
"plugins_directory": string_types,
"expectations_store": dict,
"evaluation_parameter_store_name": string_types,
"datasources": dict,
"stores": dict,
"data_docs": dict,
"validation_operators": dict,
}
Revert "* added a few keys to the configs"
This reverts commit 66f083e386a94ad558d50fa2dd8120c01e9580a7.
|
from six import string_types
from great_expectations.types import Config
class DataContextConfig(Config):
_allowed_keys = set([
"config_variables_file_path",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs", # TODO: Rename this to sites, to remove a layer of extraneous nesting
"validation_operators",
])
_required_keys = set([
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs",
# "validation_operators", # TODO: Activate!
])
_key_types = {
"config_variables_file_path": string_types,
"plugins_directory": string_types,
"expectations_store": dict,
"evaluation_parameter_store_name": string_types,
"datasources": dict,
"stores": dict,
"data_docs": dict,
"validation_operators": dict,
}
|
<commit_before>from six import string_types
from great_expectations.types import Config
class DataContextConfig(Config):
_allowed_keys = set([
"ge_config_version",
# TODO temporary fix
# TODO warn if this is detected
"result_callback",
"config_variables_file_path",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs", # TODO: Rename this to sites, to remove a layer of extraneous nesting
"validation_operators",
])
_required_keys = set([
"ge_config_version",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs",
# "validation_operators", # TODO: Activate!
])
_key_types = {
"config_variables_file_path": string_types,
"plugins_directory": string_types,
"expectations_store": dict,
"evaluation_parameter_store_name": string_types,
"datasources": dict,
"stores": dict,
"data_docs": dict,
"validation_operators": dict,
}
<commit_msg>Revert "* added a few keys to the configs"
This reverts commit 66f083e386a94ad558d50fa2dd8120c01e9580a7.<commit_after>
|
from six import string_types
from great_expectations.types import Config
class DataContextConfig(Config):
_allowed_keys = set([
"config_variables_file_path",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs", # TODO: Rename this to sites, to remove a layer of extraneous nesting
"validation_operators",
])
_required_keys = set([
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs",
# "validation_operators", # TODO: Activate!
])
_key_types = {
"config_variables_file_path": string_types,
"plugins_directory": string_types,
"expectations_store": dict,
"evaluation_parameter_store_name": string_types,
"datasources": dict,
"stores": dict,
"data_docs": dict,
"validation_operators": dict,
}
|
from six import string_types
from great_expectations.types import Config
class DataContextConfig(Config):
_allowed_keys = set([
"ge_config_version",
# TODO temporary fix
# TODO warn if this is detected
"result_callback",
"config_variables_file_path",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs", # TODO: Rename this to sites, to remove a layer of extraneous nesting
"validation_operators",
])
_required_keys = set([
"ge_config_version",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs",
# "validation_operators", # TODO: Activate!
])
_key_types = {
"config_variables_file_path": string_types,
"plugins_directory": string_types,
"expectations_store": dict,
"evaluation_parameter_store_name": string_types,
"datasources": dict,
"stores": dict,
"data_docs": dict,
"validation_operators": dict,
}
Revert "* added a few keys to the configs"
This reverts commit 66f083e386a94ad558d50fa2dd8120c01e9580a7.from six import string_types
from great_expectations.types import Config
class DataContextConfig(Config):
_allowed_keys = set([
"config_variables_file_path",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs", # TODO: Rename this to sites, to remove a layer of extraneous nesting
"validation_operators",
])
_required_keys = set([
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs",
# "validation_operators", # TODO: Activate!
])
_key_types = {
"config_variables_file_path": string_types,
"plugins_directory": string_types,
"expectations_store": dict,
"evaluation_parameter_store_name": string_types,
"datasources": dict,
"stores": dict,
"data_docs": dict,
"validation_operators": dict,
}
|
<commit_before>from six import string_types
from great_expectations.types import Config
class DataContextConfig(Config):
_allowed_keys = set([
"ge_config_version",
# TODO temporary fix
# TODO warn if this is detected
"result_callback",
"config_variables_file_path",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs", # TODO: Rename this to sites, to remove a layer of extraneous nesting
"validation_operators",
])
_required_keys = set([
"ge_config_version",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs",
# "validation_operators", # TODO: Activate!
])
_key_types = {
"config_variables_file_path": string_types,
"plugins_directory": string_types,
"expectations_store": dict,
"evaluation_parameter_store_name": string_types,
"datasources": dict,
"stores": dict,
"data_docs": dict,
"validation_operators": dict,
}
<commit_msg>Revert "* added a few keys to the configs"
This reverts commit 66f083e386a94ad558d50fa2dd8120c01e9580a7.<commit_after>from six import string_types
from great_expectations.types import Config
class DataContextConfig(Config):
_allowed_keys = set([
"config_variables_file_path",
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs", # TODO: Rename this to sites, to remove a layer of extraneous nesting
"validation_operators",
])
_required_keys = set([
"plugins_directory",
"expectations_store",
"evaluation_parameter_store_name",
"datasources",
"stores",
"data_docs",
# "validation_operators", # TODO: Activate!
])
_key_types = {
"config_variables_file_path": string_types,
"plugins_directory": string_types,
"expectations_store": dict,
"evaluation_parameter_store_name": string_types,
"datasources": dict,
"stores": dict,
"data_docs": dict,
"validation_operators": dict,
}
|
6cfb0ca69b43784d495920865f0a250f7d16ff84
|
trump/extensions/loader.py
|
trump/extensions/loader.py
|
from imp import find_module, load_module
import os
class SourceExtension(object):
def __init__(self, mod):
self.initialized = False
self.mod = mod
self.renew = mod.renew
self.Source = mod.Source
def __call__(self, _ses, **kwargs):
if not self.initialized or self.renew:
self.fetcher = self.Source(_ses, **kwargs)
self.initialized = True
return self.fetcher.getseries(_ses, **kwargs)
sources = {}
curdir = os.path.dirname(os.path.realpath(__file__))
extension_names = os.listdir(os.path.join(curdir,'source'))
for name in extension_names:
ext = find_module(name, ['source'])
mod = load_module(name, *ext)
sources[mod.stype] = SourceExtension(mod)
|
from imp import find_module, load_module
import os
class SourceExtension(object):
def __init__(self, mod):
self.initialized = False
self.mod = mod
self.renew = mod.renew
self.Source = mod.Source
def __call__(self, _ses, **kwargs):
if not self.initialized or self.renew:
self.fetcher = self.Source(_ses, **kwargs)
self.initialized = True
return self.fetcher.getseries(_ses, **kwargs)
sources = {}
curdir = os.path.dirname(os.path.realpath(__file__))
sourcedir = os.path.join(curdir,'source')
extension_names = os.listdir(sourcedir)
for name in extension_names:
ext = find_module(name, [sourcedir])
mod = load_module(name, *ext)
sources[mod.stype] = SourceExtension(mod)
|
Use full path to find mods
|
Use full path to find mods
|
Python
|
bsd-3-clause
|
jnmclarty/trump,Equitable/trump
|
from imp import find_module, load_module
import os
class SourceExtension(object):
def __init__(self, mod):
self.initialized = False
self.mod = mod
self.renew = mod.renew
self.Source = mod.Source
def __call__(self, _ses, **kwargs):
if not self.initialized or self.renew:
self.fetcher = self.Source(_ses, **kwargs)
self.initialized = True
return self.fetcher.getseries(_ses, **kwargs)
sources = {}
curdir = os.path.dirname(os.path.realpath(__file__))
extension_names = os.listdir(os.path.join(curdir,'source'))
for name in extension_names:
ext = find_module(name, ['source'])
mod = load_module(name, *ext)
sources[mod.stype] = SourceExtension(mod)Use full path to find mods
|
from imp import find_module, load_module
import os
class SourceExtension(object):
def __init__(self, mod):
self.initialized = False
self.mod = mod
self.renew = mod.renew
self.Source = mod.Source
def __call__(self, _ses, **kwargs):
if not self.initialized or self.renew:
self.fetcher = self.Source(_ses, **kwargs)
self.initialized = True
return self.fetcher.getseries(_ses, **kwargs)
sources = {}
curdir = os.path.dirname(os.path.realpath(__file__))
sourcedir = os.path.join(curdir,'source')
extension_names = os.listdir(sourcedir)
for name in extension_names:
ext = find_module(name, [sourcedir])
mod = load_module(name, *ext)
sources[mod.stype] = SourceExtension(mod)
|
<commit_before>from imp import find_module, load_module
import os
class SourceExtension(object):
def __init__(self, mod):
self.initialized = False
self.mod = mod
self.renew = mod.renew
self.Source = mod.Source
def __call__(self, _ses, **kwargs):
if not self.initialized or self.renew:
self.fetcher = self.Source(_ses, **kwargs)
self.initialized = True
return self.fetcher.getseries(_ses, **kwargs)
sources = {}
curdir = os.path.dirname(os.path.realpath(__file__))
extension_names = os.listdir(os.path.join(curdir,'source'))
for name in extension_names:
ext = find_module(name, ['source'])
mod = load_module(name, *ext)
sources[mod.stype] = SourceExtension(mod)<commit_msg>Use full path to find mods<commit_after>
|
from imp import find_module, load_module
import os
class SourceExtension(object):
def __init__(self, mod):
self.initialized = False
self.mod = mod
self.renew = mod.renew
self.Source = mod.Source
def __call__(self, _ses, **kwargs):
if not self.initialized or self.renew:
self.fetcher = self.Source(_ses, **kwargs)
self.initialized = True
return self.fetcher.getseries(_ses, **kwargs)
sources = {}
curdir = os.path.dirname(os.path.realpath(__file__))
sourcedir = os.path.join(curdir,'source')
extension_names = os.listdir(sourcedir)
for name in extension_names:
ext = find_module(name, [sourcedir])
mod = load_module(name, *ext)
sources[mod.stype] = SourceExtension(mod)
|
from imp import find_module, load_module
import os
class SourceExtension(object):
def __init__(self, mod):
self.initialized = False
self.mod = mod
self.renew = mod.renew
self.Source = mod.Source
def __call__(self, _ses, **kwargs):
if not self.initialized or self.renew:
self.fetcher = self.Source(_ses, **kwargs)
self.initialized = True
return self.fetcher.getseries(_ses, **kwargs)
sources = {}
curdir = os.path.dirname(os.path.realpath(__file__))
extension_names = os.listdir(os.path.join(curdir,'source'))
for name in extension_names:
ext = find_module(name, ['source'])
mod = load_module(name, *ext)
sources[mod.stype] = SourceExtension(mod)Use full path to find modsfrom imp import find_module, load_module
import os
class SourceExtension(object):
def __init__(self, mod):
self.initialized = False
self.mod = mod
self.renew = mod.renew
self.Source = mod.Source
def __call__(self, _ses, **kwargs):
if not self.initialized or self.renew:
self.fetcher = self.Source(_ses, **kwargs)
self.initialized = True
return self.fetcher.getseries(_ses, **kwargs)
sources = {}
curdir = os.path.dirname(os.path.realpath(__file__))
sourcedir = os.path.join(curdir,'source')
extension_names = os.listdir(sourcedir)
for name in extension_names:
ext = find_module(name, [sourcedir])
mod = load_module(name, *ext)
sources[mod.stype] = SourceExtension(mod)
|
<commit_before>from imp import find_module, load_module
import os
class SourceExtension(object):
def __init__(self, mod):
self.initialized = False
self.mod = mod
self.renew = mod.renew
self.Source = mod.Source
def __call__(self, _ses, **kwargs):
if not self.initialized or self.renew:
self.fetcher = self.Source(_ses, **kwargs)
self.initialized = True
return self.fetcher.getseries(_ses, **kwargs)
sources = {}
curdir = os.path.dirname(os.path.realpath(__file__))
extension_names = os.listdir(os.path.join(curdir,'source'))
for name in extension_names:
ext = find_module(name, ['source'])
mod = load_module(name, *ext)
sources[mod.stype] = SourceExtension(mod)<commit_msg>Use full path to find mods<commit_after>from imp import find_module, load_module
import os
class SourceExtension(object):
def __init__(self, mod):
self.initialized = False
self.mod = mod
self.renew = mod.renew
self.Source = mod.Source
def __call__(self, _ses, **kwargs):
if not self.initialized or self.renew:
self.fetcher = self.Source(_ses, **kwargs)
self.initialized = True
return self.fetcher.getseries(_ses, **kwargs)
sources = {}
curdir = os.path.dirname(os.path.realpath(__file__))
sourcedir = os.path.join(curdir,'source')
extension_names = os.listdir(sourcedir)
for name in extension_names:
ext = find_module(name, [sourcedir])
mod = load_module(name, *ext)
sources[mod.stype] = SourceExtension(mod)
|
38a61253fc34f4e41ffa73f41ef622cb96dfbf3e
|
deprecated/__init__.py
|
deprecated/__init__.py
|
# -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
|
# -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {0}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
|
Replace invalid string format '{}' by '{0}' for Python 2.6 compatibility.
|
Replace invalid string format '{}' by '{0}' for Python 2.6 compatibility.
|
Python
|
mit
|
vrcmarcos/python-deprecated
|
# -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
Replace invalid string format '{}' by '{0}' for Python 2.6 compatibility.
|
# -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {0}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
|
<commit_before># -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
<commit_msg>Replace invalid string format '{}' by '{0}' for Python 2.6 compatibility.<commit_after>
|
# -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {0}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
|
# -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
Replace invalid string format '{}' by '{0}' for Python 2.6 compatibility.# -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {0}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
|
<commit_before># -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
<commit_msg>Replace invalid string format '{}' by '{0}' for Python 2.6 compatibility.<commit_after># -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {0}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
|
758553edd8da7adbfeb2d291c83442dce77c748c
|
spotify/__init__.py
|
spotify/__init__.py
|
from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def _add_enum(obj, prefix):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
_add_enum(Error, 'SP_ERROR_')
|
from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def enum(prefix):
def wrapper(obj):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
return obj
return wrapper
@enum('SP_ERROR_')
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
|
Use a class decorator to add enum values to classes
|
Use a class decorator to add enum values to classes
|
Python
|
apache-2.0
|
jodal/pyspotify,kotamat/pyspotify,felix1m/pyspotify,jodal/pyspotify,kotamat/pyspotify,jodal/pyspotify,mopidy/pyspotify,felix1m/pyspotify,kotamat/pyspotify,mopidy/pyspotify,felix1m/pyspotify
|
from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def _add_enum(obj, prefix):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
_add_enum(Error, 'SP_ERROR_')
Use a class decorator to add enum values to classes
|
from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def enum(prefix):
def wrapper(obj):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
return obj
return wrapper
@enum('SP_ERROR_')
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
|
<commit_before>from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def _add_enum(obj, prefix):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
_add_enum(Error, 'SP_ERROR_')
<commit_msg>Use a class decorator to add enum values to classes<commit_after>
|
from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def enum(prefix):
def wrapper(obj):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
return obj
return wrapper
@enum('SP_ERROR_')
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
|
from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def _add_enum(obj, prefix):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
_add_enum(Error, 'SP_ERROR_')
Use a class decorator to add enum values to classesfrom __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def enum(prefix):
def wrapper(obj):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
return obj
return wrapper
@enum('SP_ERROR_')
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
|
<commit_before>from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def _add_enum(obj, prefix):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
_add_enum(Error, 'SP_ERROR_')
<commit_msg>Use a class decorator to add enum values to classes<commit_after>from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_text(chars):
return ffi.string(chars).decode('utf-8')
def enum(prefix):
def wrapper(obj):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
return obj
return wrapper
@enum('SP_ERROR_')
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_text(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
|
99e0e90552c16067cfd41c9e89464311494c5a85
|
kitsune/sumo/management/commands/nunjucks_precompile.py
|
kitsune/sumo/management/commands/nunjucks_precompile.py
|
import os
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand
ROOT = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
path = lambda *a: os.path.join(ROOT, *a)
class Command(BaseCommand):
help = 'Precompiles nunjuck templates'
def handle(self, *args, **kwargs):
files = os.listdir(path('static/tpl'))
for f in files:
if f.endswith('.html'):
tpl = f[:-5]
cmd = '%s %s > %s' % (
settings.NUNJUCKS_PRECOMPILE_BIN,
path('static/tpl'),
path('static/js/templates/%s.js' % tpl))
subprocess.call(cmd, shell=True)
|
import os
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand
ROOT = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
path = lambda *a: os.path.join(ROOT, *a)
class Command(BaseCommand):
help = 'Precompiles nunjuck templates'
def handle(self, *args, **kwargs):
try:
os.makedirs(path('static/js/templates'))
except OSError:
pass
try:
os.makedirs(path('static/tpl'))
except OSError:
pass
files = os.listdir(path('static/tpl'))
for f in files:
if f.endswith('.html'):
tpl = f[:-5]
cmd = '%s %s > %s' % (
settings.NUNJUCKS_PRECOMPILE_BIN,
path('static/tpl'),
path('static/js/templates/%s.js' % tpl))
subprocess.call(cmd, shell=True)
|
Fix nunjucks command so travis is happy
|
Fix nunjucks command so travis is happy
|
Python
|
bsd-3-clause
|
rlr/kitsune,orvi2014/kitsune,silentbob73/kitsune,H1ghT0p/kitsune,MziRintu/kitsune,feer56/Kitsune1,philipp-sumo/kitsune,safwanrahman/kitsune,turtleloveshoes/kitsune,H1ghT0p/kitsune,orvi2014/kitsune,safwanrahman/linuxdesh,safwanrahman/kitsune,Osmose/kitsune,Osmose/kitsune,NewPresident1/kitsune,H1ghT0p/kitsune,MziRintu/kitsune,safwanrahman/kitsune,iDTLabssl/kitsune,feer56/Kitsune2,dbbhattacharya/kitsune,safwanrahman/linuxdesh,MikkCZ/kitsune,YOTOV-LIMITED/kitsune,brittanystoroz/kitsune,rlr/kitsune,mozilla/kitsune,asdofindia/kitsune,orvi2014/kitsune,Osmose/kitsune,turtleloveshoes/kitsune,feer56/Kitsune2,feer56/Kitsune2,chirilo/kitsune,feer56/Kitsune1,YOTOV-LIMITED/kitsune,MziRintu/kitsune,chirilo/kitsune,MikkCZ/kitsune,rlr/kitsune,MikkCZ/kitsune,dbbhattacharya/kitsune,mozilla/kitsune,brittanystoroz/kitsune,dbbhattacharya/kitsune,feer56/Kitsune1,brittanystoroz/kitsune,anushbmx/kitsune,asdofindia/kitsune,feer56/Kitsune2,brittanystoroz/kitsune,NewPresident1/kitsune,safwanrahman/linuxdesh,YOTOV-LIMITED/kitsune,silentbob73/kitsune,turtleloveshoes/kitsune,safwanrahman/kitsune,silentbob73/kitsune,MziRintu/kitsune,iDTLabssl/kitsune,anushbmx/kitsune,anushbmx/kitsune,orvi2014/kitsune,Osmose/kitsune,YOTOV-LIMITED/kitsune,mythmon/kitsune,philipp-sumo/kitsune,dbbhattacharya/kitsune,turtleloveshoes/kitsune,silentbob73/kitsune,mythmon/kitsune,philipp-sumo/kitsune,iDTLabssl/kitsune,mozilla/kitsune,chirilo/kitsune,rlr/kitsune,mozilla/kitsune,MikkCZ/kitsune,NewPresident1/kitsune,iDTLabssl/kitsune,NewPresident1/kitsune,anushbmx/kitsune,mythmon/kitsune,asdofindia/kitsune,mythmon/kitsune,asdofindia/kitsune,chirilo/kitsune,H1ghT0p/kitsune
|
import os
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand
ROOT = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
path = lambda *a: os.path.join(ROOT, *a)
class Command(BaseCommand):
help = 'Precompiles nunjuck templates'
def handle(self, *args, **kwargs):
files = os.listdir(path('static/tpl'))
for f in files:
if f.endswith('.html'):
tpl = f[:-5]
cmd = '%s %s > %s' % (
settings.NUNJUCKS_PRECOMPILE_BIN,
path('static/tpl'),
path('static/js/templates/%s.js' % tpl))
subprocess.call(cmd, shell=True)
Fix nunjucks command so travis is happy
|
import os
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand
ROOT = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
path = lambda *a: os.path.join(ROOT, *a)
class Command(BaseCommand):
help = 'Precompiles nunjuck templates'
def handle(self, *args, **kwargs):
try:
os.makedirs(path('static/js/templates'))
except OSError:
pass
try:
os.makedirs(path('static/tpl'))
except OSError:
pass
files = os.listdir(path('static/tpl'))
for f in files:
if f.endswith('.html'):
tpl = f[:-5]
cmd = '%s %s > %s' % (
settings.NUNJUCKS_PRECOMPILE_BIN,
path('static/tpl'),
path('static/js/templates/%s.js' % tpl))
subprocess.call(cmd, shell=True)
|
<commit_before>import os
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand
ROOT = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
path = lambda *a: os.path.join(ROOT, *a)
class Command(BaseCommand):
help = 'Precompiles nunjuck templates'
def handle(self, *args, **kwargs):
files = os.listdir(path('static/tpl'))
for f in files:
if f.endswith('.html'):
tpl = f[:-5]
cmd = '%s %s > %s' % (
settings.NUNJUCKS_PRECOMPILE_BIN,
path('static/tpl'),
path('static/js/templates/%s.js' % tpl))
subprocess.call(cmd, shell=True)
<commit_msg>Fix nunjucks command so travis is happy<commit_after>
|
import os
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand
ROOT = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
path = lambda *a: os.path.join(ROOT, *a)
class Command(BaseCommand):
help = 'Precompiles nunjuck templates'
def handle(self, *args, **kwargs):
try:
os.makedirs(path('static/js/templates'))
except OSError:
pass
try:
os.makedirs(path('static/tpl'))
except OSError:
pass
files = os.listdir(path('static/tpl'))
for f in files:
if f.endswith('.html'):
tpl = f[:-5]
cmd = '%s %s > %s' % (
settings.NUNJUCKS_PRECOMPILE_BIN,
path('static/tpl'),
path('static/js/templates/%s.js' % tpl))
subprocess.call(cmd, shell=True)
|
import os
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand
ROOT = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
path = lambda *a: os.path.join(ROOT, *a)
class Command(BaseCommand):
help = 'Precompiles nunjuck templates'
def handle(self, *args, **kwargs):
files = os.listdir(path('static/tpl'))
for f in files:
if f.endswith('.html'):
tpl = f[:-5]
cmd = '%s %s > %s' % (
settings.NUNJUCKS_PRECOMPILE_BIN,
path('static/tpl'),
path('static/js/templates/%s.js' % tpl))
subprocess.call(cmd, shell=True)
Fix nunjucks command so travis is happyimport os
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand
ROOT = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
path = lambda *a: os.path.join(ROOT, *a)
class Command(BaseCommand):
help = 'Precompiles nunjuck templates'
def handle(self, *args, **kwargs):
try:
os.makedirs(path('static/js/templates'))
except OSError:
pass
try:
os.makedirs(path('static/tpl'))
except OSError:
pass
files = os.listdir(path('static/tpl'))
for f in files:
if f.endswith('.html'):
tpl = f[:-5]
cmd = '%s %s > %s' % (
settings.NUNJUCKS_PRECOMPILE_BIN,
path('static/tpl'),
path('static/js/templates/%s.js' % tpl))
subprocess.call(cmd, shell=True)
|
<commit_before>import os
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand
ROOT = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
path = lambda *a: os.path.join(ROOT, *a)
class Command(BaseCommand):
help = 'Precompiles nunjuck templates'
def handle(self, *args, **kwargs):
files = os.listdir(path('static/tpl'))
for f in files:
if f.endswith('.html'):
tpl = f[:-5]
cmd = '%s %s > %s' % (
settings.NUNJUCKS_PRECOMPILE_BIN,
path('static/tpl'),
path('static/js/templates/%s.js' % tpl))
subprocess.call(cmd, shell=True)
<commit_msg>Fix nunjucks command so travis is happy<commit_after>import os
import subprocess
from django.conf import settings
from django.core.management.base import BaseCommand
ROOT = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
path = lambda *a: os.path.join(ROOT, *a)
class Command(BaseCommand):
help = 'Precompiles nunjuck templates'
def handle(self, *args, **kwargs):
try:
os.makedirs(path('static/js/templates'))
except OSError:
pass
try:
os.makedirs(path('static/tpl'))
except OSError:
pass
files = os.listdir(path('static/tpl'))
for f in files:
if f.endswith('.html'):
tpl = f[:-5]
cmd = '%s %s > %s' % (
settings.NUNJUCKS_PRECOMPILE_BIN,
path('static/tpl'),
path('static/js/templates/%s.js' % tpl))
subprocess.call(cmd, shell=True)
|
c82574aec4ee413198f54473cb47508a6b271f9a
|
dmf_device_ui/client.py
|
dmf_device_ui/client.py
|
import sys
import zmq
import time
def main():
port = 5000
if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
bind_addr = "tcp://localhost:%s" % port
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect(bind_addr)
socket.setsockopt(zmq.SUBSCRIBE,'')
print "Listening for events on %s ..." % bind_addr
while True:
mssg = socket.recv()
print mssg
if __name__ == '__main__':
main()
|
import sys
import zmq
import time
def main():
port = 5000
if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
bind_addr = "tcp://localhost:%s" % port
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect(bind_addr)
socket.setsockopt(zmq.SUBSCRIBE,'')
print "Listening for events on %s ..." % bind_addr
while True:
try:
try:
mssg = socket.recv(zmq.NOBLOCK)
print mssg
except zmq.error.Again:
time.sleep(0.001)
except KeyboardInterrupt:
break
if __name__ == '__main__':
main()
|
Replace spaces with tabs, quit on <Ctrl+C>
|
Replace spaces with tabs, quit on <Ctrl+C>
|
Python
|
lgpl-2.1
|
wheeler-microfluidics/dmf-device-ui
|
import sys
import zmq
import time
def main():
port = 5000
if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
bind_addr = "tcp://localhost:%s" % port
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect(bind_addr)
socket.setsockopt(zmq.SUBSCRIBE,'')
print "Listening for events on %s ..." % bind_addr
while True:
mssg = socket.recv()
print mssg
if __name__ == '__main__':
main()Replace spaces with tabs, quit on <Ctrl+C>
|
import sys
import zmq
import time
def main():
port = 5000
if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
bind_addr = "tcp://localhost:%s" % port
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect(bind_addr)
socket.setsockopt(zmq.SUBSCRIBE,'')
print "Listening for events on %s ..." % bind_addr
while True:
try:
try:
mssg = socket.recv(zmq.NOBLOCK)
print mssg
except zmq.error.Again:
time.sleep(0.001)
except KeyboardInterrupt:
break
if __name__ == '__main__':
main()
|
<commit_before>import sys
import zmq
import time
def main():
port = 5000
if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
bind_addr = "tcp://localhost:%s" % port
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect(bind_addr)
socket.setsockopt(zmq.SUBSCRIBE,'')
print "Listening for events on %s ..." % bind_addr
while True:
mssg = socket.recv()
print mssg
if __name__ == '__main__':
main()<commit_msg>Replace spaces with tabs, quit on <Ctrl+C><commit_after>
|
import sys
import zmq
import time
def main():
port = 5000
if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
bind_addr = "tcp://localhost:%s" % port
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect(bind_addr)
socket.setsockopt(zmq.SUBSCRIBE,'')
print "Listening for events on %s ..." % bind_addr
while True:
try:
try:
mssg = socket.recv(zmq.NOBLOCK)
print mssg
except zmq.error.Again:
time.sleep(0.001)
except KeyboardInterrupt:
break
if __name__ == '__main__':
main()
|
import sys
import zmq
import time
def main():
port = 5000
if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
bind_addr = "tcp://localhost:%s" % port
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect(bind_addr)
socket.setsockopt(zmq.SUBSCRIBE,'')
print "Listening for events on %s ..." % bind_addr
while True:
mssg = socket.recv()
print mssg
if __name__ == '__main__':
main()Replace spaces with tabs, quit on <Ctrl+C>import sys
import zmq
import time
def main():
port = 5000
if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
bind_addr = "tcp://localhost:%s" % port
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect(bind_addr)
socket.setsockopt(zmq.SUBSCRIBE,'')
print "Listening for events on %s ..." % bind_addr
while True:
try:
try:
mssg = socket.recv(zmq.NOBLOCK)
print mssg
except zmq.error.Again:
time.sleep(0.001)
except KeyboardInterrupt:
break
if __name__ == '__main__':
main()
|
<commit_before>import sys
import zmq
import time
def main():
port = 5000
if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
bind_addr = "tcp://localhost:%s" % port
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect(bind_addr)
socket.setsockopt(zmq.SUBSCRIBE,'')
print "Listening for events on %s ..." % bind_addr
while True:
mssg = socket.recv()
print mssg
if __name__ == '__main__':
main()<commit_msg>Replace spaces with tabs, quit on <Ctrl+C><commit_after>import sys
import zmq
import time
def main():
port = 5000
if len(sys.argv) > 1:
port = sys.argv[1]
int(port)
bind_addr = "tcp://localhost:%s" % port
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect(bind_addr)
socket.setsockopt(zmq.SUBSCRIBE,'')
print "Listening for events on %s ..." % bind_addr
while True:
try:
try:
mssg = socket.recv(zmq.NOBLOCK)
print mssg
except zmq.error.Again:
time.sleep(0.001)
except KeyboardInterrupt:
break
if __name__ == '__main__':
main()
|
bf288ac18734e12b5344dc83515208be35989f18
|
tob-api/api/indy/claimParser.py
|
tob-api/api/indy/claimParser.py
|
import json
import logging
class ClaimParser(object):
"""
Parses a generic claim.
"""
def __init__(self, claim: str) -> None:
self.__logger = logging.getLogger(__name__)
self.__orgData = claim
self.__parse()
def __parse(self):
self.__logger.debug("Parsing claim ...")
data = json.loads(self.__orgData)
self.__claim_type = data["claim_type"]
self.__claim = data["claim_data"]
self.__issuer_did = data["claim_data"]["issuer_did"]
def getField(self, field):
return self.__claim["claim"][field][0]
@property
def schemaName(self) -> str:
return self.__claim_type
@property
def issuerDid(self) -> str:
return self.__issuer_did
@property
def json(self) -> str:
return json.dumps(self.__claim)
|
import json
import logging
class ClaimParser(object):
"""
Parses a generic claim.
"""
def __init__(self, claim: str) -> None:
self.__logger = logging.getLogger(__name__)
self.__orgData = claim
self.__parse()
def __parse(self):
self.__logger.debug("Parsing claim ...")
data = json.loads(self.__orgData)
self.__claim_type = data["claim_type"]
self.__claim = data["claim_data"]
self.__issuer_did = data["claim_data"]["issuer_did"]
def getField(self, field):
return self.__claim["claim"][field][0]
@property
def schemaName(self) -> str:
return self.__claim_type
@property
def issuerDid(self) -> str:
return self.__issuer_did
@property
def json(self) -> str:
return self.__claim
|
Save the claim json as properly formatted json
|
Save the claim json as properly formatted json
|
Python
|
apache-2.0
|
swcurran/TheOrgBook,WadeBarnes/TheOrgBook,WadeBarnes/TheOrgBook,WadeBarnes/TheOrgBook,WadeBarnes/TheOrgBook,WadeBarnes/TheOrgBook,swcurran/TheOrgBook,swcurran/TheOrgBook,swcurran/TheOrgBook,swcurran/TheOrgBook
|
import json
import logging
class ClaimParser(object):
"""
Parses a generic claim.
"""
def __init__(self, claim: str) -> None:
self.__logger = logging.getLogger(__name__)
self.__orgData = claim
self.__parse()
def __parse(self):
self.__logger.debug("Parsing claim ...")
data = json.loads(self.__orgData)
self.__claim_type = data["claim_type"]
self.__claim = data["claim_data"]
self.__issuer_did = data["claim_data"]["issuer_did"]
def getField(self, field):
return self.__claim["claim"][field][0]
@property
def schemaName(self) -> str:
return self.__claim_type
@property
def issuerDid(self) -> str:
return self.__issuer_did
@property
def json(self) -> str:
return json.dumps(self.__claim)Save the claim json as properly formatted json
|
import json
import logging
class ClaimParser(object):
"""
Parses a generic claim.
"""
def __init__(self, claim: str) -> None:
self.__logger = logging.getLogger(__name__)
self.__orgData = claim
self.__parse()
def __parse(self):
self.__logger.debug("Parsing claim ...")
data = json.loads(self.__orgData)
self.__claim_type = data["claim_type"]
self.__claim = data["claim_data"]
self.__issuer_did = data["claim_data"]["issuer_did"]
def getField(self, field):
return self.__claim["claim"][field][0]
@property
def schemaName(self) -> str:
return self.__claim_type
@property
def issuerDid(self) -> str:
return self.__issuer_did
@property
def json(self) -> str:
return self.__claim
|
<commit_before>import json
import logging
class ClaimParser(object):
"""
Parses a generic claim.
"""
def __init__(self, claim: str) -> None:
self.__logger = logging.getLogger(__name__)
self.__orgData = claim
self.__parse()
def __parse(self):
self.__logger.debug("Parsing claim ...")
data = json.loads(self.__orgData)
self.__claim_type = data["claim_type"]
self.__claim = data["claim_data"]
self.__issuer_did = data["claim_data"]["issuer_did"]
def getField(self, field):
return self.__claim["claim"][field][0]
@property
def schemaName(self) -> str:
return self.__claim_type
@property
def issuerDid(self) -> str:
return self.__issuer_did
@property
def json(self) -> str:
return json.dumps(self.__claim)<commit_msg>Save the claim json as properly formatted json<commit_after>
|
import json
import logging
class ClaimParser(object):
"""
Parses a generic claim.
"""
def __init__(self, claim: str) -> None:
self.__logger = logging.getLogger(__name__)
self.__orgData = claim
self.__parse()
def __parse(self):
self.__logger.debug("Parsing claim ...")
data = json.loads(self.__orgData)
self.__claim_type = data["claim_type"]
self.__claim = data["claim_data"]
self.__issuer_did = data["claim_data"]["issuer_did"]
def getField(self, field):
return self.__claim["claim"][field][0]
@property
def schemaName(self) -> str:
return self.__claim_type
@property
def issuerDid(self) -> str:
return self.__issuer_did
@property
def json(self) -> str:
return self.__claim
|
import json
import logging
class ClaimParser(object):
"""
Parses a generic claim.
"""
def __init__(self, claim: str) -> None:
self.__logger = logging.getLogger(__name__)
self.__orgData = claim
self.__parse()
def __parse(self):
self.__logger.debug("Parsing claim ...")
data = json.loads(self.__orgData)
self.__claim_type = data["claim_type"]
self.__claim = data["claim_data"]
self.__issuer_did = data["claim_data"]["issuer_did"]
def getField(self, field):
return self.__claim["claim"][field][0]
@property
def schemaName(self) -> str:
return self.__claim_type
@property
def issuerDid(self) -> str:
return self.__issuer_did
@property
def json(self) -> str:
return json.dumps(self.__claim)Save the claim json as properly formatted jsonimport json
import logging
class ClaimParser(object):
"""
Parses a generic claim.
"""
def __init__(self, claim: str) -> None:
self.__logger = logging.getLogger(__name__)
self.__orgData = claim
self.__parse()
def __parse(self):
self.__logger.debug("Parsing claim ...")
data = json.loads(self.__orgData)
self.__claim_type = data["claim_type"]
self.__claim = data["claim_data"]
self.__issuer_did = data["claim_data"]["issuer_did"]
def getField(self, field):
return self.__claim["claim"][field][0]
@property
def schemaName(self) -> str:
return self.__claim_type
@property
def issuerDid(self) -> str:
return self.__issuer_did
@property
def json(self) -> str:
return self.__claim
|
<commit_before>import json
import logging
class ClaimParser(object):
"""
Parses a generic claim.
"""
def __init__(self, claim: str) -> None:
self.__logger = logging.getLogger(__name__)
self.__orgData = claim
self.__parse()
def __parse(self):
self.__logger.debug("Parsing claim ...")
data = json.loads(self.__orgData)
self.__claim_type = data["claim_type"]
self.__claim = data["claim_data"]
self.__issuer_did = data["claim_data"]["issuer_did"]
def getField(self, field):
return self.__claim["claim"][field][0]
@property
def schemaName(self) -> str:
return self.__claim_type
@property
def issuerDid(self) -> str:
return self.__issuer_did
@property
def json(self) -> str:
return json.dumps(self.__claim)<commit_msg>Save the claim json as properly formatted json<commit_after>import json
import logging
class ClaimParser(object):
"""
Parses a generic claim.
"""
def __init__(self, claim: str) -> None:
self.__logger = logging.getLogger(__name__)
self.__orgData = claim
self.__parse()
def __parse(self):
self.__logger.debug("Parsing claim ...")
data = json.loads(self.__orgData)
self.__claim_type = data["claim_type"]
self.__claim = data["claim_data"]
self.__issuer_did = data["claim_data"]["issuer_did"]
def getField(self, field):
return self.__claim["claim"][field][0]
@property
def schemaName(self) -> str:
return self.__claim_type
@property
def issuerDid(self) -> str:
return self.__issuer_did
@property
def json(self) -> str:
return self.__claim
|
dad970e9db2d3985a4995982d91a995898c8781b
|
virtool/handlers/updates.py
|
virtool/handlers/updates.py
|
import virtool.app
import virtool.updates
from virtool.handlers.utils import json_response
async def get(req):
db = req.app["db"]
settings = req.app["settings"]
repo = settings.get("software_repo")
server_version = virtool.app.find_server_version()
await virtool.updates.get_releases(repo, server_version)
return json_response({"message": "YAY"})
|
import virtool.app
import virtool.updates
from virtool.handlers.utils import json_response
async def get(req):
# db = req.app["db"]
settings = req.app["settings"]
repo = settings.get("software_repo")
server_version = virtool.app.find_server_version()
releases = await virtool.updates.get_releases(repo, server_version)
return json_response({
"releases": releases
})
|
Make retrieval of releases from GitHub functional
|
Make retrieval of releases from GitHub functional
|
Python
|
mit
|
igboyes/virtool,virtool/virtool,virtool/virtool,igboyes/virtool
|
import virtool.app
import virtool.updates
from virtool.handlers.utils import json_response
async def get(req):
db = req.app["db"]
settings = req.app["settings"]
repo = settings.get("software_repo")
server_version = virtool.app.find_server_version()
await virtool.updates.get_releases(repo, server_version)
return json_response({"message": "YAY"})
Make retrieval of releases from GitHub functional
|
import virtool.app
import virtool.updates
from virtool.handlers.utils import json_response
async def get(req):
# db = req.app["db"]
settings = req.app["settings"]
repo = settings.get("software_repo")
server_version = virtool.app.find_server_version()
releases = await virtool.updates.get_releases(repo, server_version)
return json_response({
"releases": releases
})
|
<commit_before>import virtool.app
import virtool.updates
from virtool.handlers.utils import json_response
async def get(req):
db = req.app["db"]
settings = req.app["settings"]
repo = settings.get("software_repo")
server_version = virtool.app.find_server_version()
await virtool.updates.get_releases(repo, server_version)
return json_response({"message": "YAY"})
<commit_msg>Make retrieval of releases from GitHub functional<commit_after>
|
import virtool.app
import virtool.updates
from virtool.handlers.utils import json_response
async def get(req):
# db = req.app["db"]
settings = req.app["settings"]
repo = settings.get("software_repo")
server_version = virtool.app.find_server_version()
releases = await virtool.updates.get_releases(repo, server_version)
return json_response({
"releases": releases
})
|
import virtool.app
import virtool.updates
from virtool.handlers.utils import json_response
async def get(req):
db = req.app["db"]
settings = req.app["settings"]
repo = settings.get("software_repo")
server_version = virtool.app.find_server_version()
await virtool.updates.get_releases(repo, server_version)
return json_response({"message": "YAY"})
Make retrieval of releases from GitHub functionalimport virtool.app
import virtool.updates
from virtool.handlers.utils import json_response
async def get(req):
# db = req.app["db"]
settings = req.app["settings"]
repo = settings.get("software_repo")
server_version = virtool.app.find_server_version()
releases = await virtool.updates.get_releases(repo, server_version)
return json_response({
"releases": releases
})
|
<commit_before>import virtool.app
import virtool.updates
from virtool.handlers.utils import json_response
async def get(req):
db = req.app["db"]
settings = req.app["settings"]
repo = settings.get("software_repo")
server_version = virtool.app.find_server_version()
await virtool.updates.get_releases(repo, server_version)
return json_response({"message": "YAY"})
<commit_msg>Make retrieval of releases from GitHub functional<commit_after>import virtool.app
import virtool.updates
from virtool.handlers.utils import json_response
async def get(req):
# db = req.app["db"]
settings = req.app["settings"]
repo = settings.get("software_repo")
server_version = virtool.app.find_server_version()
releases = await virtool.updates.get_releases(repo, server_version)
return json_response({
"releases": releases
})
|
a978a7ed7f40ac7a77aa31ec89a3bb8ae58abb61
|
ecommerce/courses/migrations/0006_auto_20171204_1036.py
|
ecommerce/courses/migrations/0006_auto_20171204_1036.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-04 10:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
courses = Course.objects.all()
for course in courses:
course.created = course.history.earliest().history_date
course.modified = course.history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-04 10:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
HistoricalCourse = apps.get_model('courses', 'historicalcourse')
courses = Course.objects.all()
for course in courses:
history = HistoricalCourse.objects.filter(id=course.id)
course.created = history.earliest().history_date
course.modified = history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
|
Fix migration issue Fixed 'Course' object has no attribute 'history' issue in the migration
|
Fix migration issue
Fixed 'Course' object has no attribute 'history' issue in the migration
|
Python
|
agpl-3.0
|
edx/ecommerce,edx/ecommerce,eduNEXT/edunext-ecommerce,eduNEXT/edunext-ecommerce,eduNEXT/edunext-ecommerce,edx/ecommerce,edx/ecommerce,eduNEXT/edunext-ecommerce
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-04 10:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
courses = Course.objects.all()
for course in courses:
course.created = course.history.earliest().history_date
course.modified = course.history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
Fix migration issue
Fixed 'Course' object has no attribute 'history' issue in the migration
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-04 10:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
HistoricalCourse = apps.get_model('courses', 'historicalcourse')
courses = Course.objects.all()
for course in courses:
history = HistoricalCourse.objects.filter(id=course.id)
course.created = history.earliest().history_date
course.modified = history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-04 10:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
courses = Course.objects.all()
for course in courses:
course.created = course.history.earliest().history_date
course.modified = course.history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
<commit_msg>Fix migration issue
Fixed 'Course' object has no attribute 'history' issue in the migration<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-04 10:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
HistoricalCourse = apps.get_model('courses', 'historicalcourse')
courses = Course.objects.all()
for course in courses:
history = HistoricalCourse.objects.filter(id=course.id)
course.created = history.earliest().history_date
course.modified = history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-04 10:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
courses = Course.objects.all()
for course in courses:
course.created = course.history.earliest().history_date
course.modified = course.history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
Fix migration issue
Fixed 'Course' object has no attribute 'history' issue in the migration# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-04 10:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
HistoricalCourse = apps.get_model('courses', 'historicalcourse')
courses = Course.objects.all()
for course in courses:
history = HistoricalCourse.objects.filter(id=course.id)
course.created = history.earliest().history_date
course.modified = history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-04 10:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
courses = Course.objects.all()
for course in courses:
course.created = course.history.earliest().history_date
course.modified = course.history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
<commit_msg>Fix migration issue
Fixed 'Course' object has no attribute 'history' issue in the migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-04 10:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
def add_created_modified_date(apps, schema_editor):
Course = apps.get_model('courses', 'Course')
HistoricalCourse = apps.get_model('courses', 'historicalcourse')
courses = Course.objects.all()
for course in courses:
history = HistoricalCourse.objects.filter(id=course.id)
course.created = history.earliest().history_date
course.modified = history.latest().history_date
course.save()
dependencies = [
('courses', '0005_auto_20170525_0131'),
]
operations = [
migrations.AddField(
model_name='course',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='course',
name='modified',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='created',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='historicalcourse',
name='modified',
field=models.DateTimeField(blank=True, editable=False, null=True),
),
migrations.RunPython(add_created_modified_date, migrations.RunPython.noop),
]
|
19af60831293ccff759e7ad9afb2336d1e232b02
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
|
#!/usr/bin/env python
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
"i3pystatus.weather",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
|
Add i3pystatus.weather to packages list
|
Add i3pystatus.weather to packages list
|
Python
|
mit
|
asmikhailov/i3pystatus,drwahl/i3pystatus,enkore/i3pystatus,facetoe/i3pystatus,teto/i3pystatus,m45t3r/i3pystatus,Arvedui/i3pystatus,fmarchenko/i3pystatus,eBrnd/i3pystatus,yang-ling/i3pystatus,schroeji/i3pystatus,yang-ling/i3pystatus,ncoop/i3pystatus,ncoop/i3pystatus,asmikhailov/i3pystatus,fmarchenko/i3pystatus,enkore/i3pystatus,schroeji/i3pystatus,teto/i3pystatus,richese/i3pystatus,m45t3r/i3pystatus,facetoe/i3pystatus,richese/i3pystatus,drwahl/i3pystatus,Arvedui/i3pystatus,eBrnd/i3pystatus
|
#!/usr/bin/env python
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
Add i3pystatus.weather to packages list
|
#!/usr/bin/env python
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
"i3pystatus.weather",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
<commit_msg>Add i3pystatus.weather to packages list<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
"i3pystatus.weather",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
|
#!/usr/bin/env python
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
Add i3pystatus.weather to packages list#!/usr/bin/env python
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
"i3pystatus.weather",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
<commit_msg>Add i3pystatus.weather to packages list<commit_after>#!/usr/bin/env python
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
"i3pystatus.weather",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
|
40c8b74d5ce7dd3b76fe878eb769b95cea3f40c0
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='tangled.mako',
version='0.1a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a5',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a5',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
from setuptools import setup
setup(
name='tangled.mako',
version='0.1a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a10',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a10',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
Upgrade tangled.web 0.1a5 => 0.1a10
|
Upgrade tangled.web 0.1a5 => 0.1a10
|
Python
|
mit
|
TangledWeb/tangled.mako
|
from setuptools import setup
setup(
name='tangled.mako',
version='0.1a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a5',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a5',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
Upgrade tangled.web 0.1a5 => 0.1a10
|
from setuptools import setup
setup(
name='tangled.mako',
version='0.1a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a10',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a10',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
<commit_before>from setuptools import setup
setup(
name='tangled.mako',
version='0.1a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a5',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a5',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
<commit_msg>Upgrade tangled.web 0.1a5 => 0.1a10<commit_after>
|
from setuptools import setup
setup(
name='tangled.mako',
version='0.1a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a10',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a10',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
from setuptools import setup
setup(
name='tangled.mako',
version='0.1a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a5',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a5',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
Upgrade tangled.web 0.1a5 => 0.1a10from setuptools import setup
setup(
name='tangled.mako',
version='0.1a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a10',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a10',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
<commit_before>from setuptools import setup
setup(
name='tangled.mako',
version='0.1a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a5',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a5',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
<commit_msg>Upgrade tangled.web 0.1a5 => 0.1a10<commit_after>from setuptools import setup
setup(
name='tangled.mako',
version='0.1a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a10',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a10',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
cf370fa6ef2ddde31936da5e7ca8cb6b41331338
|
setup.py
|
setup.py
|
# coding: utf-8
from __future__ import absolute_import
import setuptools
from keysmith import __version__
with open('README.md', 'r') as f:
long_description = f.read()
setuptools.setup(
name='keysmith',
version=__version__,
license='GPL',
description='Diceware-style Password Generator',
long_description=long_description,
url='https://github.com/dmtucker/keysmith-py',
packages=['keysmith'],
package_data={'keysmith': ['words.txt']},
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']}
)
|
# coding: utf-8
from __future__ import absolute_import
import setuptools
from keysmith import __version__
setuptools.setup(
name='keysmith',
version=__version__,
license='GPL',
description='Diceware-style Password Generator',
url='https://github.com/dmtucker/keysmith-py',
packages=['keysmith'],
package_data={'keysmith': ['words.txt']},
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']}
)
|
Remove the long description from the package.
|
Remove the long description from the package.
|
Python
|
bsd-3-clause
|
dmtucker/keysmith
|
# coding: utf-8
from __future__ import absolute_import
import setuptools
from keysmith import __version__
with open('README.md', 'r') as f:
long_description = f.read()
setuptools.setup(
name='keysmith',
version=__version__,
license='GPL',
description='Diceware-style Password Generator',
long_description=long_description,
url='https://github.com/dmtucker/keysmith-py',
packages=['keysmith'],
package_data={'keysmith': ['words.txt']},
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']}
)
Remove the long description from the package.
|
# coding: utf-8
from __future__ import absolute_import
import setuptools
from keysmith import __version__
setuptools.setup(
name='keysmith',
version=__version__,
license='GPL',
description='Diceware-style Password Generator',
url='https://github.com/dmtucker/keysmith-py',
packages=['keysmith'],
package_data={'keysmith': ['words.txt']},
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']}
)
|
<commit_before># coding: utf-8
from __future__ import absolute_import
import setuptools
from keysmith import __version__
with open('README.md', 'r') as f:
long_description = f.read()
setuptools.setup(
name='keysmith',
version=__version__,
license='GPL',
description='Diceware-style Password Generator',
long_description=long_description,
url='https://github.com/dmtucker/keysmith-py',
packages=['keysmith'],
package_data={'keysmith': ['words.txt']},
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']}
)
<commit_msg>Remove the long description from the package.<commit_after>
|
# coding: utf-8
from __future__ import absolute_import
import setuptools
from keysmith import __version__
setuptools.setup(
name='keysmith',
version=__version__,
license='GPL',
description='Diceware-style Password Generator',
url='https://github.com/dmtucker/keysmith-py',
packages=['keysmith'],
package_data={'keysmith': ['words.txt']},
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']}
)
|
# coding: utf-8
from __future__ import absolute_import
import setuptools
from keysmith import __version__
with open('README.md', 'r') as f:
long_description = f.read()
setuptools.setup(
name='keysmith',
version=__version__,
license='GPL',
description='Diceware-style Password Generator',
long_description=long_description,
url='https://github.com/dmtucker/keysmith-py',
packages=['keysmith'],
package_data={'keysmith': ['words.txt']},
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']}
)
Remove the long description from the package.# coding: utf-8
from __future__ import absolute_import
import setuptools
from keysmith import __version__
setuptools.setup(
name='keysmith',
version=__version__,
license='GPL',
description='Diceware-style Password Generator',
url='https://github.com/dmtucker/keysmith-py',
packages=['keysmith'],
package_data={'keysmith': ['words.txt']},
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']}
)
|
<commit_before># coding: utf-8
from __future__ import absolute_import
import setuptools
from keysmith import __version__
with open('README.md', 'r') as f:
long_description = f.read()
setuptools.setup(
name='keysmith',
version=__version__,
license='GPL',
description='Diceware-style Password Generator',
long_description=long_description,
url='https://github.com/dmtucker/keysmith-py',
packages=['keysmith'],
package_data={'keysmith': ['words.txt']},
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']}
)
<commit_msg>Remove the long description from the package.<commit_after># coding: utf-8
from __future__ import absolute_import
import setuptools
from keysmith import __version__
setuptools.setup(
name='keysmith',
version=__version__,
license='GPL',
description='Diceware-style Password Generator',
url='https://github.com/dmtucker/keysmith-py',
packages=['keysmith'],
package_data={'keysmith': ['words.txt']},
entry_points={'console_scripts': ['keysmith = keysmith.__main__:main']}
)
|
434b3e94f461c995a5e2f421acca29897495f0a8
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name = 'respite',
version = '0.6.1',
description = "Respite conforms Django to Representational State Transfer (REST)",
author = "Johannes Gorset",
author_email = "jgorset@gmail.com",
url = "http://github.com/jgorset/respite",
packages = ['respite']
)
|
from distutils.core import setup
setup(
name = 'respite',
version = '0.6.1',
description = "Respite conforms Django to Representational State Transfer (REST)",
author = "Johannes Gorset",
author_email = "jgorset@gmail.com",
url = "http://github.com/jgorset/respite",
packages = ['respite', 'respite.lib', 'respite.serializers']
)
|
Add 'lib' and 'serializers' to packages
|
Add 'lib' and 'serializers' to packages
|
Python
|
mit
|
jgorset/django-respite,jgorset/django-respite,jgorset/django-respite
|
from distutils.core import setup
setup(
name = 'respite',
version = '0.6.1',
description = "Respite conforms Django to Representational State Transfer (REST)",
author = "Johannes Gorset",
author_email = "jgorset@gmail.com",
url = "http://github.com/jgorset/respite",
packages = ['respite']
)
Add 'lib' and 'serializers' to packages
|
from distutils.core import setup
setup(
name = 'respite',
version = '0.6.1',
description = "Respite conforms Django to Representational State Transfer (REST)",
author = "Johannes Gorset",
author_email = "jgorset@gmail.com",
url = "http://github.com/jgorset/respite",
packages = ['respite', 'respite.lib', 'respite.serializers']
)
|
<commit_before>from distutils.core import setup
setup(
name = 'respite',
version = '0.6.1',
description = "Respite conforms Django to Representational State Transfer (REST)",
author = "Johannes Gorset",
author_email = "jgorset@gmail.com",
url = "http://github.com/jgorset/respite",
packages = ['respite']
)
<commit_msg>Add 'lib' and 'serializers' to packages<commit_after>
|
from distutils.core import setup
setup(
name = 'respite',
version = '0.6.1',
description = "Respite conforms Django to Representational State Transfer (REST)",
author = "Johannes Gorset",
author_email = "jgorset@gmail.com",
url = "http://github.com/jgorset/respite",
packages = ['respite', 'respite.lib', 'respite.serializers']
)
|
from distutils.core import setup
setup(
name = 'respite',
version = '0.6.1',
description = "Respite conforms Django to Representational State Transfer (REST)",
author = "Johannes Gorset",
author_email = "jgorset@gmail.com",
url = "http://github.com/jgorset/respite",
packages = ['respite']
)
Add 'lib' and 'serializers' to packagesfrom distutils.core import setup
setup(
name = 'respite',
version = '0.6.1',
description = "Respite conforms Django to Representational State Transfer (REST)",
author = "Johannes Gorset",
author_email = "jgorset@gmail.com",
url = "http://github.com/jgorset/respite",
packages = ['respite', 'respite.lib', 'respite.serializers']
)
|
<commit_before>from distutils.core import setup
setup(
name = 'respite',
version = '0.6.1',
description = "Respite conforms Django to Representational State Transfer (REST)",
author = "Johannes Gorset",
author_email = "jgorset@gmail.com",
url = "http://github.com/jgorset/respite",
packages = ['respite']
)
<commit_msg>Add 'lib' and 'serializers' to packages<commit_after>from distutils.core import setup
setup(
name = 'respite',
version = '0.6.1',
description = "Respite conforms Django to Representational State Transfer (REST)",
author = "Johannes Gorset",
author_email = "jgorset@gmail.com",
url = "http://github.com/jgorset/respite",
packages = ['respite', 'respite.lib', 'respite.serializers']
)
|
9d3608d85ef31910c2bb0c79505bd8a5d18a603d
|
setup.py
|
setup.py
|
""" Setup file """
import os
from setuptools import setup, find_packages
from version_helper import git_version
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, 'README.rst')).read()
CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read()
REQUIREMENTS = [
'mock',
]
if __name__ == "__main__":
setup(
name='devbox',
description='Quickly set up python repos for development',
long_description=README + '\n\n' + CHANGES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
],
license='MIT',
author='Steven Arcangeli',
author_email='steven@highlig.ht',
url='http://github.com/mathcamp/devbox',
zip_safe=False,
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'devbox-pre-commit = devbox.hook:precommit',
'devbox-create = devbox:create',
'devbox-unbox = devbox.unbox:main',
],
},
setup_requires=[
'nose>=1.0',
],
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS,
**git_version()
)
|
""" Setup file """
import os
from setuptools import setup, find_packages
from version_helper import git_version
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, 'README.rst')).read()
CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read()
REQUIREMENTS = [
'mock',
]
# Python 2.6 doesn't ship with argparse
try:
import argparse
except ImportError:
REQUIREMENTS.append('argparse')
if __name__ == "__main__":
setup(
name='devbox',
description='Quickly set up python repos for development',
long_description=README + '\n\n' + CHANGES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
],
license='MIT',
author='Steven Arcangeli',
author_email='steven@highlig.ht',
url='http://github.com/mathcamp/devbox',
zip_safe=False,
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'devbox-pre-commit = devbox.hook:precommit',
'devbox-create = devbox:create',
'devbox-unbox = devbox.unbox:main',
],
},
setup_requires=[
'nose>=1.0',
],
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS,
**git_version()
)
|
Add argparse as a requirement if not built in
|
Add argparse as a requirement if not built in
|
Python
|
mit
|
mathcamp/devbox,mathcamp/devbox
|
""" Setup file """
import os
from setuptools import setup, find_packages
from version_helper import git_version
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, 'README.rst')).read()
CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read()
REQUIREMENTS = [
'mock',
]
if __name__ == "__main__":
setup(
name='devbox',
description='Quickly set up python repos for development',
long_description=README + '\n\n' + CHANGES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
],
license='MIT',
author='Steven Arcangeli',
author_email='steven@highlig.ht',
url='http://github.com/mathcamp/devbox',
zip_safe=False,
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'devbox-pre-commit = devbox.hook:precommit',
'devbox-create = devbox:create',
'devbox-unbox = devbox.unbox:main',
],
},
setup_requires=[
'nose>=1.0',
],
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS,
**git_version()
)
Add argparse as a requirement if not built in
|
""" Setup file """
import os
from setuptools import setup, find_packages
from version_helper import git_version
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, 'README.rst')).read()
CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read()
REQUIREMENTS = [
'mock',
]
# Python 2.6 doesn't ship with argparse
try:
import argparse
except ImportError:
REQUIREMENTS.append('argparse')
if __name__ == "__main__":
setup(
name='devbox',
description='Quickly set up python repos for development',
long_description=README + '\n\n' + CHANGES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
],
license='MIT',
author='Steven Arcangeli',
author_email='steven@highlig.ht',
url='http://github.com/mathcamp/devbox',
zip_safe=False,
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'devbox-pre-commit = devbox.hook:precommit',
'devbox-create = devbox:create',
'devbox-unbox = devbox.unbox:main',
],
},
setup_requires=[
'nose>=1.0',
],
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS,
**git_version()
)
|
<commit_before>""" Setup file """
import os
from setuptools import setup, find_packages
from version_helper import git_version
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, 'README.rst')).read()
CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read()
REQUIREMENTS = [
'mock',
]
if __name__ == "__main__":
setup(
name='devbox',
description='Quickly set up python repos for development',
long_description=README + '\n\n' + CHANGES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
],
license='MIT',
author='Steven Arcangeli',
author_email='steven@highlig.ht',
url='http://github.com/mathcamp/devbox',
zip_safe=False,
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'devbox-pre-commit = devbox.hook:precommit',
'devbox-create = devbox:create',
'devbox-unbox = devbox.unbox:main',
],
},
setup_requires=[
'nose>=1.0',
],
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS,
**git_version()
)
<commit_msg>Add argparse as a requirement if not built in<commit_after>
|
""" Setup file """
import os
from setuptools import setup, find_packages
from version_helper import git_version
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, 'README.rst')).read()
CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read()
REQUIREMENTS = [
'mock',
]
# Python 2.6 doesn't ship with argparse
try:
import argparse
except ImportError:
REQUIREMENTS.append('argparse')
if __name__ == "__main__":
setup(
name='devbox',
description='Quickly set up python repos for development',
long_description=README + '\n\n' + CHANGES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
],
license='MIT',
author='Steven Arcangeli',
author_email='steven@highlig.ht',
url='http://github.com/mathcamp/devbox',
zip_safe=False,
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'devbox-pre-commit = devbox.hook:precommit',
'devbox-create = devbox:create',
'devbox-unbox = devbox.unbox:main',
],
},
setup_requires=[
'nose>=1.0',
],
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS,
**git_version()
)
|
""" Setup file """
import os
from setuptools import setup, find_packages
from version_helper import git_version
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, 'README.rst')).read()
CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read()
REQUIREMENTS = [
'mock',
]
if __name__ == "__main__":
setup(
name='devbox',
description='Quickly set up python repos for development',
long_description=README + '\n\n' + CHANGES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
],
license='MIT',
author='Steven Arcangeli',
author_email='steven@highlig.ht',
url='http://github.com/mathcamp/devbox',
zip_safe=False,
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'devbox-pre-commit = devbox.hook:precommit',
'devbox-create = devbox:create',
'devbox-unbox = devbox.unbox:main',
],
},
setup_requires=[
'nose>=1.0',
],
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS,
**git_version()
)
Add argparse as a requirement if not built in""" Setup file """
import os
from setuptools import setup, find_packages
from version_helper import git_version
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, 'README.rst')).read()
CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read()
REQUIREMENTS = [
'mock',
]
# Python 2.6 doesn't ship with argparse
try:
import argparse
except ImportError:
REQUIREMENTS.append('argparse')
if __name__ == "__main__":
setup(
name='devbox',
description='Quickly set up python repos for development',
long_description=README + '\n\n' + CHANGES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
],
license='MIT',
author='Steven Arcangeli',
author_email='steven@highlig.ht',
url='http://github.com/mathcamp/devbox',
zip_safe=False,
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'devbox-pre-commit = devbox.hook:precommit',
'devbox-create = devbox:create',
'devbox-unbox = devbox.unbox:main',
],
},
setup_requires=[
'nose>=1.0',
],
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS,
**git_version()
)
|
<commit_before>""" Setup file """
import os
from setuptools import setup, find_packages
from version_helper import git_version
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, 'README.rst')).read()
CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read()
REQUIREMENTS = [
'mock',
]
if __name__ == "__main__":
setup(
name='devbox',
description='Quickly set up python repos for development',
long_description=README + '\n\n' + CHANGES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
],
license='MIT',
author='Steven Arcangeli',
author_email='steven@highlig.ht',
url='http://github.com/mathcamp/devbox',
zip_safe=False,
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'devbox-pre-commit = devbox.hook:precommit',
'devbox-create = devbox:create',
'devbox-unbox = devbox.unbox:main',
],
},
setup_requires=[
'nose>=1.0',
],
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS,
**git_version()
)
<commit_msg>Add argparse as a requirement if not built in<commit_after>""" Setup file """
import os
from setuptools import setup, find_packages
from version_helper import git_version
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, 'README.rst')).read()
CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read()
REQUIREMENTS = [
'mock',
]
# Python 2.6 doesn't ship with argparse
try:
import argparse
except ImportError:
REQUIREMENTS.append('argparse')
if __name__ == "__main__":
setup(
name='devbox',
description='Quickly set up python repos for development',
long_description=README + '\n\n' + CHANGES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
],
license='MIT',
author='Steven Arcangeli',
author_email='steven@highlig.ht',
url='http://github.com/mathcamp/devbox',
zip_safe=False,
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'devbox-pre-commit = devbox.hook:precommit',
'devbox-create = devbox:create',
'devbox-unbox = devbox.unbox:main',
],
},
setup_requires=[
'nose>=1.0',
],
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS,
**git_version()
)
|
3ac8b236f1e892ec98b71f917093400d0c79cd8a
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='neovim',
version='0.0.8',
description='Python client to neovim',
url='http://github.com/neovim/python-client',
download_url='https://github.com/neovim/python-client/archive/0.0.8.tar.gz',
author='Thiago de Arruda',
author_email='tpadilha84@gmail.com',
license='MIT',
packages=['neovim'],
install_requires=[
'msgpack-python',
'pyuv',
],
zip_safe=False)
|
from setuptools import setup
setup(name='neovim',
version='0.0.8',
description='Python client to neovim',
url='http://github.com/neovim/python-client',
download_url='https://github.com/neovim/python-client/archive/0.0.8.tar.gz',
author='Thiago de Arruda',
author_email='tpadilha84@gmail.com',
license='MIT',
packages=['neovim'],
install_requires=[
'greenlet',
'msgpack-python',
'pyuv',
],
zip_safe=False)
|
Add 'greenlet' as a PyPI dependency
|
Add 'greenlet' as a PyPI dependency
|
Python
|
apache-2.0
|
traverseda/python-client,Shougo/python-client,timeyyy/python-client,0x90sled/python-client,bfredl/python-client,meitham/python-client,justinmk/python-client,starcraftman/python-client,neovim/python-client,justinmk/python-client,neovim/python-client,zchee/python-client,brcolow/python-client,0x90sled/python-client,bfredl/python-client,fwalch/python-client,timeyyy/python-client,traverseda/python-client,meitham/python-client,starcraftman/python-client,Shougo/python-client,fwalch/python-client,zchee/python-client,brcolow/python-client
|
from setuptools import setup
setup(name='neovim',
version='0.0.8',
description='Python client to neovim',
url='http://github.com/neovim/python-client',
download_url='https://github.com/neovim/python-client/archive/0.0.8.tar.gz',
author='Thiago de Arruda',
author_email='tpadilha84@gmail.com',
license='MIT',
packages=['neovim'],
install_requires=[
'msgpack-python',
'pyuv',
],
zip_safe=False)
Add 'greenlet' as a PyPI dependency
|
from setuptools import setup
setup(name='neovim',
version='0.0.8',
description='Python client to neovim',
url='http://github.com/neovim/python-client',
download_url='https://github.com/neovim/python-client/archive/0.0.8.tar.gz',
author='Thiago de Arruda',
author_email='tpadilha84@gmail.com',
license='MIT',
packages=['neovim'],
install_requires=[
'greenlet',
'msgpack-python',
'pyuv',
],
zip_safe=False)
|
<commit_before>from setuptools import setup
setup(name='neovim',
version='0.0.8',
description='Python client to neovim',
url='http://github.com/neovim/python-client',
download_url='https://github.com/neovim/python-client/archive/0.0.8.tar.gz',
author='Thiago de Arruda',
author_email='tpadilha84@gmail.com',
license='MIT',
packages=['neovim'],
install_requires=[
'msgpack-python',
'pyuv',
],
zip_safe=False)
<commit_msg>Add 'greenlet' as a PyPI dependency<commit_after>
|
from setuptools import setup
setup(name='neovim',
version='0.0.8',
description='Python client to neovim',
url='http://github.com/neovim/python-client',
download_url='https://github.com/neovim/python-client/archive/0.0.8.tar.gz',
author='Thiago de Arruda',
author_email='tpadilha84@gmail.com',
license='MIT',
packages=['neovim'],
install_requires=[
'greenlet',
'msgpack-python',
'pyuv',
],
zip_safe=False)
|
from setuptools import setup
setup(name='neovim',
version='0.0.8',
description='Python client to neovim',
url='http://github.com/neovim/python-client',
download_url='https://github.com/neovim/python-client/archive/0.0.8.tar.gz',
author='Thiago de Arruda',
author_email='tpadilha84@gmail.com',
license='MIT',
packages=['neovim'],
install_requires=[
'msgpack-python',
'pyuv',
],
zip_safe=False)
Add 'greenlet' as a PyPI dependencyfrom setuptools import setup
setup(name='neovim',
version='0.0.8',
description='Python client to neovim',
url='http://github.com/neovim/python-client',
download_url='https://github.com/neovim/python-client/archive/0.0.8.tar.gz',
author='Thiago de Arruda',
author_email='tpadilha84@gmail.com',
license='MIT',
packages=['neovim'],
install_requires=[
'greenlet',
'msgpack-python',
'pyuv',
],
zip_safe=False)
|
<commit_before>from setuptools import setup
setup(name='neovim',
version='0.0.8',
description='Python client to neovim',
url='http://github.com/neovim/python-client',
download_url='https://github.com/neovim/python-client/archive/0.0.8.tar.gz',
author='Thiago de Arruda',
author_email='tpadilha84@gmail.com',
license='MIT',
packages=['neovim'],
install_requires=[
'msgpack-python',
'pyuv',
],
zip_safe=False)
<commit_msg>Add 'greenlet' as a PyPI dependency<commit_after>from setuptools import setup
setup(name='neovim',
version='0.0.8',
description='Python client to neovim',
url='http://github.com/neovim/python-client',
download_url='https://github.com/neovim/python-client/archive/0.0.8.tar.gz',
author='Thiago de Arruda',
author_email='tpadilha84@gmail.com',
license='MIT',
packages=['neovim'],
install_requires=[
'greenlet',
'msgpack-python',
'pyuv',
],
zip_safe=False)
|
9d4760423f9fce9453ff7147873f3270e59e7782
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "http://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
Use HTTPS for the project URL on PyPI
|
Use HTTPS for the project URL on PyPI
|
Python
|
bsd-3-clause
|
jacobian/wsgi-sslify
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "http://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
Use HTTPS for the project URL on PyPI
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
<commit_before>from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "http://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
<commit_msg>Use HTTPS for the project URL on PyPI<commit_after>
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "http://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
Use HTTPS for the project URL on PyPIfrom setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
<commit_before>from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "http://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
<commit_msg>Use HTTPS for the project URL on PyPI<commit_after>from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
f49d6cabc704cc5aedbd6bd897e14fff1d8e172a
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
version='0.1dev'
setup(name='fixcity',
version=version,
description="Build me a bike rack!",
author="Ivan Willig, Paul Winkler, Sonali Sridhar, Andy Cochran, etc.",
author_email="iwillig@opengeo.org",
url="http://www.plope.com/software/ExternalEditor",
zip_safe=False,
scripts=[],
packages=find_packages(),
dependency_links=[
'http://geopy.googlecode.com/svn/branches/reverse-geocode#egg=geopy-dev',
'http://dist.repoze.org/PIL-1.1.6.tar.gz#egg=PIL-1.1.6',
'http://sourceforge.net/projects/ctypes/files/ctypes/1.0.2/ctypes-1.0.2.tar.gz/download#egg=ctypes-1.0.2',
],
install_requires=[
'geopy==dev,>=0.93dev-r84',
'sorl-thumbnail>=3.2.2',
'Django>=1.1',
'django-registration>=0.7',
'psycopg2>=2.0.12',
'PIL==1.1.6',
'ctypes>=1.0.2',
'wsgilog>=0.1',
],
)
|
from setuptools import setup, find_packages
version='0.1dev'
setup(name='fixcity',
version=version,
description="Build me a bike rack!",
author="Ivan Willig, Paul Winkler, Sonali Sridhar, Andy Cochran, etc.",
author_email="iwillig@opengeo.org",
url="http://www.plope.com/software/ExternalEditor",
zip_safe=False,
scripts=[],
packages=find_packages(),
dependency_links=[
'http://geopy.googlecode.com/svn/branches/reverse-geocode#egg=geopy-dev',
'http://dist.repoze.org/PIL-1.1.6.tar.gz#egg=PIL-1.1.6',
'http://sourceforge.net/projects/ctypes/files/ctypes/1.0.2/ctypes-1.0.2.tar.gz/download#egg=ctypes-1.0.2',
],
install_requires=[
'geopy==dev,>=0.93dev-r84',
'sorl-thumbnail>=3.2.2',
'Django>=1.1.1',
'django-registration>=0.7',
'psycopg2>=2.0.12',
'PIL==1.1.6',
'ctypes>=1.0.2',
'wsgilog>=0.1',
],
)
|
Use Django 1.1.1 to get a security bugfix.
|
Use Django 1.1.1 to get a security bugfix.
|
Python
|
agpl-3.0
|
openplans/fixcity,openplans/fixcity
|
from setuptools import setup, find_packages
version='0.1dev'
setup(name='fixcity',
version=version,
description="Build me a bike rack!",
author="Ivan Willig, Paul Winkler, Sonali Sridhar, Andy Cochran, etc.",
author_email="iwillig@opengeo.org",
url="http://www.plope.com/software/ExternalEditor",
zip_safe=False,
scripts=[],
packages=find_packages(),
dependency_links=[
'http://geopy.googlecode.com/svn/branches/reverse-geocode#egg=geopy-dev',
'http://dist.repoze.org/PIL-1.1.6.tar.gz#egg=PIL-1.1.6',
'http://sourceforge.net/projects/ctypes/files/ctypes/1.0.2/ctypes-1.0.2.tar.gz/download#egg=ctypes-1.0.2',
],
install_requires=[
'geopy==dev,>=0.93dev-r84',
'sorl-thumbnail>=3.2.2',
'Django>=1.1',
'django-registration>=0.7',
'psycopg2>=2.0.12',
'PIL==1.1.6',
'ctypes>=1.0.2',
'wsgilog>=0.1',
],
)
Use Django 1.1.1 to get a security bugfix.
|
from setuptools import setup, find_packages
version='0.1dev'
setup(name='fixcity',
version=version,
description="Build me a bike rack!",
author="Ivan Willig, Paul Winkler, Sonali Sridhar, Andy Cochran, etc.",
author_email="iwillig@opengeo.org",
url="http://www.plope.com/software/ExternalEditor",
zip_safe=False,
scripts=[],
packages=find_packages(),
dependency_links=[
'http://geopy.googlecode.com/svn/branches/reverse-geocode#egg=geopy-dev',
'http://dist.repoze.org/PIL-1.1.6.tar.gz#egg=PIL-1.1.6',
'http://sourceforge.net/projects/ctypes/files/ctypes/1.0.2/ctypes-1.0.2.tar.gz/download#egg=ctypes-1.0.2',
],
install_requires=[
'geopy==dev,>=0.93dev-r84',
'sorl-thumbnail>=3.2.2',
'Django>=1.1.1',
'django-registration>=0.7',
'psycopg2>=2.0.12',
'PIL==1.1.6',
'ctypes>=1.0.2',
'wsgilog>=0.1',
],
)
|
<commit_before>from setuptools import setup, find_packages
version='0.1dev'
setup(name='fixcity',
version=version,
description="Build me a bike rack!",
author="Ivan Willig, Paul Winkler, Sonali Sridhar, Andy Cochran, etc.",
author_email="iwillig@opengeo.org",
url="http://www.plope.com/software/ExternalEditor",
zip_safe=False,
scripts=[],
packages=find_packages(),
dependency_links=[
'http://geopy.googlecode.com/svn/branches/reverse-geocode#egg=geopy-dev',
'http://dist.repoze.org/PIL-1.1.6.tar.gz#egg=PIL-1.1.6',
'http://sourceforge.net/projects/ctypes/files/ctypes/1.0.2/ctypes-1.0.2.tar.gz/download#egg=ctypes-1.0.2',
],
install_requires=[
'geopy==dev,>=0.93dev-r84',
'sorl-thumbnail>=3.2.2',
'Django>=1.1',
'django-registration>=0.7',
'psycopg2>=2.0.12',
'PIL==1.1.6',
'ctypes>=1.0.2',
'wsgilog>=0.1',
],
)
<commit_msg>Use Django 1.1.1 to get a security bugfix.<commit_after>
|
from setuptools import setup, find_packages
version='0.1dev'
setup(name='fixcity',
version=version,
description="Build me a bike rack!",
author="Ivan Willig, Paul Winkler, Sonali Sridhar, Andy Cochran, etc.",
author_email="iwillig@opengeo.org",
url="http://www.plope.com/software/ExternalEditor",
zip_safe=False,
scripts=[],
packages=find_packages(),
dependency_links=[
'http://geopy.googlecode.com/svn/branches/reverse-geocode#egg=geopy-dev',
'http://dist.repoze.org/PIL-1.1.6.tar.gz#egg=PIL-1.1.6',
'http://sourceforge.net/projects/ctypes/files/ctypes/1.0.2/ctypes-1.0.2.tar.gz/download#egg=ctypes-1.0.2',
],
install_requires=[
'geopy==dev,>=0.93dev-r84',
'sorl-thumbnail>=3.2.2',
'Django>=1.1.1',
'django-registration>=0.7',
'psycopg2>=2.0.12',
'PIL==1.1.6',
'ctypes>=1.0.2',
'wsgilog>=0.1',
],
)
|
from setuptools import setup, find_packages
version='0.1dev'
setup(name='fixcity',
version=version,
description="Build me a bike rack!",
author="Ivan Willig, Paul Winkler, Sonali Sridhar, Andy Cochran, etc.",
author_email="iwillig@opengeo.org",
url="http://www.plope.com/software/ExternalEditor",
zip_safe=False,
scripts=[],
packages=find_packages(),
dependency_links=[
'http://geopy.googlecode.com/svn/branches/reverse-geocode#egg=geopy-dev',
'http://dist.repoze.org/PIL-1.1.6.tar.gz#egg=PIL-1.1.6',
'http://sourceforge.net/projects/ctypes/files/ctypes/1.0.2/ctypes-1.0.2.tar.gz/download#egg=ctypes-1.0.2',
],
install_requires=[
'geopy==dev,>=0.93dev-r84',
'sorl-thumbnail>=3.2.2',
'Django>=1.1',
'django-registration>=0.7',
'psycopg2>=2.0.12',
'PIL==1.1.6',
'ctypes>=1.0.2',
'wsgilog>=0.1',
],
)
Use Django 1.1.1 to get a security bugfix.from setuptools import setup, find_packages
version='0.1dev'
setup(name='fixcity',
version=version,
description="Build me a bike rack!",
author="Ivan Willig, Paul Winkler, Sonali Sridhar, Andy Cochran, etc.",
author_email="iwillig@opengeo.org",
url="http://www.plope.com/software/ExternalEditor",
zip_safe=False,
scripts=[],
packages=find_packages(),
dependency_links=[
'http://geopy.googlecode.com/svn/branches/reverse-geocode#egg=geopy-dev',
'http://dist.repoze.org/PIL-1.1.6.tar.gz#egg=PIL-1.1.6',
'http://sourceforge.net/projects/ctypes/files/ctypes/1.0.2/ctypes-1.0.2.tar.gz/download#egg=ctypes-1.0.2',
],
install_requires=[
'geopy==dev,>=0.93dev-r84',
'sorl-thumbnail>=3.2.2',
'Django>=1.1.1',
'django-registration>=0.7',
'psycopg2>=2.0.12',
'PIL==1.1.6',
'ctypes>=1.0.2',
'wsgilog>=0.1',
],
)
|
<commit_before>from setuptools import setup, find_packages
version='0.1dev'
setup(name='fixcity',
version=version,
description="Build me a bike rack!",
author="Ivan Willig, Paul Winkler, Sonali Sridhar, Andy Cochran, etc.",
author_email="iwillig@opengeo.org",
url="http://www.plope.com/software/ExternalEditor",
zip_safe=False,
scripts=[],
packages=find_packages(),
dependency_links=[
'http://geopy.googlecode.com/svn/branches/reverse-geocode#egg=geopy-dev',
'http://dist.repoze.org/PIL-1.1.6.tar.gz#egg=PIL-1.1.6',
'http://sourceforge.net/projects/ctypes/files/ctypes/1.0.2/ctypes-1.0.2.tar.gz/download#egg=ctypes-1.0.2',
],
install_requires=[
'geopy==dev,>=0.93dev-r84',
'sorl-thumbnail>=3.2.2',
'Django>=1.1',
'django-registration>=0.7',
'psycopg2>=2.0.12',
'PIL==1.1.6',
'ctypes>=1.0.2',
'wsgilog>=0.1',
],
)
<commit_msg>Use Django 1.1.1 to get a security bugfix.<commit_after>from setuptools import setup, find_packages
version='0.1dev'
setup(name='fixcity',
version=version,
description="Build me a bike rack!",
author="Ivan Willig, Paul Winkler, Sonali Sridhar, Andy Cochran, etc.",
author_email="iwillig@opengeo.org",
url="http://www.plope.com/software/ExternalEditor",
zip_safe=False,
scripts=[],
packages=find_packages(),
dependency_links=[
'http://geopy.googlecode.com/svn/branches/reverse-geocode#egg=geopy-dev',
'http://dist.repoze.org/PIL-1.1.6.tar.gz#egg=PIL-1.1.6',
'http://sourceforge.net/projects/ctypes/files/ctypes/1.0.2/ctypes-1.0.2.tar.gz/download#egg=ctypes-1.0.2',
],
install_requires=[
'geopy==dev,>=0.93dev-r84',
'sorl-thumbnail>=3.2.2',
'Django>=1.1.1',
'django-registration>=0.7',
'psycopg2>=2.0.12',
'PIL==1.1.6',
'ctypes>=1.0.2',
'wsgilog>=0.1',
],
)
|
f44049bc0266f31ff982e88b0d7440413b6d1b68
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# http://stackoverflow.com/questions/9810603/adding-install-requires-to-setup-py-when-making-a-python-package
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='molml',
version='0.0.1',
description='An interface between molecules and machine learning',
author='Chris Collins',
author_email='chris@crcollins.com',
url='https://github.com/crcollins/molml/',
license='MIT',
packages=['molml'],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=[
'pathos',
],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Physics",
]
)
|
#!/usr/bin/env python
# http://stackoverflow.com/questions/9810603/adding-install-requires-to-setup-py-when-making-a-python-package
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='molml',
version='0.1.0',
description='An interface between molecules and machine learning',
author='Chris Collins',
author_email='chris@crcollins.com',
url='https://github.com/crcollins/molml/',
license='MIT',
packages=['molml'],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=[
'pathos',
],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Physics",
]
)
|
Update the version to 0.1.0
|
Update the version to 0.1.0
|
Python
|
mit
|
crcollins/molml
|
#!/usr/bin/env python
# http://stackoverflow.com/questions/9810603/adding-install-requires-to-setup-py-when-making-a-python-package
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='molml',
version='0.0.1',
description='An interface between molecules and machine learning',
author='Chris Collins',
author_email='chris@crcollins.com',
url='https://github.com/crcollins/molml/',
license='MIT',
packages=['molml'],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=[
'pathos',
],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Physics",
]
)
Update the version to 0.1.0
|
#!/usr/bin/env python
# http://stackoverflow.com/questions/9810603/adding-install-requires-to-setup-py-when-making-a-python-package
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='molml',
version='0.1.0',
description='An interface between molecules and machine learning',
author='Chris Collins',
author_email='chris@crcollins.com',
url='https://github.com/crcollins/molml/',
license='MIT',
packages=['molml'],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=[
'pathos',
],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Physics",
]
)
|
<commit_before>#!/usr/bin/env python
# http://stackoverflow.com/questions/9810603/adding-install-requires-to-setup-py-when-making-a-python-package
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='molml',
version='0.0.1',
description='An interface between molecules and machine learning',
author='Chris Collins',
author_email='chris@crcollins.com',
url='https://github.com/crcollins/molml/',
license='MIT',
packages=['molml'],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=[
'pathos',
],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Physics",
]
)
<commit_msg>Update the version to 0.1.0<commit_after>
|
#!/usr/bin/env python
# http://stackoverflow.com/questions/9810603/adding-install-requires-to-setup-py-when-making-a-python-package
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='molml',
version='0.1.0',
description='An interface between molecules and machine learning',
author='Chris Collins',
author_email='chris@crcollins.com',
url='https://github.com/crcollins/molml/',
license='MIT',
packages=['molml'],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=[
'pathos',
],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Physics",
]
)
|
#!/usr/bin/env python
# http://stackoverflow.com/questions/9810603/adding-install-requires-to-setup-py-when-making-a-python-package
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='molml',
version='0.0.1',
description='An interface between molecules and machine learning',
author='Chris Collins',
author_email='chris@crcollins.com',
url='https://github.com/crcollins/molml/',
license='MIT',
packages=['molml'],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=[
'pathos',
],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Physics",
]
)
Update the version to 0.1.0#!/usr/bin/env python
# http://stackoverflow.com/questions/9810603/adding-install-requires-to-setup-py-when-making-a-python-package
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='molml',
version='0.1.0',
description='An interface between molecules and machine learning',
author='Chris Collins',
author_email='chris@crcollins.com',
url='https://github.com/crcollins/molml/',
license='MIT',
packages=['molml'],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=[
'pathos',
],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Physics",
]
)
|
<commit_before>#!/usr/bin/env python
# http://stackoverflow.com/questions/9810603/adding-install-requires-to-setup-py-when-making-a-python-package
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='molml',
version='0.0.1',
description='An interface between molecules and machine learning',
author='Chris Collins',
author_email='chris@crcollins.com',
url='https://github.com/crcollins/molml/',
license='MIT',
packages=['molml'],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=[
'pathos',
],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Physics",
]
)
<commit_msg>Update the version to 0.1.0<commit_after>#!/usr/bin/env python
# http://stackoverflow.com/questions/9810603/adding-install-requires-to-setup-py-when-making-a-python-package
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='molml',
version='0.1.0',
description='An interface between molecules and machine learning',
author='Chris Collins',
author_email='chris@crcollins.com',
url='https://github.com/crcollins/molml/',
license='MIT',
packages=['molml'],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=[
'pathos',
],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Scientific/Engineering :: Physics",
]
)
|
becea073558590c8d16a088c947d4ea482cba017
|
setup.py
|
setup.py
|
#! /usr/bin/env python3
from distutils.core import setup
setup(
description = 'File downloader for danbooru',
author = 'Todd Gaunt',
url = 'https://www.github.com/toddgaunt/danboorsync',
download_url = 'https://www.github.com/toddgaunt/danboorsync',
author_email = 'toddgaunt@protonmail.ch',
version = '1.0',
packages = ['danboorsync'],
package_dir = {'danboorsync':'src'},
# Change these per distribution
data_files = [('usr/share/man/man1', ['doc/danboorsync.1']),
('usr/share/licenses/imgfetch/LICENSE', ['doc/LICENSE'])],
scripts = ['bin/danboorsync'],
name = 'danboorsync'
)
|
#! /usr/bin/env python3
from distutils.core import setup
setup(
description = 'File downloader for danbooru',
author = 'Todd Gaunt',
url = 'https://www.github.com/toddgaunt/danboorsync',
download_url = 'https://www.github.com/toddgaunt/danboorsync',
author_email = 'toddgaunt@protonmail.ch',
version = '1.0',
packages = ['danboorsync'],
package_dir = {'danboorsync':'src'},
# Change these per distribution
data_files = [('usr/share/man/man1', ['doc/danboorsync.1']),
('usr/share/licenses/imgfetch/LICENSE', ['doc/LICENSE'])],
scripts = ['/usr/bin/danboorsync'],
name = 'danboorsync'
)
|
CHange script installation to /usr/bin/ instead of /bin
|
CHange script installation to /usr/bin/ instead of /bin
|
Python
|
isc
|
toddgaunt/imgfetch
|
#! /usr/bin/env python3
from distutils.core import setup
setup(
description = 'File downloader for danbooru',
author = 'Todd Gaunt',
url = 'https://www.github.com/toddgaunt/danboorsync',
download_url = 'https://www.github.com/toddgaunt/danboorsync',
author_email = 'toddgaunt@protonmail.ch',
version = '1.0',
packages = ['danboorsync'],
package_dir = {'danboorsync':'src'},
# Change these per distribution
data_files = [('usr/share/man/man1', ['doc/danboorsync.1']),
('usr/share/licenses/imgfetch/LICENSE', ['doc/LICENSE'])],
scripts = ['bin/danboorsync'],
name = 'danboorsync'
)
CHange script installation to /usr/bin/ instead of /bin
|
#! /usr/bin/env python3
from distutils.core import setup
setup(
description = 'File downloader for danbooru',
author = 'Todd Gaunt',
url = 'https://www.github.com/toddgaunt/danboorsync',
download_url = 'https://www.github.com/toddgaunt/danboorsync',
author_email = 'toddgaunt@protonmail.ch',
version = '1.0',
packages = ['danboorsync'],
package_dir = {'danboorsync':'src'},
# Change these per distribution
data_files = [('usr/share/man/man1', ['doc/danboorsync.1']),
('usr/share/licenses/imgfetch/LICENSE', ['doc/LICENSE'])],
scripts = ['/usr/bin/danboorsync'],
name = 'danboorsync'
)
|
<commit_before>#! /usr/bin/env python3
from distutils.core import setup
setup(
description = 'File downloader for danbooru',
author = 'Todd Gaunt',
url = 'https://www.github.com/toddgaunt/danboorsync',
download_url = 'https://www.github.com/toddgaunt/danboorsync',
author_email = 'toddgaunt@protonmail.ch',
version = '1.0',
packages = ['danboorsync'],
package_dir = {'danboorsync':'src'},
# Change these per distribution
data_files = [('usr/share/man/man1', ['doc/danboorsync.1']),
('usr/share/licenses/imgfetch/LICENSE', ['doc/LICENSE'])],
scripts = ['bin/danboorsync'],
name = 'danboorsync'
)
<commit_msg>CHange script installation to /usr/bin/ instead of /bin<commit_after>
|
#! /usr/bin/env python3
from distutils.core import setup
setup(
description = 'File downloader for danbooru',
author = 'Todd Gaunt',
url = 'https://www.github.com/toddgaunt/danboorsync',
download_url = 'https://www.github.com/toddgaunt/danboorsync',
author_email = 'toddgaunt@protonmail.ch',
version = '1.0',
packages = ['danboorsync'],
package_dir = {'danboorsync':'src'},
# Change these per distribution
data_files = [('usr/share/man/man1', ['doc/danboorsync.1']),
('usr/share/licenses/imgfetch/LICENSE', ['doc/LICENSE'])],
scripts = ['/usr/bin/danboorsync'],
name = 'danboorsync'
)
|
#! /usr/bin/env python3
from distutils.core import setup
setup(
description = 'File downloader for danbooru',
author = 'Todd Gaunt',
url = 'https://www.github.com/toddgaunt/danboorsync',
download_url = 'https://www.github.com/toddgaunt/danboorsync',
author_email = 'toddgaunt@protonmail.ch',
version = '1.0',
packages = ['danboorsync'],
package_dir = {'danboorsync':'src'},
# Change these per distribution
data_files = [('usr/share/man/man1', ['doc/danboorsync.1']),
('usr/share/licenses/imgfetch/LICENSE', ['doc/LICENSE'])],
scripts = ['bin/danboorsync'],
name = 'danboorsync'
)
CHange script installation to /usr/bin/ instead of /bin#! /usr/bin/env python3
from distutils.core import setup
setup(
description = 'File downloader for danbooru',
author = 'Todd Gaunt',
url = 'https://www.github.com/toddgaunt/danboorsync',
download_url = 'https://www.github.com/toddgaunt/danboorsync',
author_email = 'toddgaunt@protonmail.ch',
version = '1.0',
packages = ['danboorsync'],
package_dir = {'danboorsync':'src'},
# Change these per distribution
data_files = [('usr/share/man/man1', ['doc/danboorsync.1']),
('usr/share/licenses/imgfetch/LICENSE', ['doc/LICENSE'])],
scripts = ['/usr/bin/danboorsync'],
name = 'danboorsync'
)
|
<commit_before>#! /usr/bin/env python3
from distutils.core import setup
setup(
description = 'File downloader for danbooru',
author = 'Todd Gaunt',
url = 'https://www.github.com/toddgaunt/danboorsync',
download_url = 'https://www.github.com/toddgaunt/danboorsync',
author_email = 'toddgaunt@protonmail.ch',
version = '1.0',
packages = ['danboorsync'],
package_dir = {'danboorsync':'src'},
# Change these per distribution
data_files = [('usr/share/man/man1', ['doc/danboorsync.1']),
('usr/share/licenses/imgfetch/LICENSE', ['doc/LICENSE'])],
scripts = ['bin/danboorsync'],
name = 'danboorsync'
)
<commit_msg>CHange script installation to /usr/bin/ instead of /bin<commit_after>#! /usr/bin/env python3
from distutils.core import setup
setup(
description = 'File downloader for danbooru',
author = 'Todd Gaunt',
url = 'https://www.github.com/toddgaunt/danboorsync',
download_url = 'https://www.github.com/toddgaunt/danboorsync',
author_email = 'toddgaunt@protonmail.ch',
version = '1.0',
packages = ['danboorsync'],
package_dir = {'danboorsync':'src'},
# Change these per distribution
data_files = [('usr/share/man/man1', ['doc/danboorsync.1']),
('usr/share/licenses/imgfetch/LICENSE', ['doc/LICENSE'])],
scripts = ['/usr/bin/danboorsync'],
name = 'danboorsync'
)
|
e05b65505c95f3b796279d115aa27a0cb73e6b44
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
version = '1.2.4'
setup(name='switchboard',
version=version,
description="Feature flipper for Pyramid, Pylons, or TurboGears apps.",
# http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='switches feature flipper pyramid pylons turbogears',
author='Kyle Adams',
author_email='kadams54@users.sourceforge.net',
url='http://sf.net/projects/switchboardpy',
download_url='https://sf.net/projects/switchboardpy/files/latest',
license='Apache License',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
install_requires=[
'pymongo == 2.3',
'blinker >= 1.2',
'WebOb >= 0.9',
'Mako >= 0.9',
],
zip_safe=False,
tests_require=[
'nose',
'mock',
'bottle',
],
test_suite='nose.collector',
)
|
from setuptools import setup, find_packages
version = '1.2.4'
setup(name='switchboard',
version=version,
description="Feature flipper for Pyramid, Pylons, or TurboGears apps.",
# http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='switches feature flipper pyramid pylons turbogears',
author='Kyle Adams',
author_email='kadams54@users.noreply.github.com',
url='https://github.com/switchboardpy/switchboard/',
download_url='https://github.com/switchboardpy/switchboard/releases',
license='Apache License',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
install_requires=[
'pymongo == 2.3',
'blinker >= 1.2',
'WebOb >= 0.9',
'Mako >= 0.9',
],
zip_safe=False,
tests_require=[
'nose',
'mock',
'bottle',
],
test_suite='nose.collector',
)
|
Switch from SF to Github for canonical links.
|
Switch from SF to Github for canonical links.
|
Python
|
apache-2.0
|
kadams54/switchboard,switchboardpy/switchboard
|
from setuptools import setup, find_packages
version = '1.2.4'
setup(name='switchboard',
version=version,
description="Feature flipper for Pyramid, Pylons, or TurboGears apps.",
# http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='switches feature flipper pyramid pylons turbogears',
author='Kyle Adams',
author_email='kadams54@users.sourceforge.net',
url='http://sf.net/projects/switchboardpy',
download_url='https://sf.net/projects/switchboardpy/files/latest',
license='Apache License',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
install_requires=[
'pymongo == 2.3',
'blinker >= 1.2',
'WebOb >= 0.9',
'Mako >= 0.9',
],
zip_safe=False,
tests_require=[
'nose',
'mock',
'bottle',
],
test_suite='nose.collector',
)
Switch from SF to Github for canonical links.
|
from setuptools import setup, find_packages
version = '1.2.4'
setup(name='switchboard',
version=version,
description="Feature flipper for Pyramid, Pylons, or TurboGears apps.",
# http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='switches feature flipper pyramid pylons turbogears',
author='Kyle Adams',
author_email='kadams54@users.noreply.github.com',
url='https://github.com/switchboardpy/switchboard/',
download_url='https://github.com/switchboardpy/switchboard/releases',
license='Apache License',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
install_requires=[
'pymongo == 2.3',
'blinker >= 1.2',
'WebOb >= 0.9',
'Mako >= 0.9',
],
zip_safe=False,
tests_require=[
'nose',
'mock',
'bottle',
],
test_suite='nose.collector',
)
|
<commit_before>from setuptools import setup, find_packages
version = '1.2.4'
setup(name='switchboard',
version=version,
description="Feature flipper for Pyramid, Pylons, or TurboGears apps.",
# http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='switches feature flipper pyramid pylons turbogears',
author='Kyle Adams',
author_email='kadams54@users.sourceforge.net',
url='http://sf.net/projects/switchboardpy',
download_url='https://sf.net/projects/switchboardpy/files/latest',
license='Apache License',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
install_requires=[
'pymongo == 2.3',
'blinker >= 1.2',
'WebOb >= 0.9',
'Mako >= 0.9',
],
zip_safe=False,
tests_require=[
'nose',
'mock',
'bottle',
],
test_suite='nose.collector',
)
<commit_msg>Switch from SF to Github for canonical links.<commit_after>
|
from setuptools import setup, find_packages
version = '1.2.4'
setup(name='switchboard',
version=version,
description="Feature flipper for Pyramid, Pylons, or TurboGears apps.",
# http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='switches feature flipper pyramid pylons turbogears',
author='Kyle Adams',
author_email='kadams54@users.noreply.github.com',
url='https://github.com/switchboardpy/switchboard/',
download_url='https://github.com/switchboardpy/switchboard/releases',
license='Apache License',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
install_requires=[
'pymongo == 2.3',
'blinker >= 1.2',
'WebOb >= 0.9',
'Mako >= 0.9',
],
zip_safe=False,
tests_require=[
'nose',
'mock',
'bottle',
],
test_suite='nose.collector',
)
|
from setuptools import setup, find_packages
version = '1.2.4'
setup(name='switchboard',
version=version,
description="Feature flipper for Pyramid, Pylons, or TurboGears apps.",
# http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='switches feature flipper pyramid pylons turbogears',
author='Kyle Adams',
author_email='kadams54@users.sourceforge.net',
url='http://sf.net/projects/switchboardpy',
download_url='https://sf.net/projects/switchboardpy/files/latest',
license='Apache License',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
install_requires=[
'pymongo == 2.3',
'blinker >= 1.2',
'WebOb >= 0.9',
'Mako >= 0.9',
],
zip_safe=False,
tests_require=[
'nose',
'mock',
'bottle',
],
test_suite='nose.collector',
)
Switch from SF to Github for canonical links.from setuptools import setup, find_packages
version = '1.2.4'
setup(name='switchboard',
version=version,
description="Feature flipper for Pyramid, Pylons, or TurboGears apps.",
# http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='switches feature flipper pyramid pylons turbogears',
author='Kyle Adams',
author_email='kadams54@users.noreply.github.com',
url='https://github.com/switchboardpy/switchboard/',
download_url='https://github.com/switchboardpy/switchboard/releases',
license='Apache License',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
install_requires=[
'pymongo == 2.3',
'blinker >= 1.2',
'WebOb >= 0.9',
'Mako >= 0.9',
],
zip_safe=False,
tests_require=[
'nose',
'mock',
'bottle',
],
test_suite='nose.collector',
)
|
<commit_before>from setuptools import setup, find_packages
version = '1.2.4'
setup(name='switchboard',
version=version,
description="Feature flipper for Pyramid, Pylons, or TurboGears apps.",
# http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='switches feature flipper pyramid pylons turbogears',
author='Kyle Adams',
author_email='kadams54@users.sourceforge.net',
url='http://sf.net/projects/switchboardpy',
download_url='https://sf.net/projects/switchboardpy/files/latest',
license='Apache License',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
install_requires=[
'pymongo == 2.3',
'blinker >= 1.2',
'WebOb >= 0.9',
'Mako >= 0.9',
],
zip_safe=False,
tests_require=[
'nose',
'mock',
'bottle',
],
test_suite='nose.collector',
)
<commit_msg>Switch from SF to Github for canonical links.<commit_after>from setuptools import setup, find_packages
version = '1.2.4'
setup(name='switchboard',
version=version,
description="Feature flipper for Pyramid, Pylons, or TurboGears apps.",
# http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='switches feature flipper pyramid pylons turbogears',
author='Kyle Adams',
author_email='kadams54@users.noreply.github.com',
url='https://github.com/switchboardpy/switchboard/',
download_url='https://github.com/switchboardpy/switchboard/releases',
license='Apache License',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
install_requires=[
'pymongo == 2.3',
'blinker >= 1.2',
'WebOb >= 0.9',
'Mako >= 0.9',
],
zip_safe=False,
tests_require=[
'nose',
'mock',
'bottle',
],
test_suite='nose.collector',
)
|
f143a01db4da51052bc08ce7ccf8f9f895c05edb
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name = 'RestApi',
packages = ['RestApi'], # this must be the same as the name above
version = '0.0.1',
description = 'Create Rest Api',
author = 'Shridhar Patil',
author_email = 'shridharpatil2792@gmail.com',
url = 'https://github.com/shridarpatil/Flask-RestApi', #URL to the github repo
download_url = 'https://github.com/shridarpatil/Flask-RestApi/archive/0.0.1.tar.gz',
keywords = ['RestApi', 'rest api', 'Rest Api'], # arbitrary keywords
classifiers = [],
install_requires=[
'json', 'pymysql', 'flask',
],
)
|
from distutils.core import setup
setup(
name = 'restapi',
packages = ['restapi'], # this must be the same as the name above
version = '0.0.1',
description = 'Create Rest Api',
author = 'Shridhar Patil',
author_email = 'shridharpatil2792@gmail.com',
url = 'https://github.com/shridarpatil/Flask-RestApi', #URL to the github repo
download_url = 'https://github.com/shridarpatil/Flask-RestApi/archive/0.0.1.tar.gz',
keywords = ['RestApi', 'rest api', 'Rest Api'], # arbitrary keywords
classifiers = [],
install_requires=[
'json', 'pymysql', 'flask',
],
)
|
Package name changed to restapi
|
Package name changed to restapi
|
Python
|
mit
|
shridarpatil/RestApiz,shridarpatil/RestApiz,shridarpatil/RestApiz
|
from distutils.core import setup
setup(
name = 'RestApi',
packages = ['RestApi'], # this must be the same as the name above
version = '0.0.1',
description = 'Create Rest Api',
author = 'Shridhar Patil',
author_email = 'shridharpatil2792@gmail.com',
url = 'https://github.com/shridarpatil/Flask-RestApi', #URL to the github repo
download_url = 'https://github.com/shridarpatil/Flask-RestApi/archive/0.0.1.tar.gz',
keywords = ['RestApi', 'rest api', 'Rest Api'], # arbitrary keywords
classifiers = [],
install_requires=[
'json', 'pymysql', 'flask',
],
)
Package name changed to restapi
|
from distutils.core import setup
setup(
name = 'restapi',
packages = ['restapi'], # this must be the same as the name above
version = '0.0.1',
description = 'Create Rest Api',
author = 'Shridhar Patil',
author_email = 'shridharpatil2792@gmail.com',
url = 'https://github.com/shridarpatil/Flask-RestApi', #URL to the github repo
download_url = 'https://github.com/shridarpatil/Flask-RestApi/archive/0.0.1.tar.gz',
keywords = ['RestApi', 'rest api', 'Rest Api'], # arbitrary keywords
classifiers = [],
install_requires=[
'json', 'pymysql', 'flask',
],
)
|
<commit_before>from distutils.core import setup
setup(
name = 'RestApi',
packages = ['RestApi'], # this must be the same as the name above
version = '0.0.1',
description = 'Create Rest Api',
author = 'Shridhar Patil',
author_email = 'shridharpatil2792@gmail.com',
url = 'https://github.com/shridarpatil/Flask-RestApi', #URL to the github repo
download_url = 'https://github.com/shridarpatil/Flask-RestApi/archive/0.0.1.tar.gz',
keywords = ['RestApi', 'rest api', 'Rest Api'], # arbitrary keywords
classifiers = [],
install_requires=[
'json', 'pymysql', 'flask',
],
)
<commit_msg>Package name changed to restapi<commit_after>
|
from distutils.core import setup
setup(
name = 'restapi',
packages = ['restapi'], # this must be the same as the name above
version = '0.0.1',
description = 'Create Rest Api',
author = 'Shridhar Patil',
author_email = 'shridharpatil2792@gmail.com',
url = 'https://github.com/shridarpatil/Flask-RestApi', #URL to the github repo
download_url = 'https://github.com/shridarpatil/Flask-RestApi/archive/0.0.1.tar.gz',
keywords = ['RestApi', 'rest api', 'Rest Api'], # arbitrary keywords
classifiers = [],
install_requires=[
'json', 'pymysql', 'flask',
],
)
|
from distutils.core import setup
setup(
name = 'RestApi',
packages = ['RestApi'], # this must be the same as the name above
version = '0.0.1',
description = 'Create Rest Api',
author = 'Shridhar Patil',
author_email = 'shridharpatil2792@gmail.com',
url = 'https://github.com/shridarpatil/Flask-RestApi', #URL to the github repo
download_url = 'https://github.com/shridarpatil/Flask-RestApi/archive/0.0.1.tar.gz',
keywords = ['RestApi', 'rest api', 'Rest Api'], # arbitrary keywords
classifiers = [],
install_requires=[
'json', 'pymysql', 'flask',
],
)
Package name changed to restapifrom distutils.core import setup
setup(
name = 'restapi',
packages = ['restapi'], # this must be the same as the name above
version = '0.0.1',
description = 'Create Rest Api',
author = 'Shridhar Patil',
author_email = 'shridharpatil2792@gmail.com',
url = 'https://github.com/shridarpatil/Flask-RestApi', #URL to the github repo
download_url = 'https://github.com/shridarpatil/Flask-RestApi/archive/0.0.1.tar.gz',
keywords = ['RestApi', 'rest api', 'Rest Api'], # arbitrary keywords
classifiers = [],
install_requires=[
'json', 'pymysql', 'flask',
],
)
|
<commit_before>from distutils.core import setup
setup(
name = 'RestApi',
packages = ['RestApi'], # this must be the same as the name above
version = '0.0.1',
description = 'Create Rest Api',
author = 'Shridhar Patil',
author_email = 'shridharpatil2792@gmail.com',
url = 'https://github.com/shridarpatil/Flask-RestApi', #URL to the github repo
download_url = 'https://github.com/shridarpatil/Flask-RestApi/archive/0.0.1.tar.gz',
keywords = ['RestApi', 'rest api', 'Rest Api'], # arbitrary keywords
classifiers = [],
install_requires=[
'json', 'pymysql', 'flask',
],
)
<commit_msg>Package name changed to restapi<commit_after>from distutils.core import setup
setup(
name = 'restapi',
packages = ['restapi'], # this must be the same as the name above
version = '0.0.1',
description = 'Create Rest Api',
author = 'Shridhar Patil',
author_email = 'shridharpatil2792@gmail.com',
url = 'https://github.com/shridarpatil/Flask-RestApi', #URL to the github repo
download_url = 'https://github.com/shridarpatil/Flask-RestApi/archive/0.0.1.tar.gz',
keywords = ['RestApi', 'rest api', 'Rest Api'], # arbitrary keywords
classifiers = [],
install_requires=[
'json', 'pymysql', 'flask',
],
)
|
026ad064ec2a71ba61235b9333deaf1e6b886cb6
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='pycloudfuse',
version="0.01",
description='Fuse (Filesystem in Userspace) interface to Rackspace Cloud Files and Open Stack Object Storage (Swift)',
author='Nick Craig-Wood',
author_email='nick@memset.com',
url="https://github.com/memset/pycloudfuse",
license='MIT',
include_package_data=True,
zip_safe=False,
install_requires=['python-fuse', 'python-cloudfiles', 'python-ftp-cloudfs'],
scripts=['pycloudfuse'],
#packages = find_packages(exclude=['tests', 'debian']),
#tests_require = ["nose"],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Operating System :: Linux',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: MIT License',
],
#test_suite = "nose.collector",
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='pycloudfuse',
version="0.01",
description='Fuse (Filesystem in Userspace) interface to Rackspace Cloud Files and Open Stack Object Storage (Swift)',
author='Nick Craig-Wood',
author_email='nick@memset.com',
url="https://github.com/memset/pycloudfuse",
license='MIT',
include_package_data=True,
zip_safe=False,
install_requires=['fuse-python', 'python-cloudfiles', 'ftp-cloudfs>=0.9'],
scripts=['pycloudfuse'],
#packages = find_packages(exclude=['tests', 'debian']),
#tests_require = ["nose"],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Operating System :: Linux',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: MIT License',
],
#test_suite = "nose.collector",
)
|
Fix requires so they work properly
|
Fix requires so they work properly
|
Python
|
mit
|
Memset/pycloudfuse
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='pycloudfuse',
version="0.01",
description='Fuse (Filesystem in Userspace) interface to Rackspace Cloud Files and Open Stack Object Storage (Swift)',
author='Nick Craig-Wood',
author_email='nick@memset.com',
url="https://github.com/memset/pycloudfuse",
license='MIT',
include_package_data=True,
zip_safe=False,
install_requires=['python-fuse', 'python-cloudfiles', 'python-ftp-cloudfs'],
scripts=['pycloudfuse'],
#packages = find_packages(exclude=['tests', 'debian']),
#tests_require = ["nose"],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Operating System :: Linux',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: MIT License',
],
#test_suite = "nose.collector",
)
Fix requires so they work properly
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='pycloudfuse',
version="0.01",
description='Fuse (Filesystem in Userspace) interface to Rackspace Cloud Files and Open Stack Object Storage (Swift)',
author='Nick Craig-Wood',
author_email='nick@memset.com',
url="https://github.com/memset/pycloudfuse",
license='MIT',
include_package_data=True,
zip_safe=False,
install_requires=['fuse-python', 'python-cloudfiles', 'ftp-cloudfs>=0.9'],
scripts=['pycloudfuse'],
#packages = find_packages(exclude=['tests', 'debian']),
#tests_require = ["nose"],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Operating System :: Linux',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: MIT License',
],
#test_suite = "nose.collector",
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='pycloudfuse',
version="0.01",
description='Fuse (Filesystem in Userspace) interface to Rackspace Cloud Files and Open Stack Object Storage (Swift)',
author='Nick Craig-Wood',
author_email='nick@memset.com',
url="https://github.com/memset/pycloudfuse",
license='MIT',
include_package_data=True,
zip_safe=False,
install_requires=['python-fuse', 'python-cloudfiles', 'python-ftp-cloudfs'],
scripts=['pycloudfuse'],
#packages = find_packages(exclude=['tests', 'debian']),
#tests_require = ["nose"],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Operating System :: Linux',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: MIT License',
],
#test_suite = "nose.collector",
)
<commit_msg>Fix requires so they work properly<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='pycloudfuse',
version="0.01",
description='Fuse (Filesystem in Userspace) interface to Rackspace Cloud Files and Open Stack Object Storage (Swift)',
author='Nick Craig-Wood',
author_email='nick@memset.com',
url="https://github.com/memset/pycloudfuse",
license='MIT',
include_package_data=True,
zip_safe=False,
install_requires=['fuse-python', 'python-cloudfiles', 'ftp-cloudfs>=0.9'],
scripts=['pycloudfuse'],
#packages = find_packages(exclude=['tests', 'debian']),
#tests_require = ["nose"],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Operating System :: Linux',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: MIT License',
],
#test_suite = "nose.collector",
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='pycloudfuse',
version="0.01",
description='Fuse (Filesystem in Userspace) interface to Rackspace Cloud Files and Open Stack Object Storage (Swift)',
author='Nick Craig-Wood',
author_email='nick@memset.com',
url="https://github.com/memset/pycloudfuse",
license='MIT',
include_package_data=True,
zip_safe=False,
install_requires=['python-fuse', 'python-cloudfiles', 'python-ftp-cloudfs'],
scripts=['pycloudfuse'],
#packages = find_packages(exclude=['tests', 'debian']),
#tests_require = ["nose"],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Operating System :: Linux',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: MIT License',
],
#test_suite = "nose.collector",
)
Fix requires so they work properly#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='pycloudfuse',
version="0.01",
description='Fuse (Filesystem in Userspace) interface to Rackspace Cloud Files and Open Stack Object Storage (Swift)',
author='Nick Craig-Wood',
author_email='nick@memset.com',
url="https://github.com/memset/pycloudfuse",
license='MIT',
include_package_data=True,
zip_safe=False,
install_requires=['fuse-python', 'python-cloudfiles', 'ftp-cloudfs>=0.9'],
scripts=['pycloudfuse'],
#packages = find_packages(exclude=['tests', 'debian']),
#tests_require = ["nose"],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Operating System :: Linux',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: MIT License',
],
#test_suite = "nose.collector",
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='pycloudfuse',
version="0.01",
description='Fuse (Filesystem in Userspace) interface to Rackspace Cloud Files and Open Stack Object Storage (Swift)',
author='Nick Craig-Wood',
author_email='nick@memset.com',
url="https://github.com/memset/pycloudfuse",
license='MIT',
include_package_data=True,
zip_safe=False,
install_requires=['python-fuse', 'python-cloudfiles', 'python-ftp-cloudfs'],
scripts=['pycloudfuse'],
#packages = find_packages(exclude=['tests', 'debian']),
#tests_require = ["nose"],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Operating System :: Linux',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: MIT License',
],
#test_suite = "nose.collector",
)
<commit_msg>Fix requires so they work properly<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='pycloudfuse',
version="0.01",
description='Fuse (Filesystem in Userspace) interface to Rackspace Cloud Files and Open Stack Object Storage (Swift)',
author='Nick Craig-Wood',
author_email='nick@memset.com',
url="https://github.com/memset/pycloudfuse",
license='MIT',
include_package_data=True,
zip_safe=False,
install_requires=['fuse-python', 'python-cloudfiles', 'ftp-cloudfs>=0.9'],
scripts=['pycloudfuse'],
#packages = find_packages(exclude=['tests', 'debian']),
#tests_require = ["nose"],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Programming Language :: Python',
'Operating System :: Linux',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: MIT License',
],
#test_suite = "nose.collector",
)
|
41bba7238fcbe2624ccd294c8ac54f805a781603
|
setup.py
|
setup.py
|
from distutils.core import setup
import py2exe
setup(
console=[{'script': 'check_forbidden.py', 'version': '1.3.0', }],
options={'py2exe': {'bundle_files': 2}}
)
'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
'''
|
'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
Libraries used:
import tkinter
import tkinter.filedialog
import csv
import os
import re
from time import sleep
import zipfile
'''
from distutils.core import setup
import py2exe
setup(
console=[{'author': 'Shun Sakurai',
'script': 'check_forbidden.py',
'version': '1.4.0',
}],
options={'py2exe': {
'bundle_files': 2,
'compressed': True,
'excludes': ['_hashlib', '_frozen_importlib', 'argparse', '_lzma', '_bz2', '_ssl', 'calendar', 'datetime', 'difflib', 'doctest', 'inspect', 'locale', 'optparse', 'pdb', 'pickle', 'pydoc', 'pyexpat', 'pyreadline'],
}}
)
|
Reduce the size of the dist folder
|
Reduce the size of the dist folder
|
Python
|
mit
|
ShunSakurai/check_forbidden,ShunSakurai/check_forbidden
|
from distutils.core import setup
import py2exe
setup(
console=[{'script': 'check_forbidden.py', 'version': '1.3.0', }],
options={'py2exe': {'bundle_files': 2}}
)
'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
'''
Reduce the size of the dist folder
|
'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
Libraries used:
import tkinter
import tkinter.filedialog
import csv
import os
import re
from time import sleep
import zipfile
'''
from distutils.core import setup
import py2exe
setup(
console=[{'author': 'Shun Sakurai',
'script': 'check_forbidden.py',
'version': '1.4.0',
}],
options={'py2exe': {
'bundle_files': 2,
'compressed': True,
'excludes': ['_hashlib', '_frozen_importlib', 'argparse', '_lzma', '_bz2', '_ssl', 'calendar', 'datetime', 'difflib', 'doctest', 'inspect', 'locale', 'optparse', 'pdb', 'pickle', 'pydoc', 'pyexpat', 'pyreadline'],
}}
)
|
<commit_before>from distutils.core import setup
import py2exe
setup(
console=[{'script': 'check_forbidden.py', 'version': '1.3.0', }],
options={'py2exe': {'bundle_files': 2}}
)
'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
'''
<commit_msg>Reduce the size of the dist folder<commit_after>
|
'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
Libraries used:
import tkinter
import tkinter.filedialog
import csv
import os
import re
from time import sleep
import zipfile
'''
from distutils.core import setup
import py2exe
setup(
console=[{'author': 'Shun Sakurai',
'script': 'check_forbidden.py',
'version': '1.4.0',
}],
options={'py2exe': {
'bundle_files': 2,
'compressed': True,
'excludes': ['_hashlib', '_frozen_importlib', 'argparse', '_lzma', '_bz2', '_ssl', 'calendar', 'datetime', 'difflib', 'doctest', 'inspect', 'locale', 'optparse', 'pdb', 'pickle', 'pydoc', 'pyexpat', 'pyreadline'],
}}
)
|
from distutils.core import setup
import py2exe
setup(
console=[{'script': 'check_forbidden.py', 'version': '1.3.0', }],
options={'py2exe': {'bundle_files': 2}}
)
'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
'''
Reduce the size of the dist folder'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
Libraries used:
import tkinter
import tkinter.filedialog
import csv
import os
import re
from time import sleep
import zipfile
'''
from distutils.core import setup
import py2exe
setup(
console=[{'author': 'Shun Sakurai',
'script': 'check_forbidden.py',
'version': '1.4.0',
}],
options={'py2exe': {
'bundle_files': 2,
'compressed': True,
'excludes': ['_hashlib', '_frozen_importlib', 'argparse', '_lzma', '_bz2', '_ssl', 'calendar', 'datetime', 'difflib', 'doctest', 'inspect', 'locale', 'optparse', 'pdb', 'pickle', 'pydoc', 'pyexpat', 'pyreadline'],
}}
)
|
<commit_before>from distutils.core import setup
import py2exe
setup(
console=[{'script': 'check_forbidden.py', 'version': '1.3.0', }],
options={'py2exe': {'bundle_files': 2}}
)
'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
'''
<commit_msg>Reduce the size of the dist folder<commit_after>'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
Libraries used:
import tkinter
import tkinter.filedialog
import csv
import os
import re
from time import sleep
import zipfile
'''
from distutils.core import setup
import py2exe
setup(
console=[{'author': 'Shun Sakurai',
'script': 'check_forbidden.py',
'version': '1.4.0',
}],
options={'py2exe': {
'bundle_files': 2,
'compressed': True,
'excludes': ['_hashlib', '_frozen_importlib', 'argparse', '_lzma', '_bz2', '_ssl', 'calendar', 'datetime', 'difflib', 'doctest', 'inspect', 'locale', 'optparse', 'pdb', 'pickle', 'pydoc', 'pyexpat', 'pyreadline'],
}}
)
|
9f07296e45b790d622800b48bde84788b5cb7d30
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
__version_info__ = ('0', '1', '2')
__version__ = '.'.join(__version_info__)
setup(
name="staticjinja",
version=__version__,
description="jinja based static site generator",
author="Ceasar Bautista",
author_email="cbautista2010@gmail.com",
url="https://github.com/Ceasar/staticjinja",
keywords=["jinja", "static", "website"],
packages=["staticjinja"],
install_requires=["easywatch", "jinja2"],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
],
use_2to3=True,
)
|
from setuptools import setup, find_packages
__version_info__ = ('0', '1', '3')
__version__ = '.'.join(__version_info__)
setup(
name="staticjinja",
version=__version__,
description="jinja based static site generator",
author="Ceasar Bautista",
author_email="cbautista2010@gmail.com",
url="https://github.com/Ceasar/staticjinja",
keywords=["jinja", "static", "website"],
packages=["staticjinja"],
install_requires=["easywatch", "jinja2"],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
],
use_2to3=True,
)
|
Set current version number to 0.13
|
Set current version number to 0.13
|
Python
|
mit
|
jerivas/staticjinja,jerivas/staticjinja,Ceasar/staticjinja,Ceasar/staticjinja
|
from setuptools import setup, find_packages
__version_info__ = ('0', '1', '2')
__version__ = '.'.join(__version_info__)
setup(
name="staticjinja",
version=__version__,
description="jinja based static site generator",
author="Ceasar Bautista",
author_email="cbautista2010@gmail.com",
url="https://github.com/Ceasar/staticjinja",
keywords=["jinja", "static", "website"],
packages=["staticjinja"],
install_requires=["easywatch", "jinja2"],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
],
use_2to3=True,
)
Set current version number to 0.13
|
from setuptools import setup, find_packages
__version_info__ = ('0', '1', '3')
__version__ = '.'.join(__version_info__)
setup(
name="staticjinja",
version=__version__,
description="jinja based static site generator",
author="Ceasar Bautista",
author_email="cbautista2010@gmail.com",
url="https://github.com/Ceasar/staticjinja",
keywords=["jinja", "static", "website"],
packages=["staticjinja"],
install_requires=["easywatch", "jinja2"],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
],
use_2to3=True,
)
|
<commit_before>from setuptools import setup, find_packages
__version_info__ = ('0', '1', '2')
__version__ = '.'.join(__version_info__)
setup(
name="staticjinja",
version=__version__,
description="jinja based static site generator",
author="Ceasar Bautista",
author_email="cbautista2010@gmail.com",
url="https://github.com/Ceasar/staticjinja",
keywords=["jinja", "static", "website"],
packages=["staticjinja"],
install_requires=["easywatch", "jinja2"],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
],
use_2to3=True,
)
<commit_msg>Set current version number to 0.13<commit_after>
|
from setuptools import setup, find_packages
__version_info__ = ('0', '1', '3')
__version__ = '.'.join(__version_info__)
setup(
name="staticjinja",
version=__version__,
description="jinja based static site generator",
author="Ceasar Bautista",
author_email="cbautista2010@gmail.com",
url="https://github.com/Ceasar/staticjinja",
keywords=["jinja", "static", "website"],
packages=["staticjinja"],
install_requires=["easywatch", "jinja2"],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
],
use_2to3=True,
)
|
from setuptools import setup, find_packages
__version_info__ = ('0', '1', '2')
__version__ = '.'.join(__version_info__)
setup(
name="staticjinja",
version=__version__,
description="jinja based static site generator",
author="Ceasar Bautista",
author_email="cbautista2010@gmail.com",
url="https://github.com/Ceasar/staticjinja",
keywords=["jinja", "static", "website"],
packages=["staticjinja"],
install_requires=["easywatch", "jinja2"],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
],
use_2to3=True,
)
Set current version number to 0.13from setuptools import setup, find_packages
__version_info__ = ('0', '1', '3')
__version__ = '.'.join(__version_info__)
setup(
name="staticjinja",
version=__version__,
description="jinja based static site generator",
author="Ceasar Bautista",
author_email="cbautista2010@gmail.com",
url="https://github.com/Ceasar/staticjinja",
keywords=["jinja", "static", "website"],
packages=["staticjinja"],
install_requires=["easywatch", "jinja2"],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
],
use_2to3=True,
)
|
<commit_before>from setuptools import setup, find_packages
__version_info__ = ('0', '1', '2')
__version__ = '.'.join(__version_info__)
setup(
name="staticjinja",
version=__version__,
description="jinja based static site generator",
author="Ceasar Bautista",
author_email="cbautista2010@gmail.com",
url="https://github.com/Ceasar/staticjinja",
keywords=["jinja", "static", "website"],
packages=["staticjinja"],
install_requires=["easywatch", "jinja2"],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
],
use_2to3=True,
)
<commit_msg>Set current version number to 0.13<commit_after>from setuptools import setup, find_packages
__version_info__ = ('0', '1', '3')
__version__ = '.'.join(__version_info__)
setup(
name="staticjinja",
version=__version__,
description="jinja based static site generator",
author="Ceasar Bautista",
author_email="cbautista2010@gmail.com",
url="https://github.com/Ceasar/staticjinja",
keywords=["jinja", "static", "website"],
packages=["staticjinja"],
install_requires=["easywatch", "jinja2"],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
],
use_2to3=True,
)
|
cb94fb2359bcd0daa65585db3eb1f4c08065d39c
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='iotp',
author='Dan Mills',
version='0.0.2',
description='A CLI app for TOTP',
py_modules=['iotp'],
install_requires=[
'click',
'pyotp',
'appdirs',
'pyperclip'
],
entry_points={
'console_scripts': [
'iotp=iotp:cli'
]
}
)
|
from setuptools import setup
setup(
name='iotp',
author='Dan Mills',
version='0.0.2',
license='Apache 2.0',
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
description='A CLI app for TOTP',
py_modules=['iotp'],
install_requires=[
'click',
'pyotp',
'appdirs',
'pyperclip'
],
entry_points={
'console_scripts': [
'iotp=iotp:cli'
]
}
)
|
Add license and classifiers for PyPi
|
Add license and classifiers for PyPi
|
Python
|
apache-2.0
|
evilhamsterman/iotp
|
from setuptools import setup
setup(
name='iotp',
author='Dan Mills',
version='0.0.2',
description='A CLI app for TOTP',
py_modules=['iotp'],
install_requires=[
'click',
'pyotp',
'appdirs',
'pyperclip'
],
entry_points={
'console_scripts': [
'iotp=iotp:cli'
]
}
)
Add license and classifiers for PyPi
|
from setuptools import setup
setup(
name='iotp',
author='Dan Mills',
version='0.0.2',
license='Apache 2.0',
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
description='A CLI app for TOTP',
py_modules=['iotp'],
install_requires=[
'click',
'pyotp',
'appdirs',
'pyperclip'
],
entry_points={
'console_scripts': [
'iotp=iotp:cli'
]
}
)
|
<commit_before>from setuptools import setup
setup(
name='iotp',
author='Dan Mills',
version='0.0.2',
description='A CLI app for TOTP',
py_modules=['iotp'],
install_requires=[
'click',
'pyotp',
'appdirs',
'pyperclip'
],
entry_points={
'console_scripts': [
'iotp=iotp:cli'
]
}
)
<commit_msg>Add license and classifiers for PyPi<commit_after>
|
from setuptools import setup
setup(
name='iotp',
author='Dan Mills',
version='0.0.2',
license='Apache 2.0',
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
description='A CLI app for TOTP',
py_modules=['iotp'],
install_requires=[
'click',
'pyotp',
'appdirs',
'pyperclip'
],
entry_points={
'console_scripts': [
'iotp=iotp:cli'
]
}
)
|
from setuptools import setup
setup(
name='iotp',
author='Dan Mills',
version='0.0.2',
description='A CLI app for TOTP',
py_modules=['iotp'],
install_requires=[
'click',
'pyotp',
'appdirs',
'pyperclip'
],
entry_points={
'console_scripts': [
'iotp=iotp:cli'
]
}
)
Add license and classifiers for PyPifrom setuptools import setup
setup(
name='iotp',
author='Dan Mills',
version='0.0.2',
license='Apache 2.0',
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
description='A CLI app for TOTP',
py_modules=['iotp'],
install_requires=[
'click',
'pyotp',
'appdirs',
'pyperclip'
],
entry_points={
'console_scripts': [
'iotp=iotp:cli'
]
}
)
|
<commit_before>from setuptools import setup
setup(
name='iotp',
author='Dan Mills',
version='0.0.2',
description='A CLI app for TOTP',
py_modules=['iotp'],
install_requires=[
'click',
'pyotp',
'appdirs',
'pyperclip'
],
entry_points={
'console_scripts': [
'iotp=iotp:cli'
]
}
)
<commit_msg>Add license and classifiers for PyPi<commit_after>from setuptools import setup
setup(
name='iotp',
author='Dan Mills',
version='0.0.2',
license='Apache 2.0',
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'
],
description='A CLI app for TOTP',
py_modules=['iotp'],
install_requires=[
'click',
'pyotp',
'appdirs',
'pyperclip'
],
entry_points={
'console_scripts': [
'iotp=iotp:cli'
]
}
)
|
4ec2a3ba065de6cfaae295ea17d01a2b115e4a7b
|
setup.py
|
setup.py
|
import setuptools
setuptools.setup(
name='ChatExchange6',
version='1.0',
url='https://github.com/ByteCommander/ChatExchange6',
packages=[
'chatexchange6'
],
install_requires=[
'beautifulsoup4>=4.3.2',
'requests>=2.2.1',
'websocket-client>=0.13.0',
# only for dev:
'coverage>=3.7.1',
'epydoc>=3.0.1',
'httmock>=1.2.2',
'pytest-capturelog>=0.7',
'pytest-timeout>=0.3',
'pytest>=2.7.3',
'py>=1.4.29'
]
)
|
import setuptools
setuptools.setup(
name='ChatExchange6',
version='1.0',
url='https://github.com/ByteCommander/ChatExchange6',
packages=[
'chatexchange6'
],
install_requires=[
'beautifulsoup4>=4.3.2',
'requests>=2.2.1',
'websocket-client>=0.13.0',
# only for dev:
'coverage=3.7.1',
'epydoc>=3.0.1',
'httmock>=1.2.2',
'pytest-capturelog>=0.7',
'pytest-timeout>=0.3',
'pytest>=2.7.3',
'py>=1.4.29'
]
)
|
Use Coverage 3.7.1 so it passes on Python 3.2
|
Use Coverage 3.7.1 so it passes on Python 3.2
|
Python
|
apache-2.0
|
ByteCommander/ChatExchange6,ByteCommander/ChatExchange6,Charcoal-SE/ChatExchange,Charcoal-SE/ChatExchange
|
import setuptools
setuptools.setup(
name='ChatExchange6',
version='1.0',
url='https://github.com/ByteCommander/ChatExchange6',
packages=[
'chatexchange6'
],
install_requires=[
'beautifulsoup4>=4.3.2',
'requests>=2.2.1',
'websocket-client>=0.13.0',
# only for dev:
'coverage>=3.7.1',
'epydoc>=3.0.1',
'httmock>=1.2.2',
'pytest-capturelog>=0.7',
'pytest-timeout>=0.3',
'pytest>=2.7.3',
'py>=1.4.29'
]
)
Use Coverage 3.7.1 so it passes on Python 3.2
|
import setuptools
setuptools.setup(
name='ChatExchange6',
version='1.0',
url='https://github.com/ByteCommander/ChatExchange6',
packages=[
'chatexchange6'
],
install_requires=[
'beautifulsoup4>=4.3.2',
'requests>=2.2.1',
'websocket-client>=0.13.0',
# only for dev:
'coverage=3.7.1',
'epydoc>=3.0.1',
'httmock>=1.2.2',
'pytest-capturelog>=0.7',
'pytest-timeout>=0.3',
'pytest>=2.7.3',
'py>=1.4.29'
]
)
|
<commit_before>import setuptools
setuptools.setup(
name='ChatExchange6',
version='1.0',
url='https://github.com/ByteCommander/ChatExchange6',
packages=[
'chatexchange6'
],
install_requires=[
'beautifulsoup4>=4.3.2',
'requests>=2.2.1',
'websocket-client>=0.13.0',
# only for dev:
'coverage>=3.7.1',
'epydoc>=3.0.1',
'httmock>=1.2.2',
'pytest-capturelog>=0.7',
'pytest-timeout>=0.3',
'pytest>=2.7.3',
'py>=1.4.29'
]
)
<commit_msg>Use Coverage 3.7.1 so it passes on Python 3.2<commit_after>
|
import setuptools
setuptools.setup(
name='ChatExchange6',
version='1.0',
url='https://github.com/ByteCommander/ChatExchange6',
packages=[
'chatexchange6'
],
install_requires=[
'beautifulsoup4>=4.3.2',
'requests>=2.2.1',
'websocket-client>=0.13.0',
# only for dev:
'coverage=3.7.1',
'epydoc>=3.0.1',
'httmock>=1.2.2',
'pytest-capturelog>=0.7',
'pytest-timeout>=0.3',
'pytest>=2.7.3',
'py>=1.4.29'
]
)
|
import setuptools
setuptools.setup(
name='ChatExchange6',
version='1.0',
url='https://github.com/ByteCommander/ChatExchange6',
packages=[
'chatexchange6'
],
install_requires=[
'beautifulsoup4>=4.3.2',
'requests>=2.2.1',
'websocket-client>=0.13.0',
# only for dev:
'coverage>=3.7.1',
'epydoc>=3.0.1',
'httmock>=1.2.2',
'pytest-capturelog>=0.7',
'pytest-timeout>=0.3',
'pytest>=2.7.3',
'py>=1.4.29'
]
)
Use Coverage 3.7.1 so it passes on Python 3.2import setuptools
setuptools.setup(
name='ChatExchange6',
version='1.0',
url='https://github.com/ByteCommander/ChatExchange6',
packages=[
'chatexchange6'
],
install_requires=[
'beautifulsoup4>=4.3.2',
'requests>=2.2.1',
'websocket-client>=0.13.0',
# only for dev:
'coverage=3.7.1',
'epydoc>=3.0.1',
'httmock>=1.2.2',
'pytest-capturelog>=0.7',
'pytest-timeout>=0.3',
'pytest>=2.7.3',
'py>=1.4.29'
]
)
|
<commit_before>import setuptools
setuptools.setup(
name='ChatExchange6',
version='1.0',
url='https://github.com/ByteCommander/ChatExchange6',
packages=[
'chatexchange6'
],
install_requires=[
'beautifulsoup4>=4.3.2',
'requests>=2.2.1',
'websocket-client>=0.13.0',
# only for dev:
'coverage>=3.7.1',
'epydoc>=3.0.1',
'httmock>=1.2.2',
'pytest-capturelog>=0.7',
'pytest-timeout>=0.3',
'pytest>=2.7.3',
'py>=1.4.29'
]
)
<commit_msg>Use Coverage 3.7.1 so it passes on Python 3.2<commit_after>import setuptools
setuptools.setup(
name='ChatExchange6',
version='1.0',
url='https://github.com/ByteCommander/ChatExchange6',
packages=[
'chatexchange6'
],
install_requires=[
'beautifulsoup4>=4.3.2',
'requests>=2.2.1',
'websocket-client>=0.13.0',
# only for dev:
'coverage=3.7.1',
'epydoc>=3.0.1',
'httmock>=1.2.2',
'pytest-capturelog>=0.7',
'pytest-timeout>=0.3',
'pytest>=2.7.3',
'py>=1.4.29'
]
)
|
3005aff2b5bee2d3bb832ad86ba67afd12e7857d
|
setup.py
|
setup.py
|
AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.0.15',
'sqlalchemy>=0.7.0',
'MySQL-python',
'pyparsing<2.0.0',
],
zip_safe = False,
license = 'BSD'
)
|
AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.1.0',
'MySQL-python',
],
zip_safe = False,
license = 'BSD'
)
|
Upgrade to using twp.sqlachemy3 3.1.0 which hosts search itself
|
Upgrade to using twp.sqlachemy3 3.1.0 which hosts search itself
thus needing a change to setup.py requirements
|
Python
|
bsd-3-clause
|
tiddlyweb/tiddlywebplugins.mysql
|
AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.0.15',
'sqlalchemy>=0.7.0',
'MySQL-python',
'pyparsing<2.0.0',
],
zip_safe = False,
license = 'BSD'
)
Upgrade to using twp.sqlachemy3 3.1.0 which hosts search itself
thus needing a change to setup.py requirements
|
AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.1.0',
'MySQL-python',
],
zip_safe = False,
license = 'BSD'
)
|
<commit_before>AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.0.15',
'sqlalchemy>=0.7.0',
'MySQL-python',
'pyparsing<2.0.0',
],
zip_safe = False,
license = 'BSD'
)
<commit_msg>Upgrade to using twp.sqlachemy3 3.1.0 which hosts search itself
thus needing a change to setup.py requirements<commit_after>
|
AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.1.0',
'MySQL-python',
],
zip_safe = False,
license = 'BSD'
)
|
AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.0.15',
'sqlalchemy>=0.7.0',
'MySQL-python',
'pyparsing<2.0.0',
],
zip_safe = False,
license = 'BSD'
)
Upgrade to using twp.sqlachemy3 3.1.0 which hosts search itself
thus needing a change to setup.py requirementsAUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.1.0',
'MySQL-python',
],
zip_safe = False,
license = 'BSD'
)
|
<commit_before>AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.0.15',
'sqlalchemy>=0.7.0',
'MySQL-python',
'pyparsing<2.0.0',
],
zip_safe = False,
license = 'BSD'
)
<commit_msg>Upgrade to using twp.sqlachemy3 3.1.0 which hosts search itself
thus needing a change to setup.py requirements<commit_after>AUTHOR = 'Chris Dent'
AUTHOR_EMAIL = 'cdent@peermore.com'
NAME = 'tiddlywebplugins.mysql3'
DESCRIPTION = 'MySQL-based store for tiddlyweb'
VERSION = '3.0.13' # don't forget to update __init__.py too
import os
from setuptools import setup, find_packages
setup(
namespace_packages = ['tiddlywebplugins'],
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = 'http://pypi.python.org/pypi/%s' % NAME,
platforms = 'Posix; MacOS X; Windows',
packages = find_packages(exclude=['test']),
install_requires = ['setuptools',
'tiddlyweb>=1.4.2',
'tiddlywebplugins.sqlalchemy3>=3.1.0',
'MySQL-python',
],
zip_safe = False,
license = 'BSD'
)
|
dc3d26e45f25c6d65e34f82e21276623c4821023
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from rest_framework_gis import __version__
setup(
name='django-rest-framework-gis',
version=__version__,
url='https://github.com/dmeehan/django-rest-framework-gis',
license='',
author='Douglas Meehan',
author_email='dmeehan@gmail.com',
description='Geographic add-ons for Django Rest Framework',
packages=find_packages()
)
|
from setuptools import setup, find_packages
from rest_framework_gis import __version__
setup(
name='djangorestframework-gis',
version=__version__,
url='https://github.com/dmeehan/django-rest-framework-gis',
license='',
author='Douglas Meehan',
author_email='dmeehan@gmail.com',
description='Geographic add-ons for Django Rest Framework',
packages=find_packages()
)
|
Rename package from django-rest-framework-gis to djangorestframework-gis
|
Rename package from django-rest-framework-gis to djangorestframework-gis
|
Python
|
mit
|
barseghyanartur/django-rest-framework-gis,arjenvrielink/django-rest-framework-gis,illing2005/django-rest-framework-gis,djangonauts/django-rest-framework-gis,nmandery/django-rest-framework-gis,bopo/django-rest-framework-gis,sh4wn/django-rest-framework-gis,manhg/django-rest-framework-gis,nmandery/django-rest-framework-gis,pglotov/django-rest-framework-gis
|
from setuptools import setup, find_packages
from rest_framework_gis import __version__
setup(
name='django-rest-framework-gis',
version=__version__,
url='https://github.com/dmeehan/django-rest-framework-gis',
license='',
author='Douglas Meehan',
author_email='dmeehan@gmail.com',
description='Geographic add-ons for Django Rest Framework',
packages=find_packages()
)
Rename package from django-rest-framework-gis to djangorestframework-gis
|
from setuptools import setup, find_packages
from rest_framework_gis import __version__
setup(
name='djangorestframework-gis',
version=__version__,
url='https://github.com/dmeehan/django-rest-framework-gis',
license='',
author='Douglas Meehan',
author_email='dmeehan@gmail.com',
description='Geographic add-ons for Django Rest Framework',
packages=find_packages()
)
|
<commit_before>from setuptools import setup, find_packages
from rest_framework_gis import __version__
setup(
name='django-rest-framework-gis',
version=__version__,
url='https://github.com/dmeehan/django-rest-framework-gis',
license='',
author='Douglas Meehan',
author_email='dmeehan@gmail.com',
description='Geographic add-ons for Django Rest Framework',
packages=find_packages()
)
<commit_msg>Rename package from django-rest-framework-gis to djangorestframework-gis<commit_after>
|
from setuptools import setup, find_packages
from rest_framework_gis import __version__
setup(
name='djangorestframework-gis',
version=__version__,
url='https://github.com/dmeehan/django-rest-framework-gis',
license='',
author='Douglas Meehan',
author_email='dmeehan@gmail.com',
description='Geographic add-ons for Django Rest Framework',
packages=find_packages()
)
|
from setuptools import setup, find_packages
from rest_framework_gis import __version__
setup(
name='django-rest-framework-gis',
version=__version__,
url='https://github.com/dmeehan/django-rest-framework-gis',
license='',
author='Douglas Meehan',
author_email='dmeehan@gmail.com',
description='Geographic add-ons for Django Rest Framework',
packages=find_packages()
)
Rename package from django-rest-framework-gis to djangorestframework-gisfrom setuptools import setup, find_packages
from rest_framework_gis import __version__
setup(
name='djangorestframework-gis',
version=__version__,
url='https://github.com/dmeehan/django-rest-framework-gis',
license='',
author='Douglas Meehan',
author_email='dmeehan@gmail.com',
description='Geographic add-ons for Django Rest Framework',
packages=find_packages()
)
|
<commit_before>from setuptools import setup, find_packages
from rest_framework_gis import __version__
setup(
name='django-rest-framework-gis',
version=__version__,
url='https://github.com/dmeehan/django-rest-framework-gis',
license='',
author='Douglas Meehan',
author_email='dmeehan@gmail.com',
description='Geographic add-ons for Django Rest Framework',
packages=find_packages()
)
<commit_msg>Rename package from django-rest-framework-gis to djangorestframework-gis<commit_after>from setuptools import setup, find_packages
from rest_framework_gis import __version__
setup(
name='djangorestframework-gis',
version=__version__,
url='https://github.com/dmeehan/django-rest-framework-gis',
license='',
author='Douglas Meehan',
author_email='dmeehan@gmail.com',
description='Geographic add-ons for Django Rest Framework',
packages=find_packages()
)
|
194cb58fe982ad447e9611f29804fd8deb87883b
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pelican-extended-sitemap',
description='sitemap generator plugin for pelican',
# @see http://semver.org/
version='1.2.3',
author='Alexander Herrmann',
author_email='darignac@gmail.com',
license='MIT',
url='https://github.com/dArignac/pelican-extended-sitemap',
long_description=long_description,
packages=[
'extended_sitemap',
'extended_sitemap.tests',
],
package_data={
'extended_sitemap': [
'sitemap-stylesheet.xsl',
'tests/content/articles/*.md',
'tests/content/pages/*.md',
'tests/expected/*.xml',
],
},
requires=[
'pelican'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
|
#!/usr/bin/env python
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pelican-extended-sitemap',
description='sitemap generator plugin for pelican',
# @see http://semver.org/
version='1.2.3',
author='Alexander Herrmann',
author_email='darignac@gmail.com',
license='MIT',
url='https://github.com/dArignac/pelican-extended-sitemap',
long_description=long_description,
packages=[
'extended_sitemap',
'extended_sitemap.tests',
],
package_data={
'extended_sitemap': [
'sitemap-stylesheet.xsl',
'tests/content/articles/*.md',
'tests/content/pages/*.md',
'tests/expected/*.xml',
],
},
requires=[
'pelican'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Framework :: Pelican :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
|
Add Framework :: Pelican :: Plugins classifer for PyPI
|
Add Framework :: Pelican :: Plugins classifer for PyPI
|
Python
|
mit
|
dArignac/pelican-extended-sitemap
|
#!/usr/bin/env python
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pelican-extended-sitemap',
description='sitemap generator plugin for pelican',
# @see http://semver.org/
version='1.2.3',
author='Alexander Herrmann',
author_email='darignac@gmail.com',
license='MIT',
url='https://github.com/dArignac/pelican-extended-sitemap',
long_description=long_description,
packages=[
'extended_sitemap',
'extended_sitemap.tests',
],
package_data={
'extended_sitemap': [
'sitemap-stylesheet.xsl',
'tests/content/articles/*.md',
'tests/content/pages/*.md',
'tests/expected/*.xml',
],
},
requires=[
'pelican'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
Add Framework :: Pelican :: Plugins classifer for PyPI
|
#!/usr/bin/env python
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pelican-extended-sitemap',
description='sitemap generator plugin for pelican',
# @see http://semver.org/
version='1.2.3',
author='Alexander Herrmann',
author_email='darignac@gmail.com',
license='MIT',
url='https://github.com/dArignac/pelican-extended-sitemap',
long_description=long_description,
packages=[
'extended_sitemap',
'extended_sitemap.tests',
],
package_data={
'extended_sitemap': [
'sitemap-stylesheet.xsl',
'tests/content/articles/*.md',
'tests/content/pages/*.md',
'tests/expected/*.xml',
],
},
requires=[
'pelican'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Framework :: Pelican :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
|
<commit_before>#!/usr/bin/env python
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pelican-extended-sitemap',
description='sitemap generator plugin for pelican',
# @see http://semver.org/
version='1.2.3',
author='Alexander Herrmann',
author_email='darignac@gmail.com',
license='MIT',
url='https://github.com/dArignac/pelican-extended-sitemap',
long_description=long_description,
packages=[
'extended_sitemap',
'extended_sitemap.tests',
],
package_data={
'extended_sitemap': [
'sitemap-stylesheet.xsl',
'tests/content/articles/*.md',
'tests/content/pages/*.md',
'tests/expected/*.xml',
],
},
requires=[
'pelican'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
<commit_msg>Add Framework :: Pelican :: Plugins classifer for PyPI<commit_after>
|
#!/usr/bin/env python
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pelican-extended-sitemap',
description='sitemap generator plugin for pelican',
# @see http://semver.org/
version='1.2.3',
author='Alexander Herrmann',
author_email='darignac@gmail.com',
license='MIT',
url='https://github.com/dArignac/pelican-extended-sitemap',
long_description=long_description,
packages=[
'extended_sitemap',
'extended_sitemap.tests',
],
package_data={
'extended_sitemap': [
'sitemap-stylesheet.xsl',
'tests/content/articles/*.md',
'tests/content/pages/*.md',
'tests/expected/*.xml',
],
},
requires=[
'pelican'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Framework :: Pelican :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
|
#!/usr/bin/env python
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pelican-extended-sitemap',
description='sitemap generator plugin for pelican',
# @see http://semver.org/
version='1.2.3',
author='Alexander Herrmann',
author_email='darignac@gmail.com',
license='MIT',
url='https://github.com/dArignac/pelican-extended-sitemap',
long_description=long_description,
packages=[
'extended_sitemap',
'extended_sitemap.tests',
],
package_data={
'extended_sitemap': [
'sitemap-stylesheet.xsl',
'tests/content/articles/*.md',
'tests/content/pages/*.md',
'tests/expected/*.xml',
],
},
requires=[
'pelican'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
Add Framework :: Pelican :: Plugins classifer for PyPI#!/usr/bin/env python
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pelican-extended-sitemap',
description='sitemap generator plugin for pelican',
# @see http://semver.org/
version='1.2.3',
author='Alexander Herrmann',
author_email='darignac@gmail.com',
license='MIT',
url='https://github.com/dArignac/pelican-extended-sitemap',
long_description=long_description,
packages=[
'extended_sitemap',
'extended_sitemap.tests',
],
package_data={
'extended_sitemap': [
'sitemap-stylesheet.xsl',
'tests/content/articles/*.md',
'tests/content/pages/*.md',
'tests/expected/*.xml',
],
},
requires=[
'pelican'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Framework :: Pelican :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
|
<commit_before>#!/usr/bin/env python
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pelican-extended-sitemap',
description='sitemap generator plugin for pelican',
# @see http://semver.org/
version='1.2.3',
author='Alexander Herrmann',
author_email='darignac@gmail.com',
license='MIT',
url='https://github.com/dArignac/pelican-extended-sitemap',
long_description=long_description,
packages=[
'extended_sitemap',
'extended_sitemap.tests',
],
package_data={
'extended_sitemap': [
'sitemap-stylesheet.xsl',
'tests/content/articles/*.md',
'tests/content/pages/*.md',
'tests/expected/*.xml',
],
},
requires=[
'pelican'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
<commit_msg>Add Framework :: Pelican :: Plugins classifer for PyPI<commit_after>#!/usr/bin/env python
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pelican-extended-sitemap',
description='sitemap generator plugin for pelican',
# @see http://semver.org/
version='1.2.3',
author='Alexander Herrmann',
author_email='darignac@gmail.com',
license='MIT',
url='https://github.com/dArignac/pelican-extended-sitemap',
long_description=long_description,
packages=[
'extended_sitemap',
'extended_sitemap.tests',
],
package_data={
'extended_sitemap': [
'sitemap-stylesheet.xsl',
'tests/content/articles/*.md',
'tests/content/pages/*.md',
'tests/expected/*.xml',
],
},
requires=[
'pelican'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Framework :: Pelican :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Processing :: Markup'
]
)
|
9690b27be3fe9c4d4fad502dcdf37ff155e41878
|
setup.py
|
setup.py
|
#! -*- coding: utf-8 -*-
from setuptools import setup
from mmcq.version import VERSION
setup(name='mmcq.py',
version=VERSION,
author='Kang Hyojun',
author_email='hyojun@admire.kr',
install_requires=[
'Wand == 0.3.5'
])
|
#! -*- coding: utf-8 -*-
from setuptools import setup
from mmcq.version import VERSION
setup(name='mmcq.py',
version=VERSION,
author='Kang Hyojun',
author_email='hyojun@admire.kr',
install_requires=[
'Wand >= 0.3.0'
])
|
Change wand version ( >= 0.3.0 )
|
Change wand version ( >= 0.3.0 )
close #3
|
Python
|
mit
|
admire93/mmcq.py
|
#! -*- coding: utf-8 -*-
from setuptools import setup
from mmcq.version import VERSION
setup(name='mmcq.py',
version=VERSION,
author='Kang Hyojun',
author_email='hyojun@admire.kr',
install_requires=[
'Wand == 0.3.5'
])
Change wand version ( >= 0.3.0 )
close #3
|
#! -*- coding: utf-8 -*-
from setuptools import setup
from mmcq.version import VERSION
setup(name='mmcq.py',
version=VERSION,
author='Kang Hyojun',
author_email='hyojun@admire.kr',
install_requires=[
'Wand >= 0.3.0'
])
|
<commit_before>#! -*- coding: utf-8 -*-
from setuptools import setup
from mmcq.version import VERSION
setup(name='mmcq.py',
version=VERSION,
author='Kang Hyojun',
author_email='hyojun@admire.kr',
install_requires=[
'Wand == 0.3.5'
])
<commit_msg>Change wand version ( >= 0.3.0 )
close #3<commit_after>
|
#! -*- coding: utf-8 -*-
from setuptools import setup
from mmcq.version import VERSION
setup(name='mmcq.py',
version=VERSION,
author='Kang Hyojun',
author_email='hyojun@admire.kr',
install_requires=[
'Wand >= 0.3.0'
])
|
#! -*- coding: utf-8 -*-
from setuptools import setup
from mmcq.version import VERSION
setup(name='mmcq.py',
version=VERSION,
author='Kang Hyojun',
author_email='hyojun@admire.kr',
install_requires=[
'Wand == 0.3.5'
])
Change wand version ( >= 0.3.0 )
close #3#! -*- coding: utf-8 -*-
from setuptools import setup
from mmcq.version import VERSION
setup(name='mmcq.py',
version=VERSION,
author='Kang Hyojun',
author_email='hyojun@admire.kr',
install_requires=[
'Wand >= 0.3.0'
])
|
<commit_before>#! -*- coding: utf-8 -*-
from setuptools import setup
from mmcq.version import VERSION
setup(name='mmcq.py',
version=VERSION,
author='Kang Hyojun',
author_email='hyojun@admire.kr',
install_requires=[
'Wand == 0.3.5'
])
<commit_msg>Change wand version ( >= 0.3.0 )
close #3<commit_after>#! -*- coding: utf-8 -*-
from setuptools import setup
from mmcq.version import VERSION
setup(name='mmcq.py',
version=VERSION,
author='Kang Hyojun',
author_email='hyojun@admire.kr',
install_requires=[
'Wand >= 0.3.0'
])
|
21a068c8a56d528e3d1dd4ea3c45ad24306607ab
|
setup.py
|
setup.py
|
from setuptools import setup
from distutils import sysconfig
import re
#from setuptools.dist import Distribution
site_packages_path = sysconfig.get_python_lib()
try:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?site-packages)', site_packages_path, re.I)
if sprem is None:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?dist-packages)', site_packages_path, re.I)
rel_site_packages = sprem.group(1)
except Exception as exc:
print("I'm having trouble finding your site-packages directory. Is it where you expect?")
print("sysconfig.get_python_lib() returns '{}'".format(site_packages_path))
print("Exception was: {}".format(exc))
sys.exit(-1)
#class PureDistribution(Distribution):
# def is_pure(self):
# return True
setup(
name = 'coverage_pth',
version = '0.0.1',
description = 'Coverage PTH file to enable coverage at the virtualenv level',
#packages = '..',
#include_pacakage_date=True,
data_files=[
(rel_site_packages, ['coverage_pth.pth',]),
],
install_requires=[
'coverage',
],
#distclass=PureDistribution,
zip_safe=False,
)
|
from setuptools import setup
from distutils import sysconfig
import sys
import re
#from setuptools.dist import Distribution
site_packages_path = sysconfig.get_python_lib()
try:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?site-packages)', site_packages_path, re.I)
if sprem is None:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?dist-packages)', site_packages_path, re.I)
rel_site_packages = sprem.group(1)
except Exception as exc:
print("I'm having trouble finding your site-packages directory. Is it where you expect?")
print("sysconfig.get_python_lib() returns '{}'".format(site_packages_path))
print("Exception was: {}".format(exc))
sys.exit(-1)
#class PureDistribution(Distribution):
# def is_pure(self):
# return True
setup(
name = 'coverage_pth',
version = '0.0.1',
description = 'Coverage PTH file to enable coverage at the virtualenv level',
#packages = '..',
#include_pacakage_date=True,
data_files=[
(rel_site_packages, ['coverage_pth.pth',]),
],
install_requires=[
'coverage',
],
#distclass=PureDistribution,
zip_safe=False,
)
|
Add import.sys. Delete unneeded spaces.
|
Add import.sys. Delete unneeded spaces.
|
Python
|
bsd-2-clause
|
dougn/coverage_pth
|
from setuptools import setup
from distutils import sysconfig
import re
#from setuptools.dist import Distribution
site_packages_path = sysconfig.get_python_lib()
try:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?site-packages)', site_packages_path, re.I)
if sprem is None:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?dist-packages)', site_packages_path, re.I)
rel_site_packages = sprem.group(1)
except Exception as exc:
print("I'm having trouble finding your site-packages directory. Is it where you expect?")
print("sysconfig.get_python_lib() returns '{}'".format(site_packages_path))
print("Exception was: {}".format(exc))
sys.exit(-1)
#class PureDistribution(Distribution):
# def is_pure(self):
# return True
setup(
name = 'coverage_pth',
version = '0.0.1',
description = 'Coverage PTH file to enable coverage at the virtualenv level',
#packages = '..',
#include_pacakage_date=True,
data_files=[
(rel_site_packages, ['coverage_pth.pth',]),
],
install_requires=[
'coverage',
],
#distclass=PureDistribution,
zip_safe=False,
)
Add import.sys. Delete unneeded spaces.
|
from setuptools import setup
from distutils import sysconfig
import sys
import re
#from setuptools.dist import Distribution
site_packages_path = sysconfig.get_python_lib()
try:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?site-packages)', site_packages_path, re.I)
if sprem is None:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?dist-packages)', site_packages_path, re.I)
rel_site_packages = sprem.group(1)
except Exception as exc:
print("I'm having trouble finding your site-packages directory. Is it where you expect?")
print("sysconfig.get_python_lib() returns '{}'".format(site_packages_path))
print("Exception was: {}".format(exc))
sys.exit(-1)
#class PureDistribution(Distribution):
# def is_pure(self):
# return True
setup(
name = 'coverage_pth',
version = '0.0.1',
description = 'Coverage PTH file to enable coverage at the virtualenv level',
#packages = '..',
#include_pacakage_date=True,
data_files=[
(rel_site_packages, ['coverage_pth.pth',]),
],
install_requires=[
'coverage',
],
#distclass=PureDistribution,
zip_safe=False,
)
|
<commit_before>from setuptools import setup
from distutils import sysconfig
import re
#from setuptools.dist import Distribution
site_packages_path = sysconfig.get_python_lib()
try:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?site-packages)', site_packages_path, re.I)
if sprem is None:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?dist-packages)', site_packages_path, re.I)
rel_site_packages = sprem.group(1)
except Exception as exc:
print("I'm having trouble finding your site-packages directory. Is it where you expect?")
print("sysconfig.get_python_lib() returns '{}'".format(site_packages_path))
print("Exception was: {}".format(exc))
sys.exit(-1)
#class PureDistribution(Distribution):
# def is_pure(self):
# return True
setup(
name = 'coverage_pth',
version = '0.0.1',
description = 'Coverage PTH file to enable coverage at the virtualenv level',
#packages = '..',
#include_pacakage_date=True,
data_files=[
(rel_site_packages, ['coverage_pth.pth',]),
],
install_requires=[
'coverage',
],
#distclass=PureDistribution,
zip_safe=False,
)
<commit_msg>Add import.sys. Delete unneeded spaces.<commit_after>
|
from setuptools import setup
from distutils import sysconfig
import sys
import re
#from setuptools.dist import Distribution
site_packages_path = sysconfig.get_python_lib()
try:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?site-packages)', site_packages_path, re.I)
if sprem is None:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?dist-packages)', site_packages_path, re.I)
rel_site_packages = sprem.group(1)
except Exception as exc:
print("I'm having trouble finding your site-packages directory. Is it where you expect?")
print("sysconfig.get_python_lib() returns '{}'".format(site_packages_path))
print("Exception was: {}".format(exc))
sys.exit(-1)
#class PureDistribution(Distribution):
# def is_pure(self):
# return True
setup(
name = 'coverage_pth',
version = '0.0.1',
description = 'Coverage PTH file to enable coverage at the virtualenv level',
#packages = '..',
#include_pacakage_date=True,
data_files=[
(rel_site_packages, ['coverage_pth.pth',]),
],
install_requires=[
'coverage',
],
#distclass=PureDistribution,
zip_safe=False,
)
|
from setuptools import setup
from distutils import sysconfig
import re
#from setuptools.dist import Distribution
site_packages_path = sysconfig.get_python_lib()
try:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?site-packages)', site_packages_path, re.I)
if sprem is None:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?dist-packages)', site_packages_path, re.I)
rel_site_packages = sprem.group(1)
except Exception as exc:
print("I'm having trouble finding your site-packages directory. Is it where you expect?")
print("sysconfig.get_python_lib() returns '{}'".format(site_packages_path))
print("Exception was: {}".format(exc))
sys.exit(-1)
#class PureDistribution(Distribution):
# def is_pure(self):
# return True
setup(
name = 'coverage_pth',
version = '0.0.1',
description = 'Coverage PTH file to enable coverage at the virtualenv level',
#packages = '..',
#include_pacakage_date=True,
data_files=[
(rel_site_packages, ['coverage_pth.pth',]),
],
install_requires=[
'coverage',
],
#distclass=PureDistribution,
zip_safe=False,
)
Add import.sys. Delete unneeded spaces.from setuptools import setup
from distutils import sysconfig
import sys
import re
#from setuptools.dist import Distribution
site_packages_path = sysconfig.get_python_lib()
try:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?site-packages)', site_packages_path, re.I)
if sprem is None:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?dist-packages)', site_packages_path, re.I)
rel_site_packages = sprem.group(1)
except Exception as exc:
print("I'm having trouble finding your site-packages directory. Is it where you expect?")
print("sysconfig.get_python_lib() returns '{}'".format(site_packages_path))
print("Exception was: {}".format(exc))
sys.exit(-1)
#class PureDistribution(Distribution):
# def is_pure(self):
# return True
setup(
name = 'coverage_pth',
version = '0.0.1',
description = 'Coverage PTH file to enable coverage at the virtualenv level',
#packages = '..',
#include_pacakage_date=True,
data_files=[
(rel_site_packages, ['coverage_pth.pth',]),
],
install_requires=[
'coverage',
],
#distclass=PureDistribution,
zip_safe=False,
)
|
<commit_before>from setuptools import setup
from distutils import sysconfig
import re
#from setuptools.dist import Distribution
site_packages_path = sysconfig.get_python_lib()
try:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?site-packages)', site_packages_path, re.I)
if sprem is None:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?dist-packages)', site_packages_path, re.I)
rel_site_packages = sprem.group(1)
except Exception as exc:
print("I'm having trouble finding your site-packages directory. Is it where you expect?")
print("sysconfig.get_python_lib() returns '{}'".format(site_packages_path))
print("Exception was: {}".format(exc))
sys.exit(-1)
#class PureDistribution(Distribution):
# def is_pure(self):
# return True
setup(
name = 'coverage_pth',
version = '0.0.1',
description = 'Coverage PTH file to enable coverage at the virtualenv level',
#packages = '..',
#include_pacakage_date=True,
data_files=[
(rel_site_packages, ['coverage_pth.pth',]),
],
install_requires=[
'coverage',
],
#distclass=PureDistribution,
zip_safe=False,
)
<commit_msg>Add import.sys. Delete unneeded spaces.<commit_after>from setuptools import setup
from distutils import sysconfig
import sys
import re
#from setuptools.dist import Distribution
site_packages_path = sysconfig.get_python_lib()
try:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?site-packages)', site_packages_path, re.I)
if sprem is None:
sprem = re.match(
r'.*(lib[\\/](python\d(\.\d)*[\\/])?dist-packages)', site_packages_path, re.I)
rel_site_packages = sprem.group(1)
except Exception as exc:
print("I'm having trouble finding your site-packages directory. Is it where you expect?")
print("sysconfig.get_python_lib() returns '{}'".format(site_packages_path))
print("Exception was: {}".format(exc))
sys.exit(-1)
#class PureDistribution(Distribution):
# def is_pure(self):
# return True
setup(
name = 'coverage_pth',
version = '0.0.1',
description = 'Coverage PTH file to enable coverage at the virtualenv level',
#packages = '..',
#include_pacakage_date=True,
data_files=[
(rel_site_packages, ['coverage_pth.pth',]),
],
install_requires=[
'coverage',
],
#distclass=PureDistribution,
zip_safe=False,
)
|
480f5535c143c4c36727a5e21de96756f9b907f0
|
setup.py
|
setup.py
|
from setuptools import setup
import glob
import os.path as op
from os import listdir
from pyuvdata import version
import json
data = [version.git_origin, version.git_hash, version.git_description, version.git_branch]
with open(op.join('pyuvdata', 'GIT_INFO'), 'w') as outfile:
json.dump(data, outfile)
setup_args = {
'name': 'pyuvdata',
'author': 'HERA Team',
'url': 'https://github.com/HERA-Team/pyuvdata',
'license': 'BSD',
'description': 'an interface for astronomical interferometeric datasets in python',
'package_dir': {'pyuvdata': 'pyuvdata'},
'packages': ['pyuvdata'],
'scripts': glob.glob('scripts/*'),
'version': version.version,
'include_package_data': True,
'install_requires': ['numpy>=1.10', 'scipy', 'astropy>=1.2', 'pyephem', 'aipy'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy'],
'keywords': 'radio astronomy interferometry'
}
if __name__ == '__main__':
apply(setup, (), setup_args)
|
from setuptools import setup
import glob
import os.path as op
from os import listdir
from pyuvdata import version
import json
data = [version.git_origin, version.git_hash, version.git_description, version.git_branch]
with open(op.join('pyuvdata', 'GIT_INFO'), 'w') as outfile:
json.dump(data, outfile)
setup_args = {
'name': 'pyuvdata',
'author': 'HERA Team',
'url': 'https://github.com/HERA-Team/pyuvdata',
'license': 'BSD',
'description': 'an interface for astronomical interferometeric datasets in python',
'package_dir': {'pyuvdata': 'pyuvdata'},
'packages': ['pyuvdata', 'pyuvdata.tests'],
'scripts': glob.glob('scripts/*'),
'version': version.version,
'include_package_data': True,
'install_requires': ['numpy>=1.10', 'scipy', 'astropy>=1.2', 'pyephem', 'aipy'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy'],
'keywords': 'radio astronomy interferometry'
}
if __name__ == '__main__':
apply(setup, (), setup_args)
|
Add tests directory to packages list in install file
|
Add tests directory to packages list in install file
|
Python
|
bsd-2-clause
|
HERA-Team/pyuvdata,HERA-Team/pyuvdata,HERA-Team/pyuvdata,HERA-Team/pyuvdata
|
from setuptools import setup
import glob
import os.path as op
from os import listdir
from pyuvdata import version
import json
data = [version.git_origin, version.git_hash, version.git_description, version.git_branch]
with open(op.join('pyuvdata', 'GIT_INFO'), 'w') as outfile:
json.dump(data, outfile)
setup_args = {
'name': 'pyuvdata',
'author': 'HERA Team',
'url': 'https://github.com/HERA-Team/pyuvdata',
'license': 'BSD',
'description': 'an interface for astronomical interferometeric datasets in python',
'package_dir': {'pyuvdata': 'pyuvdata'},
'packages': ['pyuvdata'],
'scripts': glob.glob('scripts/*'),
'version': version.version,
'include_package_data': True,
'install_requires': ['numpy>=1.10', 'scipy', 'astropy>=1.2', 'pyephem', 'aipy'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy'],
'keywords': 'radio astronomy interferometry'
}
if __name__ == '__main__':
apply(setup, (), setup_args)
Add tests directory to packages list in install file
|
from setuptools import setup
import glob
import os.path as op
from os import listdir
from pyuvdata import version
import json
data = [version.git_origin, version.git_hash, version.git_description, version.git_branch]
with open(op.join('pyuvdata', 'GIT_INFO'), 'w') as outfile:
json.dump(data, outfile)
setup_args = {
'name': 'pyuvdata',
'author': 'HERA Team',
'url': 'https://github.com/HERA-Team/pyuvdata',
'license': 'BSD',
'description': 'an interface for astronomical interferometeric datasets in python',
'package_dir': {'pyuvdata': 'pyuvdata'},
'packages': ['pyuvdata', 'pyuvdata.tests'],
'scripts': glob.glob('scripts/*'),
'version': version.version,
'include_package_data': True,
'install_requires': ['numpy>=1.10', 'scipy', 'astropy>=1.2', 'pyephem', 'aipy'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy'],
'keywords': 'radio astronomy interferometry'
}
if __name__ == '__main__':
apply(setup, (), setup_args)
|
<commit_before>from setuptools import setup
import glob
import os.path as op
from os import listdir
from pyuvdata import version
import json
data = [version.git_origin, version.git_hash, version.git_description, version.git_branch]
with open(op.join('pyuvdata', 'GIT_INFO'), 'w') as outfile:
json.dump(data, outfile)
setup_args = {
'name': 'pyuvdata',
'author': 'HERA Team',
'url': 'https://github.com/HERA-Team/pyuvdata',
'license': 'BSD',
'description': 'an interface for astronomical interferometeric datasets in python',
'package_dir': {'pyuvdata': 'pyuvdata'},
'packages': ['pyuvdata'],
'scripts': glob.glob('scripts/*'),
'version': version.version,
'include_package_data': True,
'install_requires': ['numpy>=1.10', 'scipy', 'astropy>=1.2', 'pyephem', 'aipy'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy'],
'keywords': 'radio astronomy interferometry'
}
if __name__ == '__main__':
apply(setup, (), setup_args)
<commit_msg>Add tests directory to packages list in install file<commit_after>
|
from setuptools import setup
import glob
import os.path as op
from os import listdir
from pyuvdata import version
import json
data = [version.git_origin, version.git_hash, version.git_description, version.git_branch]
with open(op.join('pyuvdata', 'GIT_INFO'), 'w') as outfile:
json.dump(data, outfile)
setup_args = {
'name': 'pyuvdata',
'author': 'HERA Team',
'url': 'https://github.com/HERA-Team/pyuvdata',
'license': 'BSD',
'description': 'an interface for astronomical interferometeric datasets in python',
'package_dir': {'pyuvdata': 'pyuvdata'},
'packages': ['pyuvdata', 'pyuvdata.tests'],
'scripts': glob.glob('scripts/*'),
'version': version.version,
'include_package_data': True,
'install_requires': ['numpy>=1.10', 'scipy', 'astropy>=1.2', 'pyephem', 'aipy'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy'],
'keywords': 'radio astronomy interferometry'
}
if __name__ == '__main__':
apply(setup, (), setup_args)
|
from setuptools import setup
import glob
import os.path as op
from os import listdir
from pyuvdata import version
import json
data = [version.git_origin, version.git_hash, version.git_description, version.git_branch]
with open(op.join('pyuvdata', 'GIT_INFO'), 'w') as outfile:
json.dump(data, outfile)
setup_args = {
'name': 'pyuvdata',
'author': 'HERA Team',
'url': 'https://github.com/HERA-Team/pyuvdata',
'license': 'BSD',
'description': 'an interface for astronomical interferometeric datasets in python',
'package_dir': {'pyuvdata': 'pyuvdata'},
'packages': ['pyuvdata'],
'scripts': glob.glob('scripts/*'),
'version': version.version,
'include_package_data': True,
'install_requires': ['numpy>=1.10', 'scipy', 'astropy>=1.2', 'pyephem', 'aipy'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy'],
'keywords': 'radio astronomy interferometry'
}
if __name__ == '__main__':
apply(setup, (), setup_args)
Add tests directory to packages list in install filefrom setuptools import setup
import glob
import os.path as op
from os import listdir
from pyuvdata import version
import json
data = [version.git_origin, version.git_hash, version.git_description, version.git_branch]
with open(op.join('pyuvdata', 'GIT_INFO'), 'w') as outfile:
json.dump(data, outfile)
setup_args = {
'name': 'pyuvdata',
'author': 'HERA Team',
'url': 'https://github.com/HERA-Team/pyuvdata',
'license': 'BSD',
'description': 'an interface for astronomical interferometeric datasets in python',
'package_dir': {'pyuvdata': 'pyuvdata'},
'packages': ['pyuvdata', 'pyuvdata.tests'],
'scripts': glob.glob('scripts/*'),
'version': version.version,
'include_package_data': True,
'install_requires': ['numpy>=1.10', 'scipy', 'astropy>=1.2', 'pyephem', 'aipy'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy'],
'keywords': 'radio astronomy interferometry'
}
if __name__ == '__main__':
apply(setup, (), setup_args)
|
<commit_before>from setuptools import setup
import glob
import os.path as op
from os import listdir
from pyuvdata import version
import json
data = [version.git_origin, version.git_hash, version.git_description, version.git_branch]
with open(op.join('pyuvdata', 'GIT_INFO'), 'w') as outfile:
json.dump(data, outfile)
setup_args = {
'name': 'pyuvdata',
'author': 'HERA Team',
'url': 'https://github.com/HERA-Team/pyuvdata',
'license': 'BSD',
'description': 'an interface for astronomical interferometeric datasets in python',
'package_dir': {'pyuvdata': 'pyuvdata'},
'packages': ['pyuvdata'],
'scripts': glob.glob('scripts/*'),
'version': version.version,
'include_package_data': True,
'install_requires': ['numpy>=1.10', 'scipy', 'astropy>=1.2', 'pyephem', 'aipy'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy'],
'keywords': 'radio astronomy interferometry'
}
if __name__ == '__main__':
apply(setup, (), setup_args)
<commit_msg>Add tests directory to packages list in install file<commit_after>from setuptools import setup
import glob
import os.path as op
from os import listdir
from pyuvdata import version
import json
data = [version.git_origin, version.git_hash, version.git_description, version.git_branch]
with open(op.join('pyuvdata', 'GIT_INFO'), 'w') as outfile:
json.dump(data, outfile)
setup_args = {
'name': 'pyuvdata',
'author': 'HERA Team',
'url': 'https://github.com/HERA-Team/pyuvdata',
'license': 'BSD',
'description': 'an interface for astronomical interferometeric datasets in python',
'package_dir': {'pyuvdata': 'pyuvdata'},
'packages': ['pyuvdata', 'pyuvdata.tests'],
'scripts': glob.glob('scripts/*'),
'version': version.version,
'include_package_data': True,
'install_requires': ['numpy>=1.10', 'scipy', 'astropy>=1.2', 'pyephem', 'aipy'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Astronomy'],
'keywords': 'radio astronomy interferometry'
}
if __name__ == '__main__':
apply(setup, (), setup_args)
|
5c1bf492a8308473aa9704823fa3200d08b7a730
|
win_unc/unc_directory.py
|
win_unc/unc_directory.py
|
class UncDirectory(object):
def __init__(self, path, username=None, password=None):
self.path = path
self.username = username
self.password = password
def __eq__(self, other):
try:
return (self.get_normalized_path() == other.get_normalized_path()
and self.username == other.username
and self.password == other.password)
except AttributeError:
return False
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self.path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def __str__(self):
return '{username}{password}{at}{path}'.format(
username=self.username,
password=':' + self.password if self.password else '',
at='@' if self.username or self.password else '',
path=self.path)
def __repr__(self):
return '<{cls}: {str}>'.format(cls=self.__class__.__name__, str=str(self))
|
class UncDirectory(object):
def __init__(self, path, username=None, password=None):
if hasattr(path, 'path') and hasattr(path, 'username') and hasattr(path, 'password'):
self.path = path.path
self.username = path.username
self.password = path.password
else:
self.path = path
self.username = username
self.password = password
def __eq__(self, other):
try:
return (self.get_normalized_path() == other.get_normalized_path()
and self.username == other.username
and self.password == other.password)
except AttributeError:
return False
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self.path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def __str__(self):
return '{username}{password}{at}{path}'.format(
username=self.username,
password=':' + self.password if self.password else '',
at='@' if self.username or self.password else '',
path=self.path)
def __repr__(self):
return '<{cls}: {str}>'.format(cls=self.__class__.__name__, str=str(self))
|
Support cloning in UncDirectory constructor
|
Support cloning in UncDirectory constructor
|
Python
|
mit
|
nithinphilips/py_win_unc,CovenantEyes/py_win_unc
|
class UncDirectory(object):
def __init__(self, path, username=None, password=None):
self.path = path
self.username = username
self.password = password
def __eq__(self, other):
try:
return (self.get_normalized_path() == other.get_normalized_path()
and self.username == other.username
and self.password == other.password)
except AttributeError:
return False
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self.path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def __str__(self):
return '{username}{password}{at}{path}'.format(
username=self.username,
password=':' + self.password if self.password else '',
at='@' if self.username or self.password else '',
path=self.path)
def __repr__(self):
return '<{cls}: {str}>'.format(cls=self.__class__.__name__, str=str(self))
Support cloning in UncDirectory constructor
|
class UncDirectory(object):
def __init__(self, path, username=None, password=None):
if hasattr(path, 'path') and hasattr(path, 'username') and hasattr(path, 'password'):
self.path = path.path
self.username = path.username
self.password = path.password
else:
self.path = path
self.username = username
self.password = password
def __eq__(self, other):
try:
return (self.get_normalized_path() == other.get_normalized_path()
and self.username == other.username
and self.password == other.password)
except AttributeError:
return False
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self.path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def __str__(self):
return '{username}{password}{at}{path}'.format(
username=self.username,
password=':' + self.password if self.password else '',
at='@' if self.username or self.password else '',
path=self.path)
def __repr__(self):
return '<{cls}: {str}>'.format(cls=self.__class__.__name__, str=str(self))
|
<commit_before>class UncDirectory(object):
def __init__(self, path, username=None, password=None):
self.path = path
self.username = username
self.password = password
def __eq__(self, other):
try:
return (self.get_normalized_path() == other.get_normalized_path()
and self.username == other.username
and self.password == other.password)
except AttributeError:
return False
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self.path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def __str__(self):
return '{username}{password}{at}{path}'.format(
username=self.username,
password=':' + self.password if self.password else '',
at='@' if self.username or self.password else '',
path=self.path)
def __repr__(self):
return '<{cls}: {str}>'.format(cls=self.__class__.__name__, str=str(self))
<commit_msg>Support cloning in UncDirectory constructor<commit_after>
|
class UncDirectory(object):
def __init__(self, path, username=None, password=None):
if hasattr(path, 'path') and hasattr(path, 'username') and hasattr(path, 'password'):
self.path = path.path
self.username = path.username
self.password = path.password
else:
self.path = path
self.username = username
self.password = password
def __eq__(self, other):
try:
return (self.get_normalized_path() == other.get_normalized_path()
and self.username == other.username
and self.password == other.password)
except AttributeError:
return False
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self.path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def __str__(self):
return '{username}{password}{at}{path}'.format(
username=self.username,
password=':' + self.password if self.password else '',
at='@' if self.username or self.password else '',
path=self.path)
def __repr__(self):
return '<{cls}: {str}>'.format(cls=self.__class__.__name__, str=str(self))
|
class UncDirectory(object):
def __init__(self, path, username=None, password=None):
self.path = path
self.username = username
self.password = password
def __eq__(self, other):
try:
return (self.get_normalized_path() == other.get_normalized_path()
and self.username == other.username
and self.password == other.password)
except AttributeError:
return False
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self.path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def __str__(self):
return '{username}{password}{at}{path}'.format(
username=self.username,
password=':' + self.password if self.password else '',
at='@' if self.username or self.password else '',
path=self.path)
def __repr__(self):
return '<{cls}: {str}>'.format(cls=self.__class__.__name__, str=str(self))
Support cloning in UncDirectory constructorclass UncDirectory(object):
def __init__(self, path, username=None, password=None):
if hasattr(path, 'path') and hasattr(path, 'username') and hasattr(path, 'password'):
self.path = path.path
self.username = path.username
self.password = path.password
else:
self.path = path
self.username = username
self.password = password
def __eq__(self, other):
try:
return (self.get_normalized_path() == other.get_normalized_path()
and self.username == other.username
and self.password == other.password)
except AttributeError:
return False
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self.path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def __str__(self):
return '{username}{password}{at}{path}'.format(
username=self.username,
password=':' + self.password if self.password else '',
at='@' if self.username or self.password else '',
path=self.path)
def __repr__(self):
return '<{cls}: {str}>'.format(cls=self.__class__.__name__, str=str(self))
|
<commit_before>class UncDirectory(object):
def __init__(self, path, username=None, password=None):
self.path = path
self.username = username
self.password = password
def __eq__(self, other):
try:
return (self.get_normalized_path() == other.get_normalized_path()
and self.username == other.username
and self.password == other.password)
except AttributeError:
return False
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self.path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def __str__(self):
return '{username}{password}{at}{path}'.format(
username=self.username,
password=':' + self.password if self.password else '',
at='@' if self.username or self.password else '',
path=self.path)
def __repr__(self):
return '<{cls}: {str}>'.format(cls=self.__class__.__name__, str=str(self))
<commit_msg>Support cloning in UncDirectory constructor<commit_after>class UncDirectory(object):
def __init__(self, path, username=None, password=None):
if hasattr(path, 'path') and hasattr(path, 'username') and hasattr(path, 'password'):
self.path = path.path
self.username = path.username
self.password = path.password
else:
self.path = path
self.username = username
self.password = password
def __eq__(self, other):
try:
return (self.get_normalized_path() == other.get_normalized_path()
and self.username == other.username
and self.password == other.password)
except AttributeError:
return False
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self.path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def __str__(self):
return '{username}{password}{at}{path}'.format(
username=self.username,
password=':' + self.password if self.password else '',
at='@' if self.username or self.password else '',
path=self.path)
def __repr__(self):
return '<{cls}: {str}>'.format(cls=self.__class__.__name__, str=str(self))
|
8f4bc11db358a1db227690149fe1780e600d6328
|
integration_tests/tests/test_startup.py
|
integration_tests/tests/test_startup.py
|
from system import auto_power_on, runlevel
from tests.base import RestartPerSuite
from utils import TestEvent
@runlevel(1)
class StartupTest(RestartPerSuite):
def wait_for_lcl(self, lcl, message):
ev = TestEvent()
lcl.on_disable = ev.set
self.power_on_obc()
self.assertTrue(ev.wait_for_change(1), message)
@auto_power_on(auto_power_on=False)
def test_reset_sens_lcl(self):
self.wait_for_lcl(self.system.eps.SENS, "Sens LCL Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_cam_nadir_lcl(self):
self.wait_for_lcl(self.system.eps.CamNadir, "Cam Nadir LCL Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_cam_wing_lcl(self):
self.wait_for_lcl(self.system.eps.CamWing, "Cam Wing Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_ant_lcl(self):
self.wait_for_lcl(self.system.eps.ANTenna, "ANT Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_ant_red_lcl(self):
self.wait_for_lcl(self.system.eps.ANTennaRed, "ANT Red LCL Should be reset on startup")
|
from system import auto_power_on, runlevel
from tests.base import RestartPerSuite
from utils import TestEvent
@runlevel(1)
class StartupTest(RestartPerSuite):
def setup_lcl(self, lcl, message):
ev = TestEvent()
lcl.on_disable = ev.set
return lambda : self.assertTrue(ev.wait_for_change(1), message)
@auto_power_on(auto_power_on=False)
def test_lcls_are_reset(self):
sens = self.setup_lcl(self.system.eps.SENS, "Sens LCL Should be reset on startup")
camNadir = self.setup_lcl(self.system.eps.CamNadir, "Cam Nadir LCL Should be reset on startup")
camWing = self.setup_lcl(self.system.eps.CamWing, "Cam Wing Should be reset on startup")
ant = self.setup_lcl(self.system.eps.ANTenna, "ANT Should be reset on startup")
antRed = self.setup_lcl(self.system.eps.ANTennaRed, "ANT Red LCL Should be reset on startup")
self.power_on_obc()
sens()
camNadir()
camWing()
ant()
antRed()
|
Merge together all eps startup tests to reduce run time.
|
[integration_tests] Merge together all eps startup tests to reduce run time.
|
Python
|
agpl-3.0
|
PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC
|
from system import auto_power_on, runlevel
from tests.base import RestartPerSuite
from utils import TestEvent
@runlevel(1)
class StartupTest(RestartPerSuite):
def wait_for_lcl(self, lcl, message):
ev = TestEvent()
lcl.on_disable = ev.set
self.power_on_obc()
self.assertTrue(ev.wait_for_change(1), message)
@auto_power_on(auto_power_on=False)
def test_reset_sens_lcl(self):
self.wait_for_lcl(self.system.eps.SENS, "Sens LCL Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_cam_nadir_lcl(self):
self.wait_for_lcl(self.system.eps.CamNadir, "Cam Nadir LCL Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_cam_wing_lcl(self):
self.wait_for_lcl(self.system.eps.CamWing, "Cam Wing Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_ant_lcl(self):
self.wait_for_lcl(self.system.eps.ANTenna, "ANT Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_ant_red_lcl(self):
self.wait_for_lcl(self.system.eps.ANTennaRed, "ANT Red LCL Should be reset on startup")
[integration_tests] Merge together all eps startup tests to reduce run time.
|
from system import auto_power_on, runlevel
from tests.base import RestartPerSuite
from utils import TestEvent
@runlevel(1)
class StartupTest(RestartPerSuite):
def setup_lcl(self, lcl, message):
ev = TestEvent()
lcl.on_disable = ev.set
return lambda : self.assertTrue(ev.wait_for_change(1), message)
@auto_power_on(auto_power_on=False)
def test_lcls_are_reset(self):
sens = self.setup_lcl(self.system.eps.SENS, "Sens LCL Should be reset on startup")
camNadir = self.setup_lcl(self.system.eps.CamNadir, "Cam Nadir LCL Should be reset on startup")
camWing = self.setup_lcl(self.system.eps.CamWing, "Cam Wing Should be reset on startup")
ant = self.setup_lcl(self.system.eps.ANTenna, "ANT Should be reset on startup")
antRed = self.setup_lcl(self.system.eps.ANTennaRed, "ANT Red LCL Should be reset on startup")
self.power_on_obc()
sens()
camNadir()
camWing()
ant()
antRed()
|
<commit_before>from system import auto_power_on, runlevel
from tests.base import RestartPerSuite
from utils import TestEvent
@runlevel(1)
class StartupTest(RestartPerSuite):
def wait_for_lcl(self, lcl, message):
ev = TestEvent()
lcl.on_disable = ev.set
self.power_on_obc()
self.assertTrue(ev.wait_for_change(1), message)
@auto_power_on(auto_power_on=False)
def test_reset_sens_lcl(self):
self.wait_for_lcl(self.system.eps.SENS, "Sens LCL Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_cam_nadir_lcl(self):
self.wait_for_lcl(self.system.eps.CamNadir, "Cam Nadir LCL Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_cam_wing_lcl(self):
self.wait_for_lcl(self.system.eps.CamWing, "Cam Wing Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_ant_lcl(self):
self.wait_for_lcl(self.system.eps.ANTenna, "ANT Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_ant_red_lcl(self):
self.wait_for_lcl(self.system.eps.ANTennaRed, "ANT Red LCL Should be reset on startup")
<commit_msg>[integration_tests] Merge together all eps startup tests to reduce run time.<commit_after>
|
from system import auto_power_on, runlevel
from tests.base import RestartPerSuite
from utils import TestEvent
@runlevel(1)
class StartupTest(RestartPerSuite):
def setup_lcl(self, lcl, message):
ev = TestEvent()
lcl.on_disable = ev.set
return lambda : self.assertTrue(ev.wait_for_change(1), message)
@auto_power_on(auto_power_on=False)
def test_lcls_are_reset(self):
sens = self.setup_lcl(self.system.eps.SENS, "Sens LCL Should be reset on startup")
camNadir = self.setup_lcl(self.system.eps.CamNadir, "Cam Nadir LCL Should be reset on startup")
camWing = self.setup_lcl(self.system.eps.CamWing, "Cam Wing Should be reset on startup")
ant = self.setup_lcl(self.system.eps.ANTenna, "ANT Should be reset on startup")
antRed = self.setup_lcl(self.system.eps.ANTennaRed, "ANT Red LCL Should be reset on startup")
self.power_on_obc()
sens()
camNadir()
camWing()
ant()
antRed()
|
from system import auto_power_on, runlevel
from tests.base import RestartPerSuite
from utils import TestEvent
@runlevel(1)
class StartupTest(RestartPerSuite):
def wait_for_lcl(self, lcl, message):
ev = TestEvent()
lcl.on_disable = ev.set
self.power_on_obc()
self.assertTrue(ev.wait_for_change(1), message)
@auto_power_on(auto_power_on=False)
def test_reset_sens_lcl(self):
self.wait_for_lcl(self.system.eps.SENS, "Sens LCL Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_cam_nadir_lcl(self):
self.wait_for_lcl(self.system.eps.CamNadir, "Cam Nadir LCL Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_cam_wing_lcl(self):
self.wait_for_lcl(self.system.eps.CamWing, "Cam Wing Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_ant_lcl(self):
self.wait_for_lcl(self.system.eps.ANTenna, "ANT Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_ant_red_lcl(self):
self.wait_for_lcl(self.system.eps.ANTennaRed, "ANT Red LCL Should be reset on startup")
[integration_tests] Merge together all eps startup tests to reduce run time.from system import auto_power_on, runlevel
from tests.base import RestartPerSuite
from utils import TestEvent
@runlevel(1)
class StartupTest(RestartPerSuite):
def setup_lcl(self, lcl, message):
ev = TestEvent()
lcl.on_disable = ev.set
return lambda : self.assertTrue(ev.wait_for_change(1), message)
@auto_power_on(auto_power_on=False)
def test_lcls_are_reset(self):
sens = self.setup_lcl(self.system.eps.SENS, "Sens LCL Should be reset on startup")
camNadir = self.setup_lcl(self.system.eps.CamNadir, "Cam Nadir LCL Should be reset on startup")
camWing = self.setup_lcl(self.system.eps.CamWing, "Cam Wing Should be reset on startup")
ant = self.setup_lcl(self.system.eps.ANTenna, "ANT Should be reset on startup")
antRed = self.setup_lcl(self.system.eps.ANTennaRed, "ANT Red LCL Should be reset on startup")
self.power_on_obc()
sens()
camNadir()
camWing()
ant()
antRed()
|
<commit_before>from system import auto_power_on, runlevel
from tests.base import RestartPerSuite
from utils import TestEvent
@runlevel(1)
class StartupTest(RestartPerSuite):
def wait_for_lcl(self, lcl, message):
ev = TestEvent()
lcl.on_disable = ev.set
self.power_on_obc()
self.assertTrue(ev.wait_for_change(1), message)
@auto_power_on(auto_power_on=False)
def test_reset_sens_lcl(self):
self.wait_for_lcl(self.system.eps.SENS, "Sens LCL Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_cam_nadir_lcl(self):
self.wait_for_lcl(self.system.eps.CamNadir, "Cam Nadir LCL Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_cam_wing_lcl(self):
self.wait_for_lcl(self.system.eps.CamWing, "Cam Wing Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_ant_lcl(self):
self.wait_for_lcl(self.system.eps.ANTenna, "ANT Should be reset on startup")
@auto_power_on(auto_power_on=False)
def test_reset_ant_red_lcl(self):
self.wait_for_lcl(self.system.eps.ANTennaRed, "ANT Red LCL Should be reset on startup")
<commit_msg>[integration_tests] Merge together all eps startup tests to reduce run time.<commit_after>from system import auto_power_on, runlevel
from tests.base import RestartPerSuite
from utils import TestEvent
@runlevel(1)
class StartupTest(RestartPerSuite):
def setup_lcl(self, lcl, message):
ev = TestEvent()
lcl.on_disable = ev.set
return lambda : self.assertTrue(ev.wait_for_change(1), message)
@auto_power_on(auto_power_on=False)
def test_lcls_are_reset(self):
sens = self.setup_lcl(self.system.eps.SENS, "Sens LCL Should be reset on startup")
camNadir = self.setup_lcl(self.system.eps.CamNadir, "Cam Nadir LCL Should be reset on startup")
camWing = self.setup_lcl(self.system.eps.CamWing, "Cam Wing Should be reset on startup")
ant = self.setup_lcl(self.system.eps.ANTenna, "ANT Should be reset on startup")
antRed = self.setup_lcl(self.system.eps.ANTennaRed, "ANT Red LCL Should be reset on startup")
self.power_on_obc()
sens()
camNadir()
camWing()
ant()
antRed()
|
b9379e3c8667d062ec6511ad07f2525ea0b2f5ef
|
tests/test_statepoint_sourcesep/test_statepoint_sourcesep.py
|
tests/test_statepoint_sourcesep/test_statepoint_sourcesep.py
|
#!/usr/bin/env python
import sys
sys.path.insert(0, '..')
from testing_harness import *
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* and source* have been created."""
TestHarness._test_output_created(self)
source = glob.glob(os.path.join(os.getcwd(), 'source.*'))
assert len(source) == 1, 'Either multiple or no source files ' \
'exist.'
assert source[0].endswith('h5'), \
'Source file is not a HDF5 file.'
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.10.*')
harness.main()
|
#!/usr/bin/env python
import sys
sys.path.insert(0, '..')
from testing_harness import *
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* and source* have been created."""
TestHarness._test_output_created(self)
source = glob.glob(os.path.join(os.getcwd(), 'source.*'))
assert len(source) == 1, 'Either multiple or no source files ' \
'exist.'
assert source[0].endswith('h5'), \
'Source file is not a HDF5 file.'
def _cleanup(self):
TestHarness._cleanup(self)
output = glob.glob(os.path.join(os.getcwd(), 'source.*'))
for f in output:
if os.path.exists(f):
os.remove(f)
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.10.*')
harness.main()
|
Make test cleanup source file
|
Make test cleanup source file
|
Python
|
mit
|
amandalund/openmc,mit-crpg/openmc,shikhar413/openmc,walshjon/openmc,bhermanmit/openmc,mjlong/openmc,paulromano/openmc,samuelshaner/openmc,smharper/openmc,liangjg/openmc,paulromano/openmc,mit-crpg/openmc,shikhar413/openmc,wbinventor/openmc,walshjon/openmc,wbinventor/openmc,wbinventor/openmc,wbinventor/openmc,kellyrowland/openmc,bhermanmit/openmc,liangjg/openmc,liangjg/openmc,shikhar413/openmc,shikhar413/openmc,mit-crpg/openmc,smharper/openmc,amandalund/openmc,johnnyliu27/openmc,smharper/openmc,paulromano/openmc,walshjon/openmc,smharper/openmc,walshjon/openmc,johnnyliu27/openmc,liangjg/openmc,amandalund/openmc,amandalund/openmc,paulromano/openmc,samuelshaner/openmc,kellyrowland/openmc,samuelshaner/openmc,mjlong/openmc,johnnyliu27/openmc,johnnyliu27/openmc,mit-crpg/openmc,samuelshaner/openmc
|
#!/usr/bin/env python
import sys
sys.path.insert(0, '..')
from testing_harness import *
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* and source* have been created."""
TestHarness._test_output_created(self)
source = glob.glob(os.path.join(os.getcwd(), 'source.*'))
assert len(source) == 1, 'Either multiple or no source files ' \
'exist.'
assert source[0].endswith('h5'), \
'Source file is not a HDF5 file.'
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.10.*')
harness.main()
Make test cleanup source file
|
#!/usr/bin/env python
import sys
sys.path.insert(0, '..')
from testing_harness import *
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* and source* have been created."""
TestHarness._test_output_created(self)
source = glob.glob(os.path.join(os.getcwd(), 'source.*'))
assert len(source) == 1, 'Either multiple or no source files ' \
'exist.'
assert source[0].endswith('h5'), \
'Source file is not a HDF5 file.'
def _cleanup(self):
TestHarness._cleanup(self)
output = glob.glob(os.path.join(os.getcwd(), 'source.*'))
for f in output:
if os.path.exists(f):
os.remove(f)
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.10.*')
harness.main()
|
<commit_before>#!/usr/bin/env python
import sys
sys.path.insert(0, '..')
from testing_harness import *
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* and source* have been created."""
TestHarness._test_output_created(self)
source = glob.glob(os.path.join(os.getcwd(), 'source.*'))
assert len(source) == 1, 'Either multiple or no source files ' \
'exist.'
assert source[0].endswith('h5'), \
'Source file is not a HDF5 file.'
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.10.*')
harness.main()
<commit_msg>Make test cleanup source file<commit_after>
|
#!/usr/bin/env python
import sys
sys.path.insert(0, '..')
from testing_harness import *
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* and source* have been created."""
TestHarness._test_output_created(self)
source = glob.glob(os.path.join(os.getcwd(), 'source.*'))
assert len(source) == 1, 'Either multiple or no source files ' \
'exist.'
assert source[0].endswith('h5'), \
'Source file is not a HDF5 file.'
def _cleanup(self):
TestHarness._cleanup(self)
output = glob.glob(os.path.join(os.getcwd(), 'source.*'))
for f in output:
if os.path.exists(f):
os.remove(f)
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.10.*')
harness.main()
|
#!/usr/bin/env python
import sys
sys.path.insert(0, '..')
from testing_harness import *
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* and source* have been created."""
TestHarness._test_output_created(self)
source = glob.glob(os.path.join(os.getcwd(), 'source.*'))
assert len(source) == 1, 'Either multiple or no source files ' \
'exist.'
assert source[0].endswith('h5'), \
'Source file is not a HDF5 file.'
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.10.*')
harness.main()
Make test cleanup source file#!/usr/bin/env python
import sys
sys.path.insert(0, '..')
from testing_harness import *
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* and source* have been created."""
TestHarness._test_output_created(self)
source = glob.glob(os.path.join(os.getcwd(), 'source.*'))
assert len(source) == 1, 'Either multiple or no source files ' \
'exist.'
assert source[0].endswith('h5'), \
'Source file is not a HDF5 file.'
def _cleanup(self):
TestHarness._cleanup(self)
output = glob.glob(os.path.join(os.getcwd(), 'source.*'))
for f in output:
if os.path.exists(f):
os.remove(f)
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.10.*')
harness.main()
|
<commit_before>#!/usr/bin/env python
import sys
sys.path.insert(0, '..')
from testing_harness import *
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* and source* have been created."""
TestHarness._test_output_created(self)
source = glob.glob(os.path.join(os.getcwd(), 'source.*'))
assert len(source) == 1, 'Either multiple or no source files ' \
'exist.'
assert source[0].endswith('h5'), \
'Source file is not a HDF5 file.'
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.10.*')
harness.main()
<commit_msg>Make test cleanup source file<commit_after>#!/usr/bin/env python
import sys
sys.path.insert(0, '..')
from testing_harness import *
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* and source* have been created."""
TestHarness._test_output_created(self)
source = glob.glob(os.path.join(os.getcwd(), 'source.*'))
assert len(source) == 1, 'Either multiple or no source files ' \
'exist.'
assert source[0].endswith('h5'), \
'Source file is not a HDF5 file.'
def _cleanup(self):
TestHarness._cleanup(self)
output = glob.glob(os.path.join(os.getcwd(), 'source.*'))
for f in output:
if os.path.exists(f):
os.remove(f)
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.10.*')
harness.main()
|
a4f78af5b2973b044337dc430118fc270e527220
|
allauth/socialaccount/providers/keycloak/provider.py
|
allauth/socialaccount/providers/keycloak/provider.py
|
# -*- coding: utf-8 -*-
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
|
# -*- coding: utf-8 -*-
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('preferred_username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
|
Use preferred_username claim for username
|
fix(keycloak): Use preferred_username claim for username
As per the OpenID Connect spec the standard username claim is
`preferred_username`.
By default Keycloak confirms to OpenID Connect spec and provides a
`preferred_username` claim, but no `username` claim in the profile
scope.
ref: https://openid.net/specs/openid-connect-basic-1_0-28.html#StandardClaims
|
Python
|
mit
|
pennersr/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth,rsalmaso/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth
|
# -*- coding: utf-8 -*-
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
fix(keycloak): Use preferred_username claim for username
As per the OpenID Connect spec the standard username claim is
`preferred_username`.
By default Keycloak confirms to OpenID Connect spec and provides a
`preferred_username` claim, but no `username` claim in the profile
scope.
ref: https://openid.net/specs/openid-connect-basic-1_0-28.html#StandardClaims
|
# -*- coding: utf-8 -*-
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('preferred_username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
|
<commit_before># -*- coding: utf-8 -*-
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
<commit_msg>fix(keycloak): Use preferred_username claim for username
As per the OpenID Connect spec the standard username claim is
`preferred_username`.
By default Keycloak confirms to OpenID Connect spec and provides a
`preferred_username` claim, but no `username` claim in the profile
scope.
ref: https://openid.net/specs/openid-connect-basic-1_0-28.html#StandardClaims<commit_after>
|
# -*- coding: utf-8 -*-
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('preferred_username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
|
# -*- coding: utf-8 -*-
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
fix(keycloak): Use preferred_username claim for username
As per the OpenID Connect spec the standard username claim is
`preferred_username`.
By default Keycloak confirms to OpenID Connect spec and provides a
`preferred_username` claim, but no `username` claim in the profile
scope.
ref: https://openid.net/specs/openid-connect-basic-1_0-28.html#StandardClaims# -*- coding: utf-8 -*-
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('preferred_username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
|
<commit_before># -*- coding: utf-8 -*-
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
<commit_msg>fix(keycloak): Use preferred_username claim for username
As per the OpenID Connect spec the standard username claim is
`preferred_username`.
By default Keycloak confirms to OpenID Connect spec and provides a
`preferred_username` claim, but no `username` claim in the profile
scope.
ref: https://openid.net/specs/openid-connect-basic-1_0-28.html#StandardClaims<commit_after># -*- coding: utf-8 -*-
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KeycloakAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
dflt = super(KeycloakAccount, self).to_str()
return self.account.extra_data.get('name', dflt)
class KeycloakProvider(OAuth2Provider):
id = 'keycloak'
name = 'Keycloak'
account_class = KeycloakAccount
def get_default_scope(self):
return ['openid', 'profile', 'email']
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
return dict(
email=data.get('email'),
username=data.get('preferred_username'),
name=data.get('name'),
user_id=data.get('user_id'),
picture=data.get('picture'),
)
provider_classes = [KeycloakProvider]
|
2f039066530533b1a8ae82076ed745c1f2e03688
|
app-tasks/rf/src/rf/uploads/geotiff/create_images.py
|
app-tasks/rf/src/rf/uploads/geotiff/create_images.py
|
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
create_geotiff_bands(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
|
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None, band_create_function=create_geotiff_bands):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
band_create_function (function): function to aid in creating bands for a geotiff
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
band_create_function(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
|
Add ability to define band create function for geotiff images
|
Add ability to define band create function for geotiff images
This commit makes defining bands for custom geotiffs more flexible by
allowing passing custom functions for defining bands for different
datasources or other variables - subsequent commits for MODIS take
advantage of this
|
Python
|
apache-2.0
|
raster-foundry/raster-foundry,aaronxsu/raster-foundry,azavea/raster-foundry,azavea/raster-foundry,aaronxsu/raster-foundry,aaronxsu/raster-foundry,azavea/raster-foundry,azavea/raster-foundry,azavea/raster-foundry,raster-foundry/raster-foundry,raster-foundry/raster-foundry,aaronxsu/raster-foundry
|
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
create_geotiff_bands(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
Add ability to define band create function for geotiff images
This commit makes defining bands for custom geotiffs more flexible by
allowing passing custom functions for defining bands for different
datasources or other variables - subsequent commits for MODIS take
advantage of this
|
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None, band_create_function=create_geotiff_bands):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
band_create_function (function): function to aid in creating bands for a geotiff
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
band_create_function(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
|
<commit_before>import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
create_geotiff_bands(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
<commit_msg>Add ability to define band create function for geotiff images
This commit makes defining bands for custom geotiffs more flexible by
allowing passing custom functions for defining bands for different
datasources or other variables - subsequent commits for MODIS take
advantage of this<commit_after>
|
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None, band_create_function=create_geotiff_bands):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
band_create_function (function): function to aid in creating bands for a geotiff
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
band_create_function(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
|
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
create_geotiff_bands(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
Add ability to define band create function for geotiff images
This commit makes defining bands for custom geotiffs more flexible by
allowing passing custom functions for defining bands for different
datasources or other variables - subsequent commits for MODIS take
advantage of thisimport os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None, band_create_function=create_geotiff_bands):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
band_create_function (function): function to aid in creating bands for a geotiff
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
band_create_function(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
|
<commit_before>import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
create_geotiff_bands(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
<commit_msg>Add ability to define band create function for geotiff images
This commit makes defining bands for custom geotiffs more flexible by
allowing passing custom functions for defining bands for different
datasources or other variables - subsequent commits for MODIS take
advantage of this<commit_after>import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None, band_create_function=create_geotiff_bands):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
band_create_function (function): function to aid in creating bands for a geotiff
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
band_create_function(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
|
a76741e81b2c9b9b91f9ffefe08784051f083d8e
|
stock_traceability_operation/models/stock_production_lot.py
|
stock_traceability_operation/models/stock_production_lot.py
|
# -*- coding: utf-8 -*-
# © 2015 Numérigraphe
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, exceptions
from openerp.tools.safe_eval import safe_eval
class StockProductionLot(models.Model):
_inherit = "stock.production.lot"
@api.multi
def action_traceability(self):
"""Replace the action on stock moves with an action on the report"""
action = super(StockProductionLot, self).action_traceability()
if action['res_model'] != 'stock.move':
raise exceptions.ValidationError(
"An incompatible module returned an action for the wrong "
"model.")
moves = self.env['stock.move'].search(safe_eval(action['domain']))
return moves.mapped('quant_ids').action_view_quant_history()
|
# -*- coding: utf-8 -*-
# © 2015 Numérigraphe
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, exceptions
from openerp.tools.safe_eval import safe_eval
class StockProductionLot(models.Model):
_inherit = "stock.production.lot"
@api.multi
def action_traceability(self):
"""Replace the action on stock moves with an action on the report"""
action = super(StockProductionLot, self).action_traceability()
if action['res_model'] != 'stock.move':
raise exceptions.ValidationError(
"An incompatible module returned an action for the wrong "
"model.")
moves = self.env['stock.move'].search(safe_eval(action['domain']))
return moves.mapped('quant_ids').filtered(
lambda x: x.lot_id in self).action_view_quant_history()
|
Fix a missing filter in lot traceability
|
Fix a missing filter in lot traceability
When we search for lot traceability we first search for the corresponding stock moves.
But whenever these moves contain extra quants, we need to filter the ones of the wrong lots.
|
Python
|
agpl-3.0
|
kmee/stock-logistics-warehouse,acsone/stock-logistics-warehouse,factorlibre/stock-logistics-warehouse,open-synergy/stock-logistics-warehouse
|
# -*- coding: utf-8 -*-
# © 2015 Numérigraphe
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, exceptions
from openerp.tools.safe_eval import safe_eval
class StockProductionLot(models.Model):
_inherit = "stock.production.lot"
@api.multi
def action_traceability(self):
"""Replace the action on stock moves with an action on the report"""
action = super(StockProductionLot, self).action_traceability()
if action['res_model'] != 'stock.move':
raise exceptions.ValidationError(
"An incompatible module returned an action for the wrong "
"model.")
moves = self.env['stock.move'].search(safe_eval(action['domain']))
return moves.mapped('quant_ids').action_view_quant_history()
Fix a missing filter in lot traceability
When we search for lot traceability we first search for the corresponding stock moves.
But whenever these moves contain extra quants, we need to filter the ones of the wrong lots.
|
# -*- coding: utf-8 -*-
# © 2015 Numérigraphe
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, exceptions
from openerp.tools.safe_eval import safe_eval
class StockProductionLot(models.Model):
_inherit = "stock.production.lot"
@api.multi
def action_traceability(self):
"""Replace the action on stock moves with an action on the report"""
action = super(StockProductionLot, self).action_traceability()
if action['res_model'] != 'stock.move':
raise exceptions.ValidationError(
"An incompatible module returned an action for the wrong "
"model.")
moves = self.env['stock.move'].search(safe_eval(action['domain']))
return moves.mapped('quant_ids').filtered(
lambda x: x.lot_id in self).action_view_quant_history()
|
<commit_before># -*- coding: utf-8 -*-
# © 2015 Numérigraphe
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, exceptions
from openerp.tools.safe_eval import safe_eval
class StockProductionLot(models.Model):
_inherit = "stock.production.lot"
@api.multi
def action_traceability(self):
"""Replace the action on stock moves with an action on the report"""
action = super(StockProductionLot, self).action_traceability()
if action['res_model'] != 'stock.move':
raise exceptions.ValidationError(
"An incompatible module returned an action for the wrong "
"model.")
moves = self.env['stock.move'].search(safe_eval(action['domain']))
return moves.mapped('quant_ids').action_view_quant_history()
<commit_msg>Fix a missing filter in lot traceability
When we search for lot traceability we first search for the corresponding stock moves.
But whenever these moves contain extra quants, we need to filter the ones of the wrong lots.<commit_after>
|
# -*- coding: utf-8 -*-
# © 2015 Numérigraphe
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, exceptions
from openerp.tools.safe_eval import safe_eval
class StockProductionLot(models.Model):
_inherit = "stock.production.lot"
@api.multi
def action_traceability(self):
"""Replace the action on stock moves with an action on the report"""
action = super(StockProductionLot, self).action_traceability()
if action['res_model'] != 'stock.move':
raise exceptions.ValidationError(
"An incompatible module returned an action for the wrong "
"model.")
moves = self.env['stock.move'].search(safe_eval(action['domain']))
return moves.mapped('quant_ids').filtered(
lambda x: x.lot_id in self).action_view_quant_history()
|
# -*- coding: utf-8 -*-
# © 2015 Numérigraphe
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, exceptions
from openerp.tools.safe_eval import safe_eval
class StockProductionLot(models.Model):
_inherit = "stock.production.lot"
@api.multi
def action_traceability(self):
"""Replace the action on stock moves with an action on the report"""
action = super(StockProductionLot, self).action_traceability()
if action['res_model'] != 'stock.move':
raise exceptions.ValidationError(
"An incompatible module returned an action for the wrong "
"model.")
moves = self.env['stock.move'].search(safe_eval(action['domain']))
return moves.mapped('quant_ids').action_view_quant_history()
Fix a missing filter in lot traceability
When we search for lot traceability we first search for the corresponding stock moves.
But whenever these moves contain extra quants, we need to filter the ones of the wrong lots.# -*- coding: utf-8 -*-
# © 2015 Numérigraphe
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, exceptions
from openerp.tools.safe_eval import safe_eval
class StockProductionLot(models.Model):
_inherit = "stock.production.lot"
@api.multi
def action_traceability(self):
"""Replace the action on stock moves with an action on the report"""
action = super(StockProductionLot, self).action_traceability()
if action['res_model'] != 'stock.move':
raise exceptions.ValidationError(
"An incompatible module returned an action for the wrong "
"model.")
moves = self.env['stock.move'].search(safe_eval(action['domain']))
return moves.mapped('quant_ids').filtered(
lambda x: x.lot_id in self).action_view_quant_history()
|
<commit_before># -*- coding: utf-8 -*-
# © 2015 Numérigraphe
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, exceptions
from openerp.tools.safe_eval import safe_eval
class StockProductionLot(models.Model):
_inherit = "stock.production.lot"
@api.multi
def action_traceability(self):
"""Replace the action on stock moves with an action on the report"""
action = super(StockProductionLot, self).action_traceability()
if action['res_model'] != 'stock.move':
raise exceptions.ValidationError(
"An incompatible module returned an action for the wrong "
"model.")
moves = self.env['stock.move'].search(safe_eval(action['domain']))
return moves.mapped('quant_ids').action_view_quant_history()
<commit_msg>Fix a missing filter in lot traceability
When we search for lot traceability we first search for the corresponding stock moves.
But whenever these moves contain extra quants, we need to filter the ones of the wrong lots.<commit_after># -*- coding: utf-8 -*-
# © 2015 Numérigraphe
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, exceptions
from openerp.tools.safe_eval import safe_eval
class StockProductionLot(models.Model):
_inherit = "stock.production.lot"
@api.multi
def action_traceability(self):
"""Replace the action on stock moves with an action on the report"""
action = super(StockProductionLot, self).action_traceability()
if action['res_model'] != 'stock.move':
raise exceptions.ValidationError(
"An incompatible module returned an action for the wrong "
"model.")
moves = self.env['stock.move'].search(safe_eval(action['domain']))
return moves.mapped('quant_ids').filtered(
lambda x: x.lot_id in self).action_view_quant_history()
|
2acb5a2eb7ae0a0f8ea8423a7da5a7a8b9f07151
|
fore/mailer.py
|
fore/mailer.py
|
# Import smtplib for the actual sending function
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
a_message = 'There is someting I need to tell you.'
AlertMessage(a_message)
|
# Import smtplib for the actual sending function
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
def test():
a_message = 'There is someting I need to tell you.'
AlertMessage(a_message)
|
Move test message into function.
|
Move test message into function.
|
Python
|
artistic-2.0
|
Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension
|
# Import smtplib for the actual sending function
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
a_message = 'There is someting I need to tell you.'
AlertMessage(a_message)Move test message into function.
|
# Import smtplib for the actual sending function
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
def test():
a_message = 'There is someting I need to tell you.'
AlertMessage(a_message)
|
<commit_before># Import smtplib for the actual sending function
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
a_message = 'There is someting I need to tell you.'
AlertMessage(a_message)<commit_msg>Move test message into function.<commit_after>
|
# Import smtplib for the actual sending function
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
def test():
a_message = 'There is someting I need to tell you.'
AlertMessage(a_message)
|
# Import smtplib for the actual sending function
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
a_message = 'There is someting I need to tell you.'
AlertMessage(a_message)Move test message into function.# Import smtplib for the actual sending function
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
def test():
a_message = 'There is someting I need to tell you.'
AlertMessage(a_message)
|
<commit_before># Import smtplib for the actual sending function
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
a_message = 'There is someting I need to tell you.'
AlertMessage(a_message)<commit_msg>Move test message into function.<commit_after># Import smtplib for the actual sending function
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
def test():
a_message = 'There is someting I need to tell you.'
AlertMessage(a_message)
|
52c04b66ca9474c8c7b33c8781a02e3573eb5676
|
main.py
|
main.py
|
import os, os.path
import string
import cherrypy
class StringGenerator(object):
@cherrypy.expose
def index(self):
return open('test-carte.html')
if __name__ == '__main__':
conf = {
'/': {
'tools.sessions.on': True,
'tools.staticdir.root': os.path.abspath(os.getcwd())
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': './public'
}
}
webapp = StringGenerator()
cherrypy.quickstart(webapp, '/', conf)
|
import os, os.path
import string
import cherrypy
class StringGenerator(object):
@cherrypy.expose
def index(self):
return open('test-carte.html')
if __name__ == '__main__':
conf = {
'/': {
'tools.sessions.on': True,
'tools.staticdir.root': os.path.abspath(os.getcwd())
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': './public'
}
}
webapp = StringGenerator()
cherrypy.config.update( {'server.socket_host': '0.0.0.0'} )
cherrypy.quickstart(webapp, '/', conf)
|
Change server.socket_host to allow acces over the network
|
Change server.socket_host to allow acces over the network
|
Python
|
mit
|
guillaume-havard/proto-map,guillaume-havard/proto-map,guillaume-havard/proto-map
|
import os, os.path
import string
import cherrypy
class StringGenerator(object):
@cherrypy.expose
def index(self):
return open('test-carte.html')
if __name__ == '__main__':
conf = {
'/': {
'tools.sessions.on': True,
'tools.staticdir.root': os.path.abspath(os.getcwd())
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': './public'
}
}
webapp = StringGenerator()
cherrypy.quickstart(webapp, '/', conf)
Change server.socket_host to allow acces over the network
|
import os, os.path
import string
import cherrypy
class StringGenerator(object):
@cherrypy.expose
def index(self):
return open('test-carte.html')
if __name__ == '__main__':
conf = {
'/': {
'tools.sessions.on': True,
'tools.staticdir.root': os.path.abspath(os.getcwd())
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': './public'
}
}
webapp = StringGenerator()
cherrypy.config.update( {'server.socket_host': '0.0.0.0'} )
cherrypy.quickstart(webapp, '/', conf)
|
<commit_before>import os, os.path
import string
import cherrypy
class StringGenerator(object):
@cherrypy.expose
def index(self):
return open('test-carte.html')
if __name__ == '__main__':
conf = {
'/': {
'tools.sessions.on': True,
'tools.staticdir.root': os.path.abspath(os.getcwd())
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': './public'
}
}
webapp = StringGenerator()
cherrypy.quickstart(webapp, '/', conf)
<commit_msg>Change server.socket_host to allow acces over the network<commit_after>
|
import os, os.path
import string
import cherrypy
class StringGenerator(object):
@cherrypy.expose
def index(self):
return open('test-carte.html')
if __name__ == '__main__':
conf = {
'/': {
'tools.sessions.on': True,
'tools.staticdir.root': os.path.abspath(os.getcwd())
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': './public'
}
}
webapp = StringGenerator()
cherrypy.config.update( {'server.socket_host': '0.0.0.0'} )
cherrypy.quickstart(webapp, '/', conf)
|
import os, os.path
import string
import cherrypy
class StringGenerator(object):
@cherrypy.expose
def index(self):
return open('test-carte.html')
if __name__ == '__main__':
conf = {
'/': {
'tools.sessions.on': True,
'tools.staticdir.root': os.path.abspath(os.getcwd())
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': './public'
}
}
webapp = StringGenerator()
cherrypy.quickstart(webapp, '/', conf)
Change server.socket_host to allow acces over the networkimport os, os.path
import string
import cherrypy
class StringGenerator(object):
@cherrypy.expose
def index(self):
return open('test-carte.html')
if __name__ == '__main__':
conf = {
'/': {
'tools.sessions.on': True,
'tools.staticdir.root': os.path.abspath(os.getcwd())
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': './public'
}
}
webapp = StringGenerator()
cherrypy.config.update( {'server.socket_host': '0.0.0.0'} )
cherrypy.quickstart(webapp, '/', conf)
|
<commit_before>import os, os.path
import string
import cherrypy
class StringGenerator(object):
@cherrypy.expose
def index(self):
return open('test-carte.html')
if __name__ == '__main__':
conf = {
'/': {
'tools.sessions.on': True,
'tools.staticdir.root': os.path.abspath(os.getcwd())
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': './public'
}
}
webapp = StringGenerator()
cherrypy.quickstart(webapp, '/', conf)
<commit_msg>Change server.socket_host to allow acces over the network<commit_after>import os, os.path
import string
import cherrypy
class StringGenerator(object):
@cherrypy.expose
def index(self):
return open('test-carte.html')
if __name__ == '__main__':
conf = {
'/': {
'tools.sessions.on': True,
'tools.staticdir.root': os.path.abspath(os.getcwd())
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': './public'
}
}
webapp = StringGenerator()
cherrypy.config.update( {'server.socket_host': '0.0.0.0'} )
cherrypy.quickstart(webapp, '/', conf)
|
4b630e8223f178ad25eef0e2ecf31f838445d2a0
|
nose2/tests/functional/test_coverage.py
|
nose2/tests/functional/test_coverage.py
|
import os.path
from nose2.tests._common import FunctionalTestCase
class TestCoverage(FunctionalTestCase):
def test_run(self):
proc = self.runIn(
'scenario/test_with_module',
'-v',
'--with-coverage',
'--coverage=lib/'
)
STATS = ' 8 5 38%'
stdout, stderr = proc.communicate()
self.assertTestRunOutputMatches(
proc,
stderr=os.path.join('lib', 'mod1.py').replace('\\', r'\\') + STATS)
self.assertTestRunOutputMatches(
proc,
stderr='TOTAL ' + STATS)
|
import os.path
import platform
from nose2.compat import unittest
from nose2.tests._common import FunctionalTestCase
class TestCoverage(FunctionalTestCase):
@unittest.skipIf(
platform.python_version_tuple()[:2] == ('3', '2'),
'coverage package does not support python 3.2')
def test_run(self):
proc = self.runIn(
'scenario/test_with_module',
'-v',
'--with-coverage',
'--coverage=lib/'
)
STATS = ' 8 5 38%'
stdout, stderr = proc.communicate()
self.assertTestRunOutputMatches(
proc,
stderr=os.path.join('lib', 'mod1.py').replace('\\', r'\\') + STATS)
self.assertTestRunOutputMatches(
proc,
stderr='TOTAL ' + STATS)
|
Disable coverage test on python3.2
|
Disable coverage test on python3.2
|
Python
|
bsd-2-clause
|
little-dude/nose2,ojengwa/nose2,little-dude/nose2,ptthiem/nose2,ojengwa/nose2,ptthiem/nose2
|
import os.path
from nose2.tests._common import FunctionalTestCase
class TestCoverage(FunctionalTestCase):
def test_run(self):
proc = self.runIn(
'scenario/test_with_module',
'-v',
'--with-coverage',
'--coverage=lib/'
)
STATS = ' 8 5 38%'
stdout, stderr = proc.communicate()
self.assertTestRunOutputMatches(
proc,
stderr=os.path.join('lib', 'mod1.py').replace('\\', r'\\') + STATS)
self.assertTestRunOutputMatches(
proc,
stderr='TOTAL ' + STATS)
Disable coverage test on python3.2
|
import os.path
import platform
from nose2.compat import unittest
from nose2.tests._common import FunctionalTestCase
class TestCoverage(FunctionalTestCase):
@unittest.skipIf(
platform.python_version_tuple()[:2] == ('3', '2'),
'coverage package does not support python 3.2')
def test_run(self):
proc = self.runIn(
'scenario/test_with_module',
'-v',
'--with-coverage',
'--coverage=lib/'
)
STATS = ' 8 5 38%'
stdout, stderr = proc.communicate()
self.assertTestRunOutputMatches(
proc,
stderr=os.path.join('lib', 'mod1.py').replace('\\', r'\\') + STATS)
self.assertTestRunOutputMatches(
proc,
stderr='TOTAL ' + STATS)
|
<commit_before>import os.path
from nose2.tests._common import FunctionalTestCase
class TestCoverage(FunctionalTestCase):
def test_run(self):
proc = self.runIn(
'scenario/test_with_module',
'-v',
'--with-coverage',
'--coverage=lib/'
)
STATS = ' 8 5 38%'
stdout, stderr = proc.communicate()
self.assertTestRunOutputMatches(
proc,
stderr=os.path.join('lib', 'mod1.py').replace('\\', r'\\') + STATS)
self.assertTestRunOutputMatches(
proc,
stderr='TOTAL ' + STATS)
<commit_msg>Disable coverage test on python3.2<commit_after>
|
import os.path
import platform
from nose2.compat import unittest
from nose2.tests._common import FunctionalTestCase
class TestCoverage(FunctionalTestCase):
@unittest.skipIf(
platform.python_version_tuple()[:2] == ('3', '2'),
'coverage package does not support python 3.2')
def test_run(self):
proc = self.runIn(
'scenario/test_with_module',
'-v',
'--with-coverage',
'--coverage=lib/'
)
STATS = ' 8 5 38%'
stdout, stderr = proc.communicate()
self.assertTestRunOutputMatches(
proc,
stderr=os.path.join('lib', 'mod1.py').replace('\\', r'\\') + STATS)
self.assertTestRunOutputMatches(
proc,
stderr='TOTAL ' + STATS)
|
import os.path
from nose2.tests._common import FunctionalTestCase
class TestCoverage(FunctionalTestCase):
def test_run(self):
proc = self.runIn(
'scenario/test_with_module',
'-v',
'--with-coverage',
'--coverage=lib/'
)
STATS = ' 8 5 38%'
stdout, stderr = proc.communicate()
self.assertTestRunOutputMatches(
proc,
stderr=os.path.join('lib', 'mod1.py').replace('\\', r'\\') + STATS)
self.assertTestRunOutputMatches(
proc,
stderr='TOTAL ' + STATS)
Disable coverage test on python3.2import os.path
import platform
from nose2.compat import unittest
from nose2.tests._common import FunctionalTestCase
class TestCoverage(FunctionalTestCase):
@unittest.skipIf(
platform.python_version_tuple()[:2] == ('3', '2'),
'coverage package does not support python 3.2')
def test_run(self):
proc = self.runIn(
'scenario/test_with_module',
'-v',
'--with-coverage',
'--coverage=lib/'
)
STATS = ' 8 5 38%'
stdout, stderr = proc.communicate()
self.assertTestRunOutputMatches(
proc,
stderr=os.path.join('lib', 'mod1.py').replace('\\', r'\\') + STATS)
self.assertTestRunOutputMatches(
proc,
stderr='TOTAL ' + STATS)
|
<commit_before>import os.path
from nose2.tests._common import FunctionalTestCase
class TestCoverage(FunctionalTestCase):
def test_run(self):
proc = self.runIn(
'scenario/test_with_module',
'-v',
'--with-coverage',
'--coverage=lib/'
)
STATS = ' 8 5 38%'
stdout, stderr = proc.communicate()
self.assertTestRunOutputMatches(
proc,
stderr=os.path.join('lib', 'mod1.py').replace('\\', r'\\') + STATS)
self.assertTestRunOutputMatches(
proc,
stderr='TOTAL ' + STATS)
<commit_msg>Disable coverage test on python3.2<commit_after>import os.path
import platform
from nose2.compat import unittest
from nose2.tests._common import FunctionalTestCase
class TestCoverage(FunctionalTestCase):
@unittest.skipIf(
platform.python_version_tuple()[:2] == ('3', '2'),
'coverage package does not support python 3.2')
def test_run(self):
proc = self.runIn(
'scenario/test_with_module',
'-v',
'--with-coverage',
'--coverage=lib/'
)
STATS = ' 8 5 38%'
stdout, stderr = proc.communicate()
self.assertTestRunOutputMatches(
proc,
stderr=os.path.join('lib', 'mod1.py').replace('\\', r'\\') + STATS)
self.assertTestRunOutputMatches(
proc,
stderr='TOTAL ' + STATS)
|
e677e01c3046efebcdbee9fe68cd4896a58d60bf
|
vumi/middleware/__init__.py
|
vumi/middleware/__init__.py
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
TransportMiddleware, ApplicationMiddleware, MiddlewareStack,
create_middlewares_from_config, setup_middlewares_from_config)
__all__ = [
'TransportMiddleware', 'ApplicationMiddleware', 'MiddlewareStack',
'create_middlewares_from_config', 'setup_middlewares_from_config']
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
BaseMiddleware, TransportMiddleware, ApplicationMiddleware,
MiddlewareStack, create_middlewares_from_config,
setup_middlewares_from_config)
__all__ = [
'BaseMiddleware', 'TransportMiddleware', 'ApplicationMiddleware',
'MiddlewareStack', 'create_middlewares_from_config',
'setup_middlewares_from_config']
|
Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications.
|
Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications.
|
Python
|
bsd-3-clause
|
TouK/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,TouK/vumi
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
TransportMiddleware, ApplicationMiddleware, MiddlewareStack,
create_middlewares_from_config, setup_middlewares_from_config)
__all__ = [
'TransportMiddleware', 'ApplicationMiddleware', 'MiddlewareStack',
'create_middlewares_from_config', 'setup_middlewares_from_config']
Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications.
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
BaseMiddleware, TransportMiddleware, ApplicationMiddleware,
MiddlewareStack, create_middlewares_from_config,
setup_middlewares_from_config)
__all__ = [
'BaseMiddleware', 'TransportMiddleware', 'ApplicationMiddleware',
'MiddlewareStack', 'create_middlewares_from_config',
'setup_middlewares_from_config']
|
<commit_before>"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
TransportMiddleware, ApplicationMiddleware, MiddlewareStack,
create_middlewares_from_config, setup_middlewares_from_config)
__all__ = [
'TransportMiddleware', 'ApplicationMiddleware', 'MiddlewareStack',
'create_middlewares_from_config', 'setup_middlewares_from_config']
<commit_msg>Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications.<commit_after>
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
BaseMiddleware, TransportMiddleware, ApplicationMiddleware,
MiddlewareStack, create_middlewares_from_config,
setup_middlewares_from_config)
__all__ = [
'BaseMiddleware', 'TransportMiddleware', 'ApplicationMiddleware',
'MiddlewareStack', 'create_middlewares_from_config',
'setup_middlewares_from_config']
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
TransportMiddleware, ApplicationMiddleware, MiddlewareStack,
create_middlewares_from_config, setup_middlewares_from_config)
__all__ = [
'TransportMiddleware', 'ApplicationMiddleware', 'MiddlewareStack',
'create_middlewares_from_config', 'setup_middlewares_from_config']
Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications."""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
BaseMiddleware, TransportMiddleware, ApplicationMiddleware,
MiddlewareStack, create_middlewares_from_config,
setup_middlewares_from_config)
__all__ = [
'BaseMiddleware', 'TransportMiddleware', 'ApplicationMiddleware',
'MiddlewareStack', 'create_middlewares_from_config',
'setup_middlewares_from_config']
|
<commit_before>"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
TransportMiddleware, ApplicationMiddleware, MiddlewareStack,
create_middlewares_from_config, setup_middlewares_from_config)
__all__ = [
'TransportMiddleware', 'ApplicationMiddleware', 'MiddlewareStack',
'create_middlewares_from_config', 'setup_middlewares_from_config']
<commit_msg>Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications.<commit_after>"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
BaseMiddleware, TransportMiddleware, ApplicationMiddleware,
MiddlewareStack, create_middlewares_from_config,
setup_middlewares_from_config)
__all__ = [
'BaseMiddleware', 'TransportMiddleware', 'ApplicationMiddleware',
'MiddlewareStack', 'create_middlewares_from_config',
'setup_middlewares_from_config']
|
4946ae0305a6add9247149784cea62823272b39e
|
seleniumlogin/__init__.py
|
seleniumlogin/__init__.py
|
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/'
}
driver.add_cookie(cookie)
driver.refresh()
|
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.split(':')[-2].split('/')[-1]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
|
Add domain to cookie to set cookie for PhantomJS
|
Add domain to cookie to set cookie for PhantomJS
|
Python
|
mit
|
feffe/django-selenium-login,feffe/django-selenium-login
|
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/'
}
driver.add_cookie(cookie)
driver.refresh()
Add domain to cookie to set cookie for PhantomJS
|
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.split(':')[-2].split('/')[-1]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
|
<commit_before>from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/'
}
driver.add_cookie(cookie)
driver.refresh()
<commit_msg>Add domain to cookie to set cookie for PhantomJS<commit_after>
|
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.split(':')[-2].split('/')[-1]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
|
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/'
}
driver.add_cookie(cookie)
driver.refresh()
Add domain to cookie to set cookie for PhantomJSfrom importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.split(':')[-2].split('/')[-1]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
|
<commit_before>from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/'
}
driver.add_cookie(cookie)
driver.refresh()
<commit_msg>Add domain to cookie to set cookie for PhantomJS<commit_after>from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.split(':')[-2].split('/')[-1]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
|
ff725b4ae24c58cb126c1d49ce58a69d9b32d3b0
|
app/soc/models/timeline.py
|
app/soc/models/timeline.py
|
#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Timeline Model.
"""
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
|
#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Timeline Model.
"""
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
program_end.help_text = ugettext(
'After this date no data (such as profiles and forms) can be changed.')
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
|
Add help text for program_end date.
|
Add help text for program_end date.
Fixes 1411.
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Timeline Model.
"""
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
Add help text for program_end date.
Fixes 1411.
|
#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Timeline Model.
"""
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
program_end.help_text = ugettext(
'After this date no data (such as profiles and forms) can be changed.')
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
|
<commit_before>#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Timeline Model.
"""
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
<commit_msg>Add help text for program_end date.
Fixes 1411.<commit_after>
|
#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Timeline Model.
"""
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
program_end.help_text = ugettext(
'After this date no data (such as profiles and forms) can be changed.')
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
|
#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Timeline Model.
"""
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
Add help text for program_end date.
Fixes 1411.#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Timeline Model.
"""
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
program_end.help_text = ugettext(
'After this date no data (such as profiles and forms) can be changed.')
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
|
<commit_before>#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Timeline Model.
"""
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
<commit_msg>Add help text for program_end date.
Fixes 1411.<commit_after>#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Timeline Model.
"""
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import linkable
class Timeline(linkable.Linkable):
"""The Timeline Model, representing the timeline for a Program.
"""
program_start = db.DateTimeProperty(
verbose_name=ugettext('Program Start date'))
program_end = db.DateTimeProperty(
verbose_name=ugettext('Program End date'))
program_end.help_text = ugettext(
'After this date no data (such as profiles and forms) can be changed.')
accepted_organization_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Organizations Announced Deadline'))
student_signup_start = db.DateTimeProperty(
verbose_name=ugettext('Student Signup Start date'))
student_signup_end = db.DateTimeProperty(
verbose_name=ugettext('Student Signup End date'))
|
73d302a2e8ee2c8dfadb328deba07d1c8cd57438
|
yarn_api_client/__init__.py
|
yarn_api_client/__init__.py
|
# -*- coding: utf-8 -*-
__version__ = '1.0.1'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
# -*- coding: utf-8 -*-
__version__ = '2.0.0.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
Prepare for next development iteration
|
Prepare for next development iteration
|
Python
|
bsd-3-clause
|
toidi/hadoop-yarn-api-python-client
|
# -*- coding: utf-8 -*-
__version__ = '1.0.1'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
Prepare for next development iteration
|
# -*- coding: utf-8 -*-
__version__ = '2.0.0.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '1.0.1'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
<commit_msg>Prepare for next development iteration<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = '2.0.0.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
# -*- coding: utf-8 -*-
__version__ = '1.0.1'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
Prepare for next development iteration# -*- coding: utf-8 -*-
__version__ = '2.0.0.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '1.0.1'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
<commit_msg>Prepare for next development iteration<commit_after># -*- coding: utf-8 -*-
__version__ = '2.0.0.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
d245bd41808879be6637acfd7460633c0c7dfdd6
|
yarn_api_client/__init__.py
|
yarn_api_client/__init__.py
|
# -*- coding: utf-8 -*-
__version__ = '0.3.3'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager','ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
# -*- coding: utf-8 -*-
__version__ = '0.3.4.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager','ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
Prepare for next development iteration
|
Prepare for next development iteration
|
Python
|
bsd-3-clause
|
toidi/hadoop-yarn-api-python-client
|
# -*- coding: utf-8 -*-
__version__ = '0.3.3'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager','ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
Prepare for next development iteration
|
# -*- coding: utf-8 -*-
__version__ = '0.3.4.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager','ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '0.3.3'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager','ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
<commit_msg>Prepare for next development iteration<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = '0.3.4.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager','ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
# -*- coding: utf-8 -*-
__version__ = '0.3.3'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager','ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
Prepare for next development iteration# -*- coding: utf-8 -*-
__version__ = '0.3.4.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager','ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '0.3.3'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager','ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
<commit_msg>Prepare for next development iteration<commit_after># -*- coding: utf-8 -*-
__version__ = '0.3.4.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager','ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
3a428dea9a27709e50bcf84666df6281a0337691
|
httphandler.py
|
httphandler.py
|
from BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
class HTTPRequest(BaseHTTPRequestHandler):
"""
This class is just an incapsulation of BaseHTTPRequestHandler, so it can be
created from string.
Code from:
http://stackoverflow.com/questions/2115410/does-python-have-a-module-for-parsing-http-requests-and-responses
print request.command # "GET"
print request.path # "/who/ken/trust.html"
print request.request_version # "HTTP/1.1"
print len(request.headers) # 3
print request.headers.keys() # ['accept-charset', 'host', 'accept']
print request.headers['host'] # "cm.bell-labs.com"
"""
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
def send_error(self, code, message):
self.error_code = code
self.error_message = message
|
from BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
class HTTPRequest(BaseHTTPRequestHandler):
"""
This class is just an incapsulation of BaseHTTPRequestHandler, so it can be
created from string.
Code from:
http://stackoverflow.com/questions/2115410/does-python-have-a-module-for-parsing-http-requests-and-responses
print request.command # "GET"
print request.path # "/who/ken/trust.html"
print request.request_version # "HTTP/1.1"
print len(request.headers) # 3
print request.headers.keys() # ['accept-charset', 'host', 'accept']
print request.headers['host'] # "cm.bell-labs.com"
"""
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
self.data = request_text
def send_error(self, code, message):
self.error_code = code
self.error_message = message
|
Add raw request field to request
|
Add raw request field to request
|
Python
|
mit
|
Zloool/manyfaced-honeypot
|
from BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
class HTTPRequest(BaseHTTPRequestHandler):
"""
This class is just an incapsulation of BaseHTTPRequestHandler, so it can be
created from string.
Code from:
http://stackoverflow.com/questions/2115410/does-python-have-a-module-for-parsing-http-requests-and-responses
print request.command # "GET"
print request.path # "/who/ken/trust.html"
print request.request_version # "HTTP/1.1"
print len(request.headers) # 3
print request.headers.keys() # ['accept-charset', 'host', 'accept']
print request.headers['host'] # "cm.bell-labs.com"
"""
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
def send_error(self, code, message):
self.error_code = code
self.error_message = message
Add raw request field to request
|
from BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
class HTTPRequest(BaseHTTPRequestHandler):
"""
This class is just an incapsulation of BaseHTTPRequestHandler, so it can be
created from string.
Code from:
http://stackoverflow.com/questions/2115410/does-python-have-a-module-for-parsing-http-requests-and-responses
print request.command # "GET"
print request.path # "/who/ken/trust.html"
print request.request_version # "HTTP/1.1"
print len(request.headers) # 3
print request.headers.keys() # ['accept-charset', 'host', 'accept']
print request.headers['host'] # "cm.bell-labs.com"
"""
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
self.data = request_text
def send_error(self, code, message):
self.error_code = code
self.error_message = message
|
<commit_before>from BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
class HTTPRequest(BaseHTTPRequestHandler):
"""
This class is just an incapsulation of BaseHTTPRequestHandler, so it can be
created from string.
Code from:
http://stackoverflow.com/questions/2115410/does-python-have-a-module-for-parsing-http-requests-and-responses
print request.command # "GET"
print request.path # "/who/ken/trust.html"
print request.request_version # "HTTP/1.1"
print len(request.headers) # 3
print request.headers.keys() # ['accept-charset', 'host', 'accept']
print request.headers['host'] # "cm.bell-labs.com"
"""
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
def send_error(self, code, message):
self.error_code = code
self.error_message = message
<commit_msg>Add raw request field to request<commit_after>
|
from BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
class HTTPRequest(BaseHTTPRequestHandler):
"""
This class is just an incapsulation of BaseHTTPRequestHandler, so it can be
created from string.
Code from:
http://stackoverflow.com/questions/2115410/does-python-have-a-module-for-parsing-http-requests-and-responses
print request.command # "GET"
print request.path # "/who/ken/trust.html"
print request.request_version # "HTTP/1.1"
print len(request.headers) # 3
print request.headers.keys() # ['accept-charset', 'host', 'accept']
print request.headers['host'] # "cm.bell-labs.com"
"""
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
self.data = request_text
def send_error(self, code, message):
self.error_code = code
self.error_message = message
|
from BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
class HTTPRequest(BaseHTTPRequestHandler):
"""
This class is just an incapsulation of BaseHTTPRequestHandler, so it can be
created from string.
Code from:
http://stackoverflow.com/questions/2115410/does-python-have-a-module-for-parsing-http-requests-and-responses
print request.command # "GET"
print request.path # "/who/ken/trust.html"
print request.request_version # "HTTP/1.1"
print len(request.headers) # 3
print request.headers.keys() # ['accept-charset', 'host', 'accept']
print request.headers['host'] # "cm.bell-labs.com"
"""
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
def send_error(self, code, message):
self.error_code = code
self.error_message = message
Add raw request field to requestfrom BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
class HTTPRequest(BaseHTTPRequestHandler):
"""
This class is just an incapsulation of BaseHTTPRequestHandler, so it can be
created from string.
Code from:
http://stackoverflow.com/questions/2115410/does-python-have-a-module-for-parsing-http-requests-and-responses
print request.command # "GET"
print request.path # "/who/ken/trust.html"
print request.request_version # "HTTP/1.1"
print len(request.headers) # 3
print request.headers.keys() # ['accept-charset', 'host', 'accept']
print request.headers['host'] # "cm.bell-labs.com"
"""
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
self.data = request_text
def send_error(self, code, message):
self.error_code = code
self.error_message = message
|
<commit_before>from BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
class HTTPRequest(BaseHTTPRequestHandler):
"""
This class is just an incapsulation of BaseHTTPRequestHandler, so it can be
created from string.
Code from:
http://stackoverflow.com/questions/2115410/does-python-have-a-module-for-parsing-http-requests-and-responses
print request.command # "GET"
print request.path # "/who/ken/trust.html"
print request.request_version # "HTTP/1.1"
print len(request.headers) # 3
print request.headers.keys() # ['accept-charset', 'host', 'accept']
print request.headers['host'] # "cm.bell-labs.com"
"""
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
def send_error(self, code, message):
self.error_code = code
self.error_message = message
<commit_msg>Add raw request field to request<commit_after>from BaseHTTPServer import BaseHTTPRequestHandler
from StringIO import StringIO
class HTTPRequest(BaseHTTPRequestHandler):
"""
This class is just an incapsulation of BaseHTTPRequestHandler, so it can be
created from string.
Code from:
http://stackoverflow.com/questions/2115410/does-python-have-a-module-for-parsing-http-requests-and-responses
print request.command # "GET"
print request.path # "/who/ken/trust.html"
print request.request_version # "HTTP/1.1"
print len(request.headers) # 3
print request.headers.keys() # ['accept-charset', 'host', 'accept']
print request.headers['host'] # "cm.bell-labs.com"
"""
def __init__(self, request_text):
self.rfile = StringIO(request_text)
self.raw_requestline = self.rfile.readline()
self.error_code = self.error_message = None
self.parse_request()
self.data = request_text
def send_error(self, code, message):
self.error_code = code
self.error_message = message
|
498e23919b09bfd782da4bb52f19f7c21aa14277
|
plantcv/plantcv/color_palette.py
|
plantcv/plantcv/color_palette.py
|
# Color palette returns an array of colors (rainbow)
import numpy as np
from plantcv.plantcv import params
def color_palette(num):
"""color_palette: Returns a list of colors length num
Inputs:
num = number of colors to return.
Returns:
colors = a list of color lists (RGB values)
:param num: int
:return colors: list
"""
from matplotlib import pyplot as plt
# If a previous palette is saved, return it
if params.saved_color_scale is not None:
return params.saved_color_scale
else:
# Retrieve the matplotlib colormap
cmap = plt.get_cmap(params.color_scale)
# Get num evenly spaced colors
colors = cmap(np.linspace(0, 1, num), bytes=True)
colors = colors[:, 0:3].tolist()
# colors are sequential, if params.color_sequence is random then shuffle the colors
if params.color_sequence == "random":
np.random.shuffle(colors)
# Save the color scale for further use
params.saved_color_scale = colors
return colors
|
# Color palette returns an array of colors (rainbow)
from matplotlib import pyplot as plt
import numpy as np
from plantcv.plantcv import params
def color_palette(num):
"""color_palette: Returns a list of colors length num
Inputs:
num = number of colors to return.
Returns:
colors = a list of color lists (RGB values)
:param num: int
:return colors: list
"""
# If a previous palette is saved, return it
if params.saved_color_scale is not None:
return params.saved_color_scale
else:
# Retrieve the matplotlib colormap
cmap = plt.get_cmap(params.color_scale)
# Get num evenly spaced colors
colors = cmap(np.linspace(0, 1, num), bytes=True)
colors = colors[:, 0:3].tolist()
# colors are sequential, if params.color_sequence is random then shuffle the colors
if params.color_sequence == "random":
np.random.shuffle(colors)
# Save the color scale for further use
params.saved_color_scale = colors
return colors
|
Move import back to the top
|
Move import back to the top
|
Python
|
mit
|
stiphyMT/plantcv,danforthcenter/plantcv,danforthcenter/plantcv,stiphyMT/plantcv,stiphyMT/plantcv,danforthcenter/plantcv
|
# Color palette returns an array of colors (rainbow)
import numpy as np
from plantcv.plantcv import params
def color_palette(num):
"""color_palette: Returns a list of colors length num
Inputs:
num = number of colors to return.
Returns:
colors = a list of color lists (RGB values)
:param num: int
:return colors: list
"""
from matplotlib import pyplot as plt
# If a previous palette is saved, return it
if params.saved_color_scale is not None:
return params.saved_color_scale
else:
# Retrieve the matplotlib colormap
cmap = plt.get_cmap(params.color_scale)
# Get num evenly spaced colors
colors = cmap(np.linspace(0, 1, num), bytes=True)
colors = colors[:, 0:3].tolist()
# colors are sequential, if params.color_sequence is random then shuffle the colors
if params.color_sequence == "random":
np.random.shuffle(colors)
# Save the color scale for further use
params.saved_color_scale = colors
return colors
Move import back to the top
|
# Color palette returns an array of colors (rainbow)
from matplotlib import pyplot as plt
import numpy as np
from plantcv.plantcv import params
def color_palette(num):
"""color_palette: Returns a list of colors length num
Inputs:
num = number of colors to return.
Returns:
colors = a list of color lists (RGB values)
:param num: int
:return colors: list
"""
# If a previous palette is saved, return it
if params.saved_color_scale is not None:
return params.saved_color_scale
else:
# Retrieve the matplotlib colormap
cmap = plt.get_cmap(params.color_scale)
# Get num evenly spaced colors
colors = cmap(np.linspace(0, 1, num), bytes=True)
colors = colors[:, 0:3].tolist()
# colors are sequential, if params.color_sequence is random then shuffle the colors
if params.color_sequence == "random":
np.random.shuffle(colors)
# Save the color scale for further use
params.saved_color_scale = colors
return colors
|
<commit_before># Color palette returns an array of colors (rainbow)
import numpy as np
from plantcv.plantcv import params
def color_palette(num):
"""color_palette: Returns a list of colors length num
Inputs:
num = number of colors to return.
Returns:
colors = a list of color lists (RGB values)
:param num: int
:return colors: list
"""
from matplotlib import pyplot as plt
# If a previous palette is saved, return it
if params.saved_color_scale is not None:
return params.saved_color_scale
else:
# Retrieve the matplotlib colormap
cmap = plt.get_cmap(params.color_scale)
# Get num evenly spaced colors
colors = cmap(np.linspace(0, 1, num), bytes=True)
colors = colors[:, 0:3].tolist()
# colors are sequential, if params.color_sequence is random then shuffle the colors
if params.color_sequence == "random":
np.random.shuffle(colors)
# Save the color scale for further use
params.saved_color_scale = colors
return colors
<commit_msg>Move import back to the top<commit_after>
|
# Color palette returns an array of colors (rainbow)
from matplotlib import pyplot as plt
import numpy as np
from plantcv.plantcv import params
def color_palette(num):
"""color_palette: Returns a list of colors length num
Inputs:
num = number of colors to return.
Returns:
colors = a list of color lists (RGB values)
:param num: int
:return colors: list
"""
# If a previous palette is saved, return it
if params.saved_color_scale is not None:
return params.saved_color_scale
else:
# Retrieve the matplotlib colormap
cmap = plt.get_cmap(params.color_scale)
# Get num evenly spaced colors
colors = cmap(np.linspace(0, 1, num), bytes=True)
colors = colors[:, 0:3].tolist()
# colors are sequential, if params.color_sequence is random then shuffle the colors
if params.color_sequence == "random":
np.random.shuffle(colors)
# Save the color scale for further use
params.saved_color_scale = colors
return colors
|
# Color palette returns an array of colors (rainbow)
import numpy as np
from plantcv.plantcv import params
def color_palette(num):
"""color_palette: Returns a list of colors length num
Inputs:
num = number of colors to return.
Returns:
colors = a list of color lists (RGB values)
:param num: int
:return colors: list
"""
from matplotlib import pyplot as plt
# If a previous palette is saved, return it
if params.saved_color_scale is not None:
return params.saved_color_scale
else:
# Retrieve the matplotlib colormap
cmap = plt.get_cmap(params.color_scale)
# Get num evenly spaced colors
colors = cmap(np.linspace(0, 1, num), bytes=True)
colors = colors[:, 0:3].tolist()
# colors are sequential, if params.color_sequence is random then shuffle the colors
if params.color_sequence == "random":
np.random.shuffle(colors)
# Save the color scale for further use
params.saved_color_scale = colors
return colors
Move import back to the top# Color palette returns an array of colors (rainbow)
from matplotlib import pyplot as plt
import numpy as np
from plantcv.plantcv import params
def color_palette(num):
"""color_palette: Returns a list of colors length num
Inputs:
num = number of colors to return.
Returns:
colors = a list of color lists (RGB values)
:param num: int
:return colors: list
"""
# If a previous palette is saved, return it
if params.saved_color_scale is not None:
return params.saved_color_scale
else:
# Retrieve the matplotlib colormap
cmap = plt.get_cmap(params.color_scale)
# Get num evenly spaced colors
colors = cmap(np.linspace(0, 1, num), bytes=True)
colors = colors[:, 0:3].tolist()
# colors are sequential, if params.color_sequence is random then shuffle the colors
if params.color_sequence == "random":
np.random.shuffle(colors)
# Save the color scale for further use
params.saved_color_scale = colors
return colors
|
<commit_before># Color palette returns an array of colors (rainbow)
import numpy as np
from plantcv.plantcv import params
def color_palette(num):
"""color_palette: Returns a list of colors length num
Inputs:
num = number of colors to return.
Returns:
colors = a list of color lists (RGB values)
:param num: int
:return colors: list
"""
from matplotlib import pyplot as plt
# If a previous palette is saved, return it
if params.saved_color_scale is not None:
return params.saved_color_scale
else:
# Retrieve the matplotlib colormap
cmap = plt.get_cmap(params.color_scale)
# Get num evenly spaced colors
colors = cmap(np.linspace(0, 1, num), bytes=True)
colors = colors[:, 0:3].tolist()
# colors are sequential, if params.color_sequence is random then shuffle the colors
if params.color_sequence == "random":
np.random.shuffle(colors)
# Save the color scale for further use
params.saved_color_scale = colors
return colors
<commit_msg>Move import back to the top<commit_after># Color palette returns an array of colors (rainbow)
from matplotlib import pyplot as plt
import numpy as np
from plantcv.plantcv import params
def color_palette(num):
"""color_palette: Returns a list of colors length num
Inputs:
num = number of colors to return.
Returns:
colors = a list of color lists (RGB values)
:param num: int
:return colors: list
"""
# If a previous palette is saved, return it
if params.saved_color_scale is not None:
return params.saved_color_scale
else:
# Retrieve the matplotlib colormap
cmap = plt.get_cmap(params.color_scale)
# Get num evenly spaced colors
colors = cmap(np.linspace(0, 1, num), bytes=True)
colors = colors[:, 0:3].tolist()
# colors are sequential, if params.color_sequence is random then shuffle the colors
if params.color_sequence == "random":
np.random.shuffle(colors)
# Save the color scale for further use
params.saved_color_scale = colors
return colors
|
2b8da13d4a5495082553e94047dbbd78a07905fc
|
insert-temp.py
|
insert-temp.py
|
#!/usr/bin/python
import string
import sqlite3 as lite
import sys
from random import randint
def getTemp():
tempFile = open('/sys/bus/w1/devices/28-00000529fbad/w1_slave', 'r')
contents = tempFile.read()
contentsList = string.split(contents)
temp = contentsList[-1]
tempFile.close()
return temp
def insertTemp():
temp = getTemp()
with connection:
cursor = connection.cursor();
cursor.execute('INSERT INTO temp(temp_instance) VALUES(?)', (temp,))
connection.commit()
def getData():
with connection:
cursor = connection.cursor()
cursor.execute('SELECT * FROM temp')
data = cursor.fetchall()
print data
if __name__ == '__main__':
connection = lite.connect('weather.db')
insertTemp()
getData()
|
#!/usr/bin/python
"""Read the current temperature from a 1-wire sensor and store it in SQLite."""
import string
import sqlite3 as lite
import sys
from random import randint
# NOTE(review): string/sys/randint appear unused in this script; kept in case
# other code in the file relies on them.


def getTemp():
    """Return the raw temperature token read from the 1-wire sysfs file.

    The last whitespace-separated token of w1_slave, e.g. 't=23125'
    (millidegrees Celsius) — TODO confirm expected format downstream.
    """
    # 'with' guarantees the sensor file is closed even if read() raises;
    # str.split replaces the deprecated string.split(contents).
    with open('/sys/bus/w1/devices/28-00000529fbad/w1_slave', 'r') as tempFile:
        contents = tempFile.read()
    return contents.split()[-1]


def insertTemp():
    """Insert the current sensor reading into the temp table and echo it."""
    temp = getTemp()
    # 'with connection' commits on success (and rolls back on error),
    # so no explicit commit() is needed.
    with connection:
        cursor = connection.cursor()
        # Parameterized query avoids quoting/injection issues.
        cursor.execute('INSERT INTO temp(temp_instance) VALUES(?)', (temp,))
    # print(...) call form works under both Python 2 and 3.
    print(temp)


if __name__ == '__main__':
    connection = lite.connect('weather.db')
    insertTemp()
|
Remove Pull of Data from DB
|
Remove Pull of Data from DB
|
Python
|
mit
|
eddturtle/WeatherPi,eddturtle/WeatherPi
|
#!/usr/bin/python
import string
import sqlite3 as lite
import sys
from random import randint
def getTemp():
tempFile = open('/sys/bus/w1/devices/28-00000529fbad/w1_slave', 'r')
contents = tempFile.read()
contentsList = string.split(contents)
temp = contentsList[-1]
tempFile.close()
return temp
def insertTemp():
temp = getTemp()
with connection:
cursor = connection.cursor();
cursor.execute('INSERT INTO temp(temp_instance) VALUES(?)', (temp,))
connection.commit()
def getData():
with connection:
cursor = connection.cursor()
cursor.execute('SELECT * FROM temp')
data = cursor.fetchall()
print data
if __name__ == '__main__':
connection = lite.connect('weather.db')
insertTemp()
getData()
Remove Pull of Data from DB
|
#!/usr/bin/python
import string
import sqlite3 as lite
import sys
from random import randint
def getTemp():
tempFile = open('/sys/bus/w1/devices/28-00000529fbad/w1_slave', 'r')
contents = tempFile.read()
contentsList = string.split(contents)
temp = contentsList[-1]
tempFile.close()
return temp
def insertTemp():
temp = getTemp()
with connection:
cursor = connection.cursor();
cursor.execute('INSERT INTO temp(temp_instance) VALUES(?)', (temp,))
connection.commit()
print temp
if __name__ == '__main__':
connection = lite.connect('weather.db')
insertTemp()
|
<commit_before>#!/usr/bin/python
import string
import sqlite3 as lite
import sys
from random import randint
def getTemp():
tempFile = open('/sys/bus/w1/devices/28-00000529fbad/w1_slave', 'r')
contents = tempFile.read()
contentsList = string.split(contents)
temp = contentsList[-1]
tempFile.close()
return temp
def insertTemp():
temp = getTemp()
with connection:
cursor = connection.cursor();
cursor.execute('INSERT INTO temp(temp_instance) VALUES(?)', (temp,))
connection.commit()
def getData():
with connection:
cursor = connection.cursor()
cursor.execute('SELECT * FROM temp')
data = cursor.fetchall()
print data
if __name__ == '__main__':
connection = lite.connect('weather.db')
insertTemp()
getData()
<commit_msg>Remove Pull of Data from DB<commit_after>
|
#!/usr/bin/python
import string
import sqlite3 as lite
import sys
from random import randint
def getTemp():
tempFile = open('/sys/bus/w1/devices/28-00000529fbad/w1_slave', 'r')
contents = tempFile.read()
contentsList = string.split(contents)
temp = contentsList[-1]
tempFile.close()
return temp
def insertTemp():
temp = getTemp()
with connection:
cursor = connection.cursor();
cursor.execute('INSERT INTO temp(temp_instance) VALUES(?)', (temp,))
connection.commit()
print temp
if __name__ == '__main__':
connection = lite.connect('weather.db')
insertTemp()
|
#!/usr/bin/python
import string
import sqlite3 as lite
import sys
from random import randint
def getTemp():
tempFile = open('/sys/bus/w1/devices/28-00000529fbad/w1_slave', 'r')
contents = tempFile.read()
contentsList = string.split(contents)
temp = contentsList[-1]
tempFile.close()
return temp
def insertTemp():
temp = getTemp()
with connection:
cursor = connection.cursor();
cursor.execute('INSERT INTO temp(temp_instance) VALUES(?)', (temp,))
connection.commit()
def getData():
with connection:
cursor = connection.cursor()
cursor.execute('SELECT * FROM temp')
data = cursor.fetchall()
print data
if __name__ == '__main__':
connection = lite.connect('weather.db')
insertTemp()
getData()
Remove Pull of Data from DB#!/usr/bin/python
import string
import sqlite3 as lite
import sys
from random import randint
def getTemp():
tempFile = open('/sys/bus/w1/devices/28-00000529fbad/w1_slave', 'r')
contents = tempFile.read()
contentsList = string.split(contents)
temp = contentsList[-1]
tempFile.close()
return temp
def insertTemp():
temp = getTemp()
with connection:
cursor = connection.cursor();
cursor.execute('INSERT INTO temp(temp_instance) VALUES(?)', (temp,))
connection.commit()
print temp
if __name__ == '__main__':
connection = lite.connect('weather.db')
insertTemp()
|
<commit_before>#!/usr/bin/python
import string
import sqlite3 as lite
import sys
from random import randint
def getTemp():
tempFile = open('/sys/bus/w1/devices/28-00000529fbad/w1_slave', 'r')
contents = tempFile.read()
contentsList = string.split(contents)
temp = contentsList[-1]
tempFile.close()
return temp
def insertTemp():
temp = getTemp()
with connection:
cursor = connection.cursor();
cursor.execute('INSERT INTO temp(temp_instance) VALUES(?)', (temp,))
connection.commit()
def getData():
with connection:
cursor = connection.cursor()
cursor.execute('SELECT * FROM temp')
data = cursor.fetchall()
print data
if __name__ == '__main__':
connection = lite.connect('weather.db')
insertTemp()
getData()
<commit_msg>Remove Pull of Data from DB<commit_after>#!/usr/bin/python
import string
import sqlite3 as lite
import sys
from random import randint
def getTemp():
tempFile = open('/sys/bus/w1/devices/28-00000529fbad/w1_slave', 'r')
contents = tempFile.read()
contentsList = string.split(contents)
temp = contentsList[-1]
tempFile.close()
return temp
def insertTemp():
temp = getTemp()
with connection:
cursor = connection.cursor();
cursor.execute('INSERT INTO temp(temp_instance) VALUES(?)', (temp,))
connection.commit()
print temp
if __name__ == '__main__':
connection = lite.connect('weather.db')
insertTemp()
|
b059f5128576d468ab0109da8d01bfdc50f6db56
|
accelerator/tests/contexts/analyze_judging_context.py
|
accelerator/tests/contexts/analyze_judging_context.py
|
from accelerator.tests.factories import (
CriterionFactory,
CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
JudgeFeedbackContext,
)
from accelerator.models import (
JUDGING_FEEDBACK_STATUS_COMPLETE,
JudgeApplicationFeedback,
)
class AnalyzeJudgingContext(JudgeFeedbackContext):
def __init__(self, type, name, read_count, options):
super().__init__()
self.read_count = read_count
self.options = options
self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
self.feedback.save()
self.add_application() # Add unread app
self.criterion = CriterionFactory(type=type,
name=name,
judging_round=self.judging_round)
self.option_specs = [CriterionOptionSpecFactory(
criterion=self.criterion,
count=read_count,
option=option) for option in options]
def needed_reads(self):
return (self.read_count * len(self.applications) -
self.feedback_count())
def feedback_count(self):
counts = [JudgeApplicationFeedback.objects.filter(
application=app,
feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
for app in self.applications]
return sum([min(self.read_count, count)
for count in counts])
|
from accelerator.tests.factories import (
CriterionFactory,
CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
JudgeFeedbackContext,
)
from accelerator.models import (
JUDGING_FEEDBACK_STATUS_COMPLETE,
JudgeApplicationFeedback,
)
class AnalyzeJudgingContext(JudgeFeedbackContext):
def __init__(self,
type="reads",
name="reads",
read_count=1,
options=[""],
is_active=True):
super().__init__(is_active=is_active)
self.read_count = read_count
self.options = options
self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
self.feedback.save()
self.add_application() # Add unread app
self.criterion = CriterionFactory(type=type,
name=name,
judging_round=self.judging_round)
self.option_specs = [CriterionOptionSpecFactory(
criterion=self.criterion,
count=read_count,
option=option) for option in options]
def needed_reads(self):
return (self.read_count * len(self.applications) -
self.feedback_count())
def feedback_count(self):
counts = [JudgeApplicationFeedback.objects.filter(
application=app,
feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
for app in self.applications]
return sum([min(self.read_count, count)
for count in counts])
|
Add is_active and default args to AnalyzeJudgingContext
|
Add is_active and default args to AnalyzeJudgingContext
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
from accelerator.tests.factories import (
CriterionFactory,
CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
JudgeFeedbackContext,
)
from accelerator.models import (
JUDGING_FEEDBACK_STATUS_COMPLETE,
JudgeApplicationFeedback,
)
class AnalyzeJudgingContext(JudgeFeedbackContext):
def __init__(self, type, name, read_count, options):
super().__init__()
self.read_count = read_count
self.options = options
self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
self.feedback.save()
self.add_application() # Add unread app
self.criterion = CriterionFactory(type=type,
name=name,
judging_round=self.judging_round)
self.option_specs = [CriterionOptionSpecFactory(
criterion=self.criterion,
count=read_count,
option=option) for option in options]
def needed_reads(self):
return (self.read_count * len(self.applications) -
self.feedback_count())
def feedback_count(self):
counts = [JudgeApplicationFeedback.objects.filter(
application=app,
feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
for app in self.applications]
return sum([min(self.read_count, count)
for count in counts])
Add is_active and default args to AnalyzeJudgingContext
|
from accelerator.tests.factories import (
CriterionFactory,
CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
JudgeFeedbackContext,
)
from accelerator.models import (
JUDGING_FEEDBACK_STATUS_COMPLETE,
JudgeApplicationFeedback,
)
class AnalyzeJudgingContext(JudgeFeedbackContext):
def __init__(self,
type="reads",
name="reads",
read_count=1,
options=[""],
is_active=True):
super().__init__(is_active=is_active)
self.read_count = read_count
self.options = options
self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
self.feedback.save()
self.add_application() # Add unread app
self.criterion = CriterionFactory(type=type,
name=name,
judging_round=self.judging_round)
self.option_specs = [CriterionOptionSpecFactory(
criterion=self.criterion,
count=read_count,
option=option) for option in options]
def needed_reads(self):
return (self.read_count * len(self.applications) -
self.feedback_count())
def feedback_count(self):
counts = [JudgeApplicationFeedback.objects.filter(
application=app,
feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
for app in self.applications]
return sum([min(self.read_count, count)
for count in counts])
|
<commit_before>from accelerator.tests.factories import (
CriterionFactory,
CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
JudgeFeedbackContext,
)
from accelerator.models import (
JUDGING_FEEDBACK_STATUS_COMPLETE,
JudgeApplicationFeedback,
)
class AnalyzeJudgingContext(JudgeFeedbackContext):
def __init__(self, type, name, read_count, options):
super().__init__()
self.read_count = read_count
self.options = options
self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
self.feedback.save()
self.add_application() # Add unread app
self.criterion = CriterionFactory(type=type,
name=name,
judging_round=self.judging_round)
self.option_specs = [CriterionOptionSpecFactory(
criterion=self.criterion,
count=read_count,
option=option) for option in options]
def needed_reads(self):
return (self.read_count * len(self.applications) -
self.feedback_count())
def feedback_count(self):
counts = [JudgeApplicationFeedback.objects.filter(
application=app,
feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
for app in self.applications]
return sum([min(self.read_count, count)
for count in counts])
<commit_msg>Add is_active and default args to AnalyzeJudgingContext<commit_after>
|
from accelerator.tests.factories import (
CriterionFactory,
CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
JudgeFeedbackContext,
)
from accelerator.models import (
JUDGING_FEEDBACK_STATUS_COMPLETE,
JudgeApplicationFeedback,
)
class AnalyzeJudgingContext(JudgeFeedbackContext):
    """Judge-feedback test context with one criterion and its option specs.

    Builds on JudgeFeedbackContext: marks the existing feedback complete,
    adds one extra (unread) application, and creates a Criterion plus one
    CriterionOptionSpec per entry in ``options``.
    """

    def __init__(self,
                 type="reads",
                 name="reads",
                 read_count=1,
                 options=None,
                 is_active=True):
        """Create the context.

        :param type: criterion type (default "reads")
        :param name: criterion name (default "reads")
        :param read_count: required completed reads per application
        :param options: option labels; defaults to a single empty option
        :param is_active: passed through to JudgeFeedbackContext
        """
        super().__init__(is_active=is_active)
        # Use None as the default instead of a mutable list literal, which
        # would be shared across every instantiation of this class.
        if options is None:
            options = [""]
        self.read_count = read_count
        self.options = options
        self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
        self.feedback.save()
        self.add_application()  # Add an application with no reads yet.
        self.criterion = CriterionFactory(type=type,
                                          name=name,
                                          judging_round=self.judging_round)
        self.option_specs = [CriterionOptionSpecFactory(
            criterion=self.criterion,
            count=read_count,
            option=option) for option in options]

    def needed_reads(self):
        """Number of completed reads still required across all applications."""
        return (self.read_count * len(self.applications) -
                self.feedback_count())

    def feedback_count(self):
        """Sum of completed feedback per application, capped at read_count."""
        counts = [JudgeApplicationFeedback.objects.filter(
            application=app,
            feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
            for app in self.applications]
        # Cap each application's contribution so extra reads beyond
        # read_count do not inflate the total.
        return sum(min(self.read_count, count) for count in counts)
|
from accelerator.tests.factories import (
CriterionFactory,
CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
JudgeFeedbackContext,
)
from accelerator.models import (
JUDGING_FEEDBACK_STATUS_COMPLETE,
JudgeApplicationFeedback,
)
class AnalyzeJudgingContext(JudgeFeedbackContext):
def __init__(self, type, name, read_count, options):
super().__init__()
self.read_count = read_count
self.options = options
self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
self.feedback.save()
self.add_application() # Add unread app
self.criterion = CriterionFactory(type=type,
name=name,
judging_round=self.judging_round)
self.option_specs = [CriterionOptionSpecFactory(
criterion=self.criterion,
count=read_count,
option=option) for option in options]
def needed_reads(self):
return (self.read_count * len(self.applications) -
self.feedback_count())
def feedback_count(self):
counts = [JudgeApplicationFeedback.objects.filter(
application=app,
feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
for app in self.applications]
return sum([min(self.read_count, count)
for count in counts])
Add is_active and default args to AnalyzeJudgingContextfrom accelerator.tests.factories import (
CriterionFactory,
CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
JudgeFeedbackContext,
)
from accelerator.models import (
JUDGING_FEEDBACK_STATUS_COMPLETE,
JudgeApplicationFeedback,
)
class AnalyzeJudgingContext(JudgeFeedbackContext):
def __init__(self,
type="reads",
name="reads",
read_count=1,
options=[""],
is_active=True):
super().__init__(is_active=is_active)
self.read_count = read_count
self.options = options
self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
self.feedback.save()
self.add_application() # Add unread app
self.criterion = CriterionFactory(type=type,
name=name,
judging_round=self.judging_round)
self.option_specs = [CriterionOptionSpecFactory(
criterion=self.criterion,
count=read_count,
option=option) for option in options]
def needed_reads(self):
return (self.read_count * len(self.applications) -
self.feedback_count())
def feedback_count(self):
counts = [JudgeApplicationFeedback.objects.filter(
application=app,
feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
for app in self.applications]
return sum([min(self.read_count, count)
for count in counts])
|
<commit_before>from accelerator.tests.factories import (
CriterionFactory,
CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
JudgeFeedbackContext,
)
from accelerator.models import (
JUDGING_FEEDBACK_STATUS_COMPLETE,
JudgeApplicationFeedback,
)
class AnalyzeJudgingContext(JudgeFeedbackContext):
def __init__(self, type, name, read_count, options):
super().__init__()
self.read_count = read_count
self.options = options
self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
self.feedback.save()
self.add_application() # Add unread app
self.criterion = CriterionFactory(type=type,
name=name,
judging_round=self.judging_round)
self.option_specs = [CriterionOptionSpecFactory(
criterion=self.criterion,
count=read_count,
option=option) for option in options]
def needed_reads(self):
return (self.read_count * len(self.applications) -
self.feedback_count())
def feedback_count(self):
counts = [JudgeApplicationFeedback.objects.filter(
application=app,
feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
for app in self.applications]
return sum([min(self.read_count, count)
for count in counts])
<commit_msg>Add is_active and default args to AnalyzeJudgingContext<commit_after>from accelerator.tests.factories import (
CriterionFactory,
CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
JudgeFeedbackContext,
)
from accelerator.models import (
JUDGING_FEEDBACK_STATUS_COMPLETE,
JudgeApplicationFeedback,
)
class AnalyzeJudgingContext(JudgeFeedbackContext):
def __init__(self,
type="reads",
name="reads",
read_count=1,
options=[""],
is_active=True):
super().__init__(is_active=is_active)
self.read_count = read_count
self.options = options
self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
self.feedback.save()
self.add_application() # Add unread app
self.criterion = CriterionFactory(type=type,
name=name,
judging_round=self.judging_round)
self.option_specs = [CriterionOptionSpecFactory(
criterion=self.criterion,
count=read_count,
option=option) for option in options]
def needed_reads(self):
return (self.read_count * len(self.applications) -
self.feedback_count())
def feedback_count(self):
counts = [JudgeApplicationFeedback.objects.filter(
application=app,
feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
for app in self.applications]
return sum([min(self.read_count, count)
for count in counts])
|
24d58fb9650c5253eb24c4596a49daa18b8b2807
|
tests.py
|
tests.py
|
import os
from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_resolve_resolves(fs):
path = fs.resolve('some/dir')
assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))
def test_mk_makes_a_dir(fs):
fs.mk('some/dir')
assert isdir(fs.resolve('some/dir'))
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
def test_resolve_with_no_arg_is_equivalent_to_root(fs):
assert fs.resolve() == fs.root
|
import os
from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_resolve_resolves(fs):
path = fs.resolve('some/dir')
assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))
def test_mk_makes_a_dir(fs):
fs.mk('some/dir')
assert isdir(fs.resolve('some/dir'))
def test_mk_makes_a_file(fs):
fs.mk(('some/dir/file.txt', 'Greetings, program!'))
contents = open(fs.resolve('some/dir/file.txt')).read()
assert contents == 'Greetings, program!'
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
def test_resolve_with_no_arg_is_equivalent_to_root(fs):
assert fs.resolve() == fs.root
|
Add a test for making a file
|
Add a test for making a file
|
Python
|
mit
|
gratipay/filesystem_tree.py,gratipay/filesystem_tree.py
|
import os
from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_resolve_resolves(fs):
path = fs.resolve('some/dir')
assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))
def test_mk_makes_a_dir(fs):
fs.mk('some/dir')
assert isdir(fs.resolve('some/dir'))
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
def test_resolve_with_no_arg_is_equivalent_to_root(fs):
assert fs.resolve() == fs.root
Add a test for making a file
|
import os
from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_resolve_resolves(fs):
path = fs.resolve('some/dir')
assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))
def test_mk_makes_a_dir(fs):
fs.mk('some/dir')
assert isdir(fs.resolve('some/dir'))
def test_mk_makes_a_file(fs):
fs.mk(('some/dir/file.txt', 'Greetings, program!'))
contents = open(fs.resolve('some/dir/file.txt')).read()
assert contents == 'Greetings, program!'
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
def test_resolve_with_no_arg_is_equivalent_to_root(fs):
assert fs.resolve() == fs.root
|
<commit_before>import os
from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_resolve_resolves(fs):
path = fs.resolve('some/dir')
assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))
def test_mk_makes_a_dir(fs):
fs.mk('some/dir')
assert isdir(fs.resolve('some/dir'))
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
def test_resolve_with_no_arg_is_equivalent_to_root(fs):
assert fs.resolve() == fs.root
<commit_msg>Add a test for making a file<commit_after>
|
import os
from os.path import isdir

import pytest

from filesystem_tree import FilesystemTree


# pytest.yield_fixture is deprecated (removed in pytest 4.0); plain
# pytest.fixture supports yield-style teardown.
@pytest.fixture
def fs():
    """Yield a fresh FilesystemTree and remove it after the test."""
    fs = FilesystemTree()
    yield fs
    fs.remove()


def test_it_can_be_instantiated():
    assert FilesystemTree().__class__.__name__ == 'FilesystemTree'


def test_args_go_to_mk_not_root():
    fs = FilesystemTree('foo', 'bar')
    assert fs.root != 'foo'


def test_it_makes_a_directory(fs):
    assert isdir(fs.root)


def test_resolve_resolves(fs):
    path = fs.resolve('some/dir')
    assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))


def test_mk_makes_a_dir(fs):
    fs.mk('some/dir')
    assert isdir(fs.resolve('some/dir'))


def test_mk_makes_a_file(fs):
    fs.mk(('some/dir/file.txt', 'Greetings, program!'))
    # Close the handle deterministically instead of leaking it.
    with open(fs.resolve('some/dir/file.txt')) as f:
        contents = f.read()
    assert contents == 'Greetings, program!'


def test_remove_removes(fs):
    assert isdir(fs.root)
    fs.remove()
    assert not isdir(fs.root)


def test_resolve_with_no_arg_is_equivalent_to_root(fs):
    assert fs.resolve() == fs.root
|
import os
from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_resolve_resolves(fs):
path = fs.resolve('some/dir')
assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))
def test_mk_makes_a_dir(fs):
fs.mk('some/dir')
assert isdir(fs.resolve('some/dir'))
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
def test_resolve_with_no_arg_is_equivalent_to_root(fs):
assert fs.resolve() == fs.root
Add a test for making a fileimport os
from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_resolve_resolves(fs):
path = fs.resolve('some/dir')
assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))
def test_mk_makes_a_dir(fs):
fs.mk('some/dir')
assert isdir(fs.resolve('some/dir'))
def test_mk_makes_a_file(fs):
fs.mk(('some/dir/file.txt', 'Greetings, program!'))
contents = open(fs.resolve('some/dir/file.txt')).read()
assert contents == 'Greetings, program!'
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
def test_resolve_with_no_arg_is_equivalent_to_root(fs):
assert fs.resolve() == fs.root
|
<commit_before>import os
from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_resolve_resolves(fs):
path = fs.resolve('some/dir')
assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))
def test_mk_makes_a_dir(fs):
fs.mk('some/dir')
assert isdir(fs.resolve('some/dir'))
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
def test_resolve_with_no_arg_is_equivalent_to_root(fs):
assert fs.resolve() == fs.root
<commit_msg>Add a test for making a file<commit_after>import os
from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_resolve_resolves(fs):
path = fs.resolve('some/dir')
assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))
def test_mk_makes_a_dir(fs):
fs.mk('some/dir')
assert isdir(fs.resolve('some/dir'))
def test_mk_makes_a_file(fs):
fs.mk(('some/dir/file.txt', 'Greetings, program!'))
contents = open(fs.resolve('some/dir/file.txt')).read()
assert contents == 'Greetings, program!'
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
def test_resolve_with_no_arg_is_equivalent_to_root(fs):
assert fs.resolve() == fs.root
|
b7a17837a618a9d03cb8c94f3ee9765e48d83f57
|
froide/helper/api_utils.py
|
froide/helper/api_utils.py
|
from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context['facets']),
])
return ret
|
from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context.get('facets', {'fields': {}})),
])
return ret
|
Make facet context optional for public body search
|
Make facet context optional for public body search
For search backends that don't support faceting
|
Python
|
mit
|
stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,stefanw/froide,fin/froide,fin/froide,stefanw/froide
|
from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context['facets']),
])
return ret
Make facet context optional for public body search
For search backends that don't support faceting
|
from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context.get('facets', {'fields': {}})),
])
return ret
|
<commit_before>from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context['facets']),
])
return ret
<commit_msg>Make facet context optional for public body search
For search backends that don't support faceting<commit_after>
|
from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context.get('facets', {'fields': {}})),
])
return ret
|
from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context['facets']),
])
return ret
Make facet context optional for public body search
For search backends that don't support facetingfrom collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context.get('facets', {'fields': {}})),
])
return ret
|
<commit_before>from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context['facets']),
])
return ret
<commit_msg>Make facet context optional for public body search
For search backends that don't support faceting<commit_after>from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context.get('facets', {'fields': {}})),
])
return ret
|
7a00293d602c1997777fb90331fcbf7cde1b0838
|
tweet.py
|
tweet.py
|
#!/usr/bin/env python
from twython import Twython
from credentials import *
from os import urandom
def random_tweet(account):
# https://docs.python.org/2/library/codecs.html#python-specific-encodings
status = urandom(140).decode('utf-8', errors='ignore')
tweet = account.update_status(status=status)
# Gotta like this tweet, after all, we wrote it
account.create_favorite(id=tweet['id'])
if __name__ == '__main__':
account = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
random_tweet(account)
|
#!/usr/bin/env python
from twython import Twython
from credentials import *
from os import urandom
def random_tweet(account):
# https://docs.python.org/2/library/codecs.html
status = urandom(400).decode('utf-8', errors='ignore')
status = status[0:140]
tweet = account.update_status(status=status)
# Gotta like this tweet, after all, we wrote it
account.create_favorite(id=tweet['id'])
if __name__ == '__main__':
account = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
random_tweet(account)
|
Make sure we fill all 140 possible characters.
|
Make sure we fill all 140 possible characters.
|
Python
|
mit
|
chrisma/dev-urandom,chrisma/dev-urandom
|
#!/usr/bin/env python
from twython import Twython
from credentials import *
from os import urandom
def random_tweet(account):
# https://docs.python.org/2/library/codecs.html#python-specific-encodings
status = urandom(140).decode('utf-8', errors='ignore')
tweet = account.update_status(status=status)
# Gotta like this tweet, after all, we wrote it
account.create_favorite(id=tweet['id'])
if __name__ == '__main__':
account = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
random_tweet(account)
Make sure we fill all 140 possible characters.
|
#!/usr/bin/env python
from twython import Twython
from credentials import *
from os import urandom
def random_tweet(account):
# https://docs.python.org/2/library/codecs.html
status = urandom(400).decode('utf-8', errors='ignore')
status = status[0:140]
tweet = account.update_status(status=status)
# Gotta like this tweet, after all, we wrote it
account.create_favorite(id=tweet['id'])
if __name__ == '__main__':
account = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
random_tweet(account)
|
<commit_before>#!/usr/bin/env python
from twython import Twython
from credentials import *
from os import urandom
def random_tweet(account):
# https://docs.python.org/2/library/codecs.html#python-specific-encodings
status = urandom(140).decode('utf-8', errors='ignore')
tweet = account.update_status(status=status)
# Gotta like this tweet, after all, we wrote it
account.create_favorite(id=tweet['id'])
if __name__ == '__main__':
account = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
random_tweet(account)
<commit_msg>Make sure we fill all 140 possible characters.<commit_after>
|
#!/usr/bin/env python
from twython import Twython
from credentials import *
from os import urandom
def random_tweet(account):
# https://docs.python.org/2/library/codecs.html
status = urandom(400).decode('utf-8', errors='ignore')
status = status[0:140]
tweet = account.update_status(status=status)
# Gotta like this tweet, after all, we wrote it
account.create_favorite(id=tweet['id'])
if __name__ == '__main__':
account = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
random_tweet(account)
|
#!/usr/bin/env python
from twython import Twython
from credentials import *
from os import urandom
def random_tweet(account):
# https://docs.python.org/2/library/codecs.html#python-specific-encodings
status = urandom(140).decode('utf-8', errors='ignore')
tweet = account.update_status(status=status)
# Gotta like this tweet, after all, we wrote it
account.create_favorite(id=tweet['id'])
if __name__ == '__main__':
account = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
random_tweet(account)
Make sure we fill all 140 possible characters.#!/usr/bin/env python
from twython import Twython
from credentials import *
from os import urandom
def random_tweet(account):
# https://docs.python.org/2/library/codecs.html
status = urandom(400).decode('utf-8', errors='ignore')
status = status[0:140]
tweet = account.update_status(status=status)
# Gotta like this tweet, after all, we wrote it
account.create_favorite(id=tweet['id'])
if __name__ == '__main__':
account = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
random_tweet(account)
|
<commit_before>#!/usr/bin/env python
from twython import Twython
from credentials import *
from os import urandom
def random_tweet(account):
# https://docs.python.org/2/library/codecs.html#python-specific-encodings
status = urandom(140).decode('utf-8', errors='ignore')
tweet = account.update_status(status=status)
# Gotta like this tweet, after all, we wrote it
account.create_favorite(id=tweet['id'])
if __name__ == '__main__':
account = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
random_tweet(account)
<commit_msg>Make sure we fill all 140 possible characters.<commit_after>#!/usr/bin/env python
from twython import Twython
from credentials import *
from os import urandom
def random_tweet(account):
# https://docs.python.org/2/library/codecs.html
status = urandom(400).decode('utf-8', errors='ignore')
status = status[0:140]
tweet = account.update_status(status=status)
# Gotta like this tweet, after all, we wrote it
account.create_favorite(id=tweet['id'])
if __name__ == '__main__':
account = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
random_tweet(account)
|
1fdd6b13faa286f81cf92151262e0f549de457d7
|
PcDuino/default_settings.py
|
PcDuino/default_settings.py
|
# Web server
DEBUG = True
HOST = '127.0.0.1'
PORT = 5000
TEMPLATE = 'flot.html'
# Database location
SQLITE3_DB_PATH = 'data/ua_sensors.sqlite3'
# Sensor type mapping
#
# - First parameter is the identifier sent from sensors in the wild
# - Second parameter is the readable name of the sensor type
# - Third parameter is the types of readings returned
# - Semi-colon separates different sensor reading types
# - Each reading type needs to specify type and unit of measurement
# - For, example TYPE UNITS; TYPE UNITS -> distance meters; temperature celsius
SENSOR_TYPES = (
("d", "Water", "distance meters; temperature celsius"),
("g", "Gate", "distance meters"),
("s", "Soil", "moisture percent; temperature celsius"),
)
# Data logger error logging
DATA_LOGGER_ERROR_FILE = 'log/ua_sensor.log'
DATA_LOGGER_ERROR_LEVEL = 'INFO'
DATA_LOGGER_ERROR_FORMAT = '%(levelname)s - %(message)s'
|
# Web server
DEBUG = True
HOST = '127.0.0.1'
PORT = 5000
TEMPLATE = 'flot.html'
SITE_TITLE = "UA Sensors Visualization"
# Database location
SQLITE3_DB_PATH = 'data/ua_sensors.sqlite3'
# Sensor type mapping
#
# - First parameter is the identifier sent from sensors in the wild
# - Second parameter is the readable name of the sensor type
# - Third parameter is the types of readings returned
# - Semi-colon separates different sensor reading types
# - Each reading type needs to specify type and unit of measurement
# - For, example TYPE UNITS; TYPE UNITS -> distance meters; temperature celsius
SENSOR_TYPES = (
("d", "Water", "distance meters; temperature celsius"),
("g", "Gate", "distance meters"),
("s", "Soil", "moisture percent; temperature celsius"),
)
# Maps sensor ids to more informative name.
#
# Not all sensors need to be named. Can be adjusted later, just remember to
# restart server on config file updates
#
# For instance, +12223334444 is not as informative as Campbell Creek Water Sensor
#
# Example format:
#
# SENSOR_NAMES = {
# '+12223334444': 'Yosemite Distance Sensor',
# '+01234567890': 'Siberia Gate Sensor',
# 'moteino_1': 'Arctic Ocean Moisture Sensor',
# }
SENSOR_NAMES = {}
# Data logger error logging
DATA_LOGGER_ERROR_FILE = 'log/ua_sensor.log'
DATA_LOGGER_ERROR_LEVEL = 'INFO'
DATA_LOGGER_ERROR_FORMAT = '%(levelname)s - %(message)s'
|
Add some new default settings
|
Add some new default settings
|
Python
|
unlicense
|
UAA-EQLNES/EQLNES-Sensors
|
# Web server
DEBUG = True
HOST = '127.0.0.1'
PORT = 5000
TEMPLATE = 'flot.html'
# Database location
SQLITE3_DB_PATH = 'data/ua_sensors.sqlite3'
# Sensor type mapping
#
# - First parameter is the identifier sent from sensors in the wild
# - Second parameter is the readable name of the sensor type
# - Third parameter is the types of readings returned
# - Semi-colon separates different sensor reading types
# - Each reading type needs to specify type and unit of measurement
# - For, example TYPE UNITS; TYPE UNITS -> distance meters; temperature celsius
SENSOR_TYPES = (
("d", "Water", "distance meters; temperature celsius"),
("g", "Gate", "distance meters"),
("s", "Soil", "moisture percent; temperature celsius"),
)
# Data logger error logging
DATA_LOGGER_ERROR_FILE = 'log/ua_sensor.log'
DATA_LOGGER_ERROR_LEVEL = 'INFO'
DATA_LOGGER_ERROR_FORMAT = '%(levelname)s - %(message)s'Add some new default settings
|
# Web server
DEBUG = True
HOST = '127.0.0.1'
PORT = 5000
TEMPLATE = 'flot.html'
SITE_TITLE = "UA Sensors Visualization"
# Database location
SQLITE3_DB_PATH = 'data/ua_sensors.sqlite3'
# Sensor type mapping
#
# - First parameter is the identifier sent from sensors in the wild
# - Second parameter is the readable name of the sensor type
# - Third parameter is the types of readings returned
# - Semi-colon separates different sensor reading types
# - Each reading type needs to specify type and unit of measurement
# - For, example TYPE UNITS; TYPE UNITS -> distance meters; temperature celsius
SENSOR_TYPES = (
("d", "Water", "distance meters; temperature celsius"),
("g", "Gate", "distance meters"),
("s", "Soil", "moisture percent; temperature celsius"),
)
# Maps sensor ids to more informative name.
#
# Not all sensors need to be named. Can be adjusted later, just remember to
# restart server on config file updates
#
# For instance, +12223334444 is not as informative as Campbell Creek Water Sensor
#
# Example format:
#
# SENSOR_NAMES = {
# '+12223334444': 'Yosemite Distance Sensor',
# '+01234567890': 'Siberia Gate Sensor',
# 'moteino_1': 'Arctic Ocean Moisture Sensor',
# }
SENSOR_NAMES = {}
# Data logger error logging
DATA_LOGGER_ERROR_FILE = 'log/ua_sensor.log'
DATA_LOGGER_ERROR_LEVEL = 'INFO'
DATA_LOGGER_ERROR_FORMAT = '%(levelname)s - %(message)s'
|
<commit_before># Web server
DEBUG = True
HOST = '127.0.0.1'
PORT = 5000
TEMPLATE = 'flot.html'
# Database location
SQLITE3_DB_PATH = 'data/ua_sensors.sqlite3'
# Sensor type mapping
#
# - First parameter is the identifier sent from sensors in the wild
# - Second parameter is the readable name of the sensor type
# - Third parameter is the types of readings returned
# - Semi-colon separates different sensor reading types
# - Each reading type needs to specify type and unit of measurement
# - For, example TYPE UNITS; TYPE UNITS -> distance meters; temperature celsius
SENSOR_TYPES = (
("d", "Water", "distance meters; temperature celsius"),
("g", "Gate", "distance meters"),
("s", "Soil", "moisture percent; temperature celsius"),
)
# Data logger error logging
DATA_LOGGER_ERROR_FILE = 'log/ua_sensor.log'
DATA_LOGGER_ERROR_LEVEL = 'INFO'
DATA_LOGGER_ERROR_FORMAT = '%(levelname)s - %(message)s'<commit_msg>Add some new default settings<commit_after>
|
# Web server
DEBUG = True
HOST = '127.0.0.1'
PORT = 5000
TEMPLATE = 'flot.html'
SITE_TITLE = "UA Sensors Visualization"
# Database location
SQLITE3_DB_PATH = 'data/ua_sensors.sqlite3'
# Sensor type mapping
#
# - First parameter is the identifier sent from sensors in the wild
# - Second parameter is the readable name of the sensor type
# - Third parameter is the types of readings returned
# - Semi-colon separates different sensor reading types
# - Each reading type needs to specify type and unit of measurement
# - For, example TYPE UNITS; TYPE UNITS -> distance meters; temperature celsius
SENSOR_TYPES = (
("d", "Water", "distance meters; temperature celsius"),
("g", "Gate", "distance meters"),
("s", "Soil", "moisture percent; temperature celsius"),
)
# Maps sensor ids to more informative name.
#
# Not all sensors need to be named. Can be adjusted later, just remember to
# restart server on config file updates
#
# For instance, +12223334444 is not as informative as Campbell Creek Water Sensor
#
# Example format:
#
# SENSOR_NAMES = {
# '+12223334444': 'Yosemite Distance Sensor',
# '+01234567890': 'Siberia Gate Sensor',
# 'moteino_1': 'Arctic Ocean Moisture Sensor',
# }
SENSOR_NAMES = {}
# Data logger error logging
DATA_LOGGER_ERROR_FILE = 'log/ua_sensor.log'
DATA_LOGGER_ERROR_LEVEL = 'INFO'
DATA_LOGGER_ERROR_FORMAT = '%(levelname)s - %(message)s'
|
# Web server
DEBUG = True
HOST = '127.0.0.1'
PORT = 5000
TEMPLATE = 'flot.html'
# Database location
SQLITE3_DB_PATH = 'data/ua_sensors.sqlite3'
# Sensor type mapping
#
# - First parameter is the identifier sent from sensors in the wild
# - Second parameter is the readable name of the sensor type
# - Third parameter is the types of readings returned
# - Semi-colon separates different sensor reading types
# - Each reading type needs to specify type and unit of measurement
# - For, example TYPE UNITS; TYPE UNITS -> distance meters; temperature celsius
SENSOR_TYPES = (
("d", "Water", "distance meters; temperature celsius"),
("g", "Gate", "distance meters"),
("s", "Soil", "moisture percent; temperature celsius"),
)
# Data logger error logging
DATA_LOGGER_ERROR_FILE = 'log/ua_sensor.log'
DATA_LOGGER_ERROR_LEVEL = 'INFO'
DATA_LOGGER_ERROR_FORMAT = '%(levelname)s - %(message)s'Add some new default settings# Web server
DEBUG = True
HOST = '127.0.0.1'
PORT = 5000
TEMPLATE = 'flot.html'
SITE_TITLE = "UA Sensors Visualization"
# Database location
SQLITE3_DB_PATH = 'data/ua_sensors.sqlite3'
# Sensor type mapping
#
# - First parameter is the identifier sent from sensors in the wild
# - Second parameter is the readable name of the sensor type
# - Third parameter is the types of readings returned
# - Semi-colon separates different sensor reading types
# - Each reading type needs to specify type and unit of measurement
# - For, example TYPE UNITS; TYPE UNITS -> distance meters; temperature celsius
SENSOR_TYPES = (
("d", "Water", "distance meters; temperature celsius"),
("g", "Gate", "distance meters"),
("s", "Soil", "moisture percent; temperature celsius"),
)
# Maps sensor ids to more informative name.
#
# Not all sensors need to be named. Can be adjusted later, just remember to
# restart server on config file updates
#
# For instance, +12223334444 is not as informative as Campbell Creek Water Sensor
#
# Example format:
#
# SENSOR_NAMES = {
# '+12223334444': 'Yosemite Distance Sensor',
# '+01234567890': 'Siberia Gate Sensor',
# 'moteino_1': 'Arctic Ocean Moisture Sensor',
# }
SENSOR_NAMES = {}
# Data logger error logging
DATA_LOGGER_ERROR_FILE = 'log/ua_sensor.log'
DATA_LOGGER_ERROR_LEVEL = 'INFO'
DATA_LOGGER_ERROR_FORMAT = '%(levelname)s - %(message)s'
|
<commit_before># Web server
DEBUG = True
HOST = '127.0.0.1'
PORT = 5000
TEMPLATE = 'flot.html'
# Database location
SQLITE3_DB_PATH = 'data/ua_sensors.sqlite3'
# Sensor type mapping
#
# - First parameter is the identifier sent from sensors in the wild
# - Second parameter is the readable name of the sensor type
# - Third parameter is the types of readings returned
# - Semi-colon separates different sensor reading types
# - Each reading type needs to specify type and unit of measurement
# - For, example TYPE UNITS; TYPE UNITS -> distance meters; temperature celsius
SENSOR_TYPES = (
("d", "Water", "distance meters; temperature celsius"),
("g", "Gate", "distance meters"),
("s", "Soil", "moisture percent; temperature celsius"),
)
# Data logger error logging
DATA_LOGGER_ERROR_FILE = 'log/ua_sensor.log'
DATA_LOGGER_ERROR_LEVEL = 'INFO'
DATA_LOGGER_ERROR_FORMAT = '%(levelname)s - %(message)s'<commit_msg>Add some new default settings<commit_after># Web server
DEBUG = True
HOST = '127.0.0.1'
PORT = 5000
TEMPLATE = 'flot.html'
SITE_TITLE = "UA Sensors Visualization"
# Database location
SQLITE3_DB_PATH = 'data/ua_sensors.sqlite3'
# Sensor type mapping
#
# - First parameter is the identifier sent from sensors in the wild
# - Second parameter is the readable name of the sensor type
# - Third parameter is the types of readings returned
# - Semi-colon separates different sensor reading types
# - Each reading type needs to specify type and unit of measurement
# - For, example TYPE UNITS; TYPE UNITS -> distance meters; temperature celsius
SENSOR_TYPES = (
("d", "Water", "distance meters; temperature celsius"),
("g", "Gate", "distance meters"),
("s", "Soil", "moisture percent; temperature celsius"),
)
# Maps sensor ids to more informative name.
#
# Not all sensors need to be named. Can be adjusted later, just remember to
# restart server on config file updates
#
# For instance, +12223334444 is not as informative as Campbell Creek Water Sensor
#
# Example format:
#
# SENSOR_NAMES = {
# '+12223334444': 'Yosemite Distance Sensor',
# '+01234567890': 'Siberia Gate Sensor',
# 'moteino_1': 'Arctic Ocean Moisture Sensor',
# }
SENSOR_NAMES = {}
# Data logger error logging
DATA_LOGGER_ERROR_FILE = 'log/ua_sensor.log'
DATA_LOGGER_ERROR_LEVEL = 'INFO'
DATA_LOGGER_ERROR_FORMAT = '%(levelname)s - %(message)s'
|
6439b3999f729c1689889845d879c2cb3b54266c
|
test/benchmarks/performance_vs_serial/linear_fft_pipeline.py
|
test/benchmarks/performance_vs_serial/linear_fft_pipeline.py
|
""" Test a pipeline with repeated FFTs and inverse FFTs """
from timeit import default_timer as timer
import numpy as np
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class GPUFFTBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
datafile = "numpy_data0.bin"
data = blocks.binary_io.BinaryFileReadBlock(
[datafile], gulp_size=32768, gulp_nframe=4, dtype='f32')
#data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
#sigproc_benchmarker = SigprocBenchmarker()
#print sigproc_benchmarker.average_benchmark(10)
t = np.arange(32768*1024)
w = 0.01
s = np.sin(w * 4 * t, dtype='float32')
with open('numpy_data0.bin', 'wb') as myfile: pass
s.tofile('numpy_data0.bin')
gpufftbenchmarker = GPUFFTBenchmarker()
print gpufftbenchmarker.average_benchmark(10)
|
""" Test a pipeline with repeated FFTs and inverse FFTs """
from timeit import default_timer as timer
import numpy as np
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class GPUFFTBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
datafile = "numpy_data0.bin"
bc = bf.BlockChainer()
bc.blocks.binary_io.BinaryFileReadBlock(
[datafile], gulp_size=32768, gulp_nframe=4, dtype='f32')
bc.blocks.copy('cuda')
bc.blocks.print_header()
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
#sigproc_benchmarker = SigprocBenchmarker()
#print sigproc_benchmarker.average_benchmark(10)
t = np.arange(32768*1024)
w = 0.01
s = np.sin(w * 4 * t, dtype='float32')
with open('numpy_data0.bin', 'wb') as myfile: pass
s.tofile('numpy_data0.bin')
gpufftbenchmarker = GPUFFTBenchmarker()
print gpufftbenchmarker.average_benchmark(4)
|
Switch to using block chainer
|
Switch to using block chainer
|
Python
|
bsd-3-clause
|
ledatelescope/bifrost,ledatelescope/bifrost,ledatelescope/bifrost,ledatelescope/bifrost
|
""" Test a pipeline with repeated FFTs and inverse FFTs """
from timeit import default_timer as timer
import numpy as np
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class GPUFFTBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
datafile = "numpy_data0.bin"
data = blocks.binary_io.BinaryFileReadBlock(
[datafile], gulp_size=32768, gulp_nframe=4, dtype='f32')
#data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
#sigproc_benchmarker = SigprocBenchmarker()
#print sigproc_benchmarker.average_benchmark(10)
t = np.arange(32768*1024)
w = 0.01
s = np.sin(w * 4 * t, dtype='float32')
with open('numpy_data0.bin', 'wb') as myfile: pass
s.tofile('numpy_data0.bin')
gpufftbenchmarker = GPUFFTBenchmarker()
print gpufftbenchmarker.average_benchmark(10)
Switch to using block chainer
|
""" Test a pipeline with repeated FFTs and inverse FFTs """
from timeit import default_timer as timer
import numpy as np
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class GPUFFTBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
datafile = "numpy_data0.bin"
bc = bf.BlockChainer()
bc.blocks.binary_io.BinaryFileReadBlock(
[datafile], gulp_size=32768, gulp_nframe=4, dtype='f32')
bc.blocks.copy('cuda')
bc.blocks.print_header()
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
#sigproc_benchmarker = SigprocBenchmarker()
#print sigproc_benchmarker.average_benchmark(10)
t = np.arange(32768*1024)
w = 0.01
s = np.sin(w * 4 * t, dtype='float32')
with open('numpy_data0.bin', 'wb') as myfile: pass
s.tofile('numpy_data0.bin')
gpufftbenchmarker = GPUFFTBenchmarker()
print gpufftbenchmarker.average_benchmark(4)
|
<commit_before>""" Test a pipeline with repeated FFTs and inverse FFTs """
from timeit import default_timer as timer
import numpy as np
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class GPUFFTBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
datafile = "numpy_data0.bin"
data = blocks.binary_io.BinaryFileReadBlock(
[datafile], gulp_size=32768, gulp_nframe=4, dtype='f32')
#data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
#sigproc_benchmarker = SigprocBenchmarker()
#print sigproc_benchmarker.average_benchmark(10)
t = np.arange(32768*1024)
w = 0.01
s = np.sin(w * 4 * t, dtype='float32')
with open('numpy_data0.bin', 'wb') as myfile: pass
s.tofile('numpy_data0.bin')
gpufftbenchmarker = GPUFFTBenchmarker()
print gpufftbenchmarker.average_benchmark(10)
<commit_msg>Switch to using block chainer<commit_after>
|
""" Test a pipeline with repeated FFTs and inverse FFTs """
from timeit import default_timer as timer
import numpy as np
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class GPUFFTBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
datafile = "numpy_data0.bin"
bc = bf.BlockChainer()
bc.blocks.binary_io.BinaryFileReadBlock(
[datafile], gulp_size=32768, gulp_nframe=4, dtype='f32')
bc.blocks.copy('cuda')
bc.blocks.print_header()
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
#sigproc_benchmarker = SigprocBenchmarker()
#print sigproc_benchmarker.average_benchmark(10)
t = np.arange(32768*1024)
w = 0.01
s = np.sin(w * 4 * t, dtype='float32')
with open('numpy_data0.bin', 'wb') as myfile: pass
s.tofile('numpy_data0.bin')
gpufftbenchmarker = GPUFFTBenchmarker()
print gpufftbenchmarker.average_benchmark(4)
|
""" Test a pipeline with repeated FFTs and inverse FFTs """
from timeit import default_timer as timer
import numpy as np
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class GPUFFTBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
datafile = "numpy_data0.bin"
data = blocks.binary_io.BinaryFileReadBlock(
[datafile], gulp_size=32768, gulp_nframe=4, dtype='f32')
#data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
#sigproc_benchmarker = SigprocBenchmarker()
#print sigproc_benchmarker.average_benchmark(10)
t = np.arange(32768*1024)
w = 0.01
s = np.sin(w * 4 * t, dtype='float32')
with open('numpy_data0.bin', 'wb') as myfile: pass
s.tofile('numpy_data0.bin')
gpufftbenchmarker = GPUFFTBenchmarker()
print gpufftbenchmarker.average_benchmark(10)
Switch to using block chainer""" Test a pipeline with repeated FFTs and inverse FFTs """
from timeit import default_timer as timer
import numpy as np
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class GPUFFTBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
datafile = "numpy_data0.bin"
bc = bf.BlockChainer()
bc.blocks.binary_io.BinaryFileReadBlock(
[datafile], gulp_size=32768, gulp_nframe=4, dtype='f32')
bc.blocks.copy('cuda')
bc.blocks.print_header()
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
#sigproc_benchmarker = SigprocBenchmarker()
#print sigproc_benchmarker.average_benchmark(10)
t = np.arange(32768*1024)
w = 0.01
s = np.sin(w * 4 * t, dtype='float32')
with open('numpy_data0.bin', 'wb') as myfile: pass
s.tofile('numpy_data0.bin')
gpufftbenchmarker = GPUFFTBenchmarker()
print gpufftbenchmarker.average_benchmark(4)
|
<commit_before>""" Test a pipeline with repeated FFTs and inverse FFTs """
from timeit import default_timer as timer
import numpy as np
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class GPUFFTBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
datafile = "numpy_data0.bin"
data = blocks.binary_io.BinaryFileReadBlock(
[datafile], gulp_size=32768, gulp_nframe=4, dtype='f32')
#data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
#sigproc_benchmarker = SigprocBenchmarker()
#print sigproc_benchmarker.average_benchmark(10)
t = np.arange(32768*1024)
w = 0.01
s = np.sin(w * 4 * t, dtype='float32')
with open('numpy_data0.bin', 'wb') as myfile: pass
s.tofile('numpy_data0.bin')
gpufftbenchmarker = GPUFFTBenchmarker()
print gpufftbenchmarker.average_benchmark(10)
<commit_msg>Switch to using block chainer<commit_after>""" Test a pipeline with repeated FFTs and inverse FFTs """
from timeit import default_timer as timer
import numpy as np
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class GPUFFTBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
datafile = "numpy_data0.bin"
bc = bf.BlockChainer()
bc.blocks.binary_io.BinaryFileReadBlock(
[datafile], gulp_size=32768, gulp_nframe=4, dtype='f32')
bc.blocks.copy('cuda')
bc.blocks.print_header()
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
#sigproc_benchmarker = SigprocBenchmarker()
#print sigproc_benchmarker.average_benchmark(10)
t = np.arange(32768*1024)
w = 0.01
s = np.sin(w * 4 * t, dtype='float32')
with open('numpy_data0.bin', 'wb') as myfile: pass
s.tofile('numpy_data0.bin')
gpufftbenchmarker = GPUFFTBenchmarker()
print gpufftbenchmarker.average_benchmark(4)
|
0bfab562565a8e6291911a4483e21428f4a31fd0
|
webview/documents/tasks.py
|
webview/documents/tasks.py
|
from __future__ import absolute_import
from celery import shared_task
from documents.models import Document
import clamd
@shared_task
def compile_tex(document_id):
document = Document.objects.get(pk=document_id)
print(document)
@shared_task
def check_clamav(document_id):
document = Document.objects.get(pk=document_id)
clam = clamd.ClamdUnixSocket(settings.CLAMAV_SOCKET)
status, sig = clam.scan(absolute_path_to_pdf)[absolute_path_to_pdf]
if status == 'OK':
document.clean = True
elif status == 'FOUND':
document.clean = False
print("Signature found", sig)
else:
# unknown state
document.clean = False
print("Unknown return of clamav", status, sig)
document.save()
|
from __future__ import absolute_import
from celery import shared_task
from documents.models import Document
import clamd
@shared_task
def compile_tex(document_id):
document = Document.objects.get(pk=document_id)
print(document)
@shared_task
def check_clamav(document_id):
document = Document.objects.get(pk=document_id)
clam = clamd.ClamdUnixSocket(settings.CLAMAV_SOCKET)
status, sig = clam.scan(absolute_path_to_pdf)[absolute_path_to_pdf]
if status == 'OK':
document.isClean = True
elif status == 'FOUND':
document.isClean = False
print("Signature found", sig)
else:
# unknown state
document.isClean = False
print("Unknown return of clamav", status, sig)
document.save()
|
Change clean by isClean in check_clamav task
|
Change clean by isClean in check_clamav task
|
Python
|
agpl-3.0
|
C4ptainCrunch/info-f-309,C4ptainCrunch/info-f-309,C4ptainCrunch/info-f-309
|
from __future__ import absolute_import
from celery import shared_task
from documents.models import Document
import clamd
@shared_task
def compile_tex(document_id):
document = Document.objects.get(pk=document_id)
print(document)
@shared_task
def check_clamav(document_id):
document = Document.objects.get(pk=document_id)
clam = clamd.ClamdUnixSocket(settings.CLAMAV_SOCKET)
status, sig = clam.scan(absolute_path_to_pdf)[absolute_path_to_pdf]
if status == 'OK':
document.clean = True
elif status == 'FOUND':
document.clean = False
print("Signature found", sig)
else:
# unknown state
document.clean = False
print("Unknown return of clamav", status, sig)
document.save()
Change clean by isClean in check_clamav task
|
from __future__ import absolute_import
from celery import shared_task
from documents.models import Document
import clamd
@shared_task
def compile_tex(document_id):
document = Document.objects.get(pk=document_id)
print(document)
@shared_task
def check_clamav(document_id):
document = Document.objects.get(pk=document_id)
clam = clamd.ClamdUnixSocket(settings.CLAMAV_SOCKET)
status, sig = clam.scan(absolute_path_to_pdf)[absolute_path_to_pdf]
if status == 'OK':
document.isClean = True
elif status == 'FOUND':
document.isClean = False
print("Signature found", sig)
else:
# unknown state
document.isClean = False
print("Unknown return of clamav", status, sig)
document.save()
|
<commit_before>from __future__ import absolute_import
from celery import shared_task
from documents.models import Document
import clamd
@shared_task
def compile_tex(document_id):
document = Document.objects.get(pk=document_id)
print(document)
@shared_task
def check_clamav(document_id):
document = Document.objects.get(pk=document_id)
clam = clamd.ClamdUnixSocket(settings.CLAMAV_SOCKET)
status, sig = clam.scan(absolute_path_to_pdf)[absolute_path_to_pdf]
if status == 'OK':
document.clean = True
elif status == 'FOUND':
document.clean = False
print("Signature found", sig)
else:
# unknown state
document.clean = False
print("Unknown return of clamav", status, sig)
document.save()
<commit_msg>Change clean by isClean in check_clamav task<commit_after>
|
from __future__ import absolute_import
from celery import shared_task
from documents.models import Document
import clamd
@shared_task
def compile_tex(document_id):
document = Document.objects.get(pk=document_id)
print(document)
@shared_task
def check_clamav(document_id):
document = Document.objects.get(pk=document_id)
clam = clamd.ClamdUnixSocket(settings.CLAMAV_SOCKET)
status, sig = clam.scan(absolute_path_to_pdf)[absolute_path_to_pdf]
if status == 'OK':
document.isClean = True
elif status == 'FOUND':
document.isClean = False
print("Signature found", sig)
else:
# unknown state
document.isClean = False
print("Unknown return of clamav", status, sig)
document.save()
|
from __future__ import absolute_import
from celery import shared_task
from documents.models import Document
import clamd
@shared_task
def compile_tex(document_id):
document = Document.objects.get(pk=document_id)
print(document)
@shared_task
def check_clamav(document_id):
document = Document.objects.get(pk=document_id)
clam = clamd.ClamdUnixSocket(settings.CLAMAV_SOCKET)
status, sig = clam.scan(absolute_path_to_pdf)[absolute_path_to_pdf]
if status == 'OK':
document.clean = True
elif status == 'FOUND':
document.clean = False
print("Signature found", sig)
else:
# unknown state
document.clean = False
print("Unknown return of clamav", status, sig)
document.save()
Change clean by isClean in check_clamav taskfrom __future__ import absolute_import
from celery import shared_task
from documents.models import Document
import clamd
@shared_task
def compile_tex(document_id):
document = Document.objects.get(pk=document_id)
print(document)
@shared_task
def check_clamav(document_id):
document = Document.objects.get(pk=document_id)
clam = clamd.ClamdUnixSocket(settings.CLAMAV_SOCKET)
status, sig = clam.scan(absolute_path_to_pdf)[absolute_path_to_pdf]
if status == 'OK':
document.isClean = True
elif status == 'FOUND':
document.isClean = False
print("Signature found", sig)
else:
# unknown state
document.isClean = False
print("Unknown return of clamav", status, sig)
document.save()
|
<commit_before>from __future__ import absolute_import
from celery import shared_task
from documents.models import Document
import clamd
@shared_task
def compile_tex(document_id):
document = Document.objects.get(pk=document_id)
print(document)
@shared_task
def check_clamav(document_id):
document = Document.objects.get(pk=document_id)
clam = clamd.ClamdUnixSocket(settings.CLAMAV_SOCKET)
status, sig = clam.scan(absolute_path_to_pdf)[absolute_path_to_pdf]
if status == 'OK':
document.clean = True
elif status == 'FOUND':
document.clean = False
print("Signature found", sig)
else:
# unknown state
document.clean = False
print("Unknown return of clamav", status, sig)
document.save()
<commit_msg>Change clean by isClean in check_clamav task<commit_after>from __future__ import absolute_import
from celery import shared_task
from documents.models import Document
import clamd
@shared_task
def compile_tex(document_id):
document = Document.objects.get(pk=document_id)
print(document)
@shared_task
def check_clamav(document_id):
document = Document.objects.get(pk=document_id)
clam = clamd.ClamdUnixSocket(settings.CLAMAV_SOCKET)
status, sig = clam.scan(absolute_path_to_pdf)[absolute_path_to_pdf]
if status == 'OK':
document.isClean = True
elif status == 'FOUND':
document.isClean = False
print("Signature found", sig)
else:
# unknown state
document.isClean = False
print("Unknown return of clamav", status, sig)
document.save()
|
07549339c6b0e4b1c98a11799ca95e90cbf109cd
|
homedisplay/control_milight/management/commands/listen_433.py
|
homedisplay/control_milight/management/commands/listen_433.py
|
from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
|
from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in ITEM_MAP:
item_name = ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
|
Move ITEM_MAP to method variable
|
Move ITEM_MAP to method variable
|
Python
|
bsd-3-clause
|
ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display
|
from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
Move ITEM_MAP to method variable
|
from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in ITEM_MAP:
item_name = ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
|
<commit_before>from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
<commit_msg>Move ITEM_MAP to method variable<commit_after>
|
from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in ITEM_MAP:
item_name = ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
|
from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
Move ITEM_MAP to method variablefrom control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in ITEM_MAP:
item_name = ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
|
<commit_before>from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
<commit_msg>Move ITEM_MAP to method variable<commit_after>from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in ITEM_MAP:
item_name = ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
|
3b3c51cbf77085b4d5ccdbbc41a3c7ee8b67b713
|
turtle-trading.py
|
turtle-trading.py
|
def initialize(context):
"""
Set up algorithm.
"""
# https://www.quantopian.com/help#available-futures
context.markets = [
continuous_future('US'),
continuous_future('TY'),
continuous_future('SB'),
continuous_future('SF'),
continuous_future('BP'),
continuous_future('JY'),
continuous_future('CD'),
continuous_future('SP'),
continuous_future('ED'),
continuous_future('TB'),
continuous_future('GC'),
continuous_future('SV'),
continuous_future('HG'),
continuous_future('CL'),
continuous_future('HO'),
continuous_future('HU')
]
def handle_data(context, data):
"""
Process data every minute.
"""
log.info(context.markets)
|
def initialize(context):
"""
Set up algorithm.
"""
# https://www.quantopian.com/help#available-futures
context.markets = [
continuous_future('US'),
continuous_future('TY'),
continuous_future('SB'),
continuous_future('SF'),
continuous_future('BP'),
continuous_future('JY'),
continuous_future('CD'),
continuous_future('SP'),
continuous_future('ED'),
continuous_future('TB'),
continuous_future('GC'),
continuous_future('SV'),
continuous_future('HG'),
continuous_future('CL'),
continuous_future('HO'),
continuous_future('HU')
]
def before_trading_start(context, data):
"""
Process data before every market open.
"""
markets = context.markets[:]
for market in markets:
if market.end_date < get_datetime():
context.markets.remove(market)
log.info(
'%s stopped trading. Deleted from markets.'
% market.root_symbol
)
assert(len(context.markets) == 14)
def handle_data(context, data):
"""
Process data every minute.
"""
pass
|
Delete markets that stopped trading.
|
Delete markets that stopped trading.
|
Python
|
mit
|
vyq/turtle-trading
|
def initialize(context):
"""
Set up algorithm.
"""
# https://www.quantopian.com/help#available-futures
context.markets = [
continuous_future('US'),
continuous_future('TY'),
continuous_future('SB'),
continuous_future('SF'),
continuous_future('BP'),
continuous_future('JY'),
continuous_future('CD'),
continuous_future('SP'),
continuous_future('ED'),
continuous_future('TB'),
continuous_future('GC'),
continuous_future('SV'),
continuous_future('HG'),
continuous_future('CL'),
continuous_future('HO'),
continuous_future('HU')
]
def handle_data(context, data):
"""
Process data every minute.
"""
log.info(context.markets)Delete markets that stopped trading.
|
def initialize(context):
"""
Set up algorithm.
"""
# https://www.quantopian.com/help#available-futures
context.markets = [
continuous_future('US'),
continuous_future('TY'),
continuous_future('SB'),
continuous_future('SF'),
continuous_future('BP'),
continuous_future('JY'),
continuous_future('CD'),
continuous_future('SP'),
continuous_future('ED'),
continuous_future('TB'),
continuous_future('GC'),
continuous_future('SV'),
continuous_future('HG'),
continuous_future('CL'),
continuous_future('HO'),
continuous_future('HU')
]
def before_trading_start(context, data):
"""
Process data before every market open.
"""
markets = context.markets[:]
for market in markets:
if market.end_date < get_datetime():
context.markets.remove(market)
log.info(
'%s stopped trading. Deleted from markets.'
% market.root_symbol
)
assert(len(context.markets) == 14)
def handle_data(context, data):
"""
Process data every minute.
"""
pass
|
<commit_before>def initialize(context):
"""
Set up algorithm.
"""
# https://www.quantopian.com/help#available-futures
context.markets = [
continuous_future('US'),
continuous_future('TY'),
continuous_future('SB'),
continuous_future('SF'),
continuous_future('BP'),
continuous_future('JY'),
continuous_future('CD'),
continuous_future('SP'),
continuous_future('ED'),
continuous_future('TB'),
continuous_future('GC'),
continuous_future('SV'),
continuous_future('HG'),
continuous_future('CL'),
continuous_future('HO'),
continuous_future('HU')
]
def handle_data(context, data):
"""
Process data every minute.
"""
log.info(context.markets)<commit_msg>Delete markets that stopped trading.<commit_after>
|
def initialize(context):
"""
Set up algorithm.
"""
# https://www.quantopian.com/help#available-futures
context.markets = [
continuous_future('US'),
continuous_future('TY'),
continuous_future('SB'),
continuous_future('SF'),
continuous_future('BP'),
continuous_future('JY'),
continuous_future('CD'),
continuous_future('SP'),
continuous_future('ED'),
continuous_future('TB'),
continuous_future('GC'),
continuous_future('SV'),
continuous_future('HG'),
continuous_future('CL'),
continuous_future('HO'),
continuous_future('HU')
]
def before_trading_start(context, data):
"""
Process data before every market open.
"""
markets = context.markets[:]
for market in markets:
if market.end_date < get_datetime():
context.markets.remove(market)
log.info(
'%s stopped trading. Deleted from markets.'
% market.root_symbol
)
assert(len(context.markets) == 14)
def handle_data(context, data):
"""
Process data every minute.
"""
pass
|
def initialize(context):
"""
Set up algorithm.
"""
# https://www.quantopian.com/help#available-futures
context.markets = [
continuous_future('US'),
continuous_future('TY'),
continuous_future('SB'),
continuous_future('SF'),
continuous_future('BP'),
continuous_future('JY'),
continuous_future('CD'),
continuous_future('SP'),
continuous_future('ED'),
continuous_future('TB'),
continuous_future('GC'),
continuous_future('SV'),
continuous_future('HG'),
continuous_future('CL'),
continuous_future('HO'),
continuous_future('HU')
]
def handle_data(context, data):
"""
Process data every minute.
"""
log.info(context.markets)Delete markets that stopped trading.def initialize(context):
"""
Set up algorithm.
"""
# https://www.quantopian.com/help#available-futures
context.markets = [
continuous_future('US'),
continuous_future('TY'),
continuous_future('SB'),
continuous_future('SF'),
continuous_future('BP'),
continuous_future('JY'),
continuous_future('CD'),
continuous_future('SP'),
continuous_future('ED'),
continuous_future('TB'),
continuous_future('GC'),
continuous_future('SV'),
continuous_future('HG'),
continuous_future('CL'),
continuous_future('HO'),
continuous_future('HU')
]
def before_trading_start(context, data):
"""
Process data before every market open.
"""
markets = context.markets[:]
for market in markets:
if market.end_date < get_datetime():
context.markets.remove(market)
log.info(
'%s stopped trading. Deleted from markets.'
% market.root_symbol
)
assert(len(context.markets) == 14)
def handle_data(context, data):
"""
Process data every minute.
"""
pass
|
<commit_before>def initialize(context):
"""
Set up algorithm.
"""
# https://www.quantopian.com/help#available-futures
context.markets = [
continuous_future('US'),
continuous_future('TY'),
continuous_future('SB'),
continuous_future('SF'),
continuous_future('BP'),
continuous_future('JY'),
continuous_future('CD'),
continuous_future('SP'),
continuous_future('ED'),
continuous_future('TB'),
continuous_future('GC'),
continuous_future('SV'),
continuous_future('HG'),
continuous_future('CL'),
continuous_future('HO'),
continuous_future('HU')
]
def handle_data(context, data):
"""
Process data every minute.
"""
log.info(context.markets)<commit_msg>Delete markets that stopped trading.<commit_after>def initialize(context):
"""
Set up algorithm.
"""
# https://www.quantopian.com/help#available-futures
context.markets = [
continuous_future('US'),
continuous_future('TY'),
continuous_future('SB'),
continuous_future('SF'),
continuous_future('BP'),
continuous_future('JY'),
continuous_future('CD'),
continuous_future('SP'),
continuous_future('ED'),
continuous_future('TB'),
continuous_future('GC'),
continuous_future('SV'),
continuous_future('HG'),
continuous_future('CL'),
continuous_future('HO'),
continuous_future('HU')
]
def before_trading_start(context, data):
"""
Process data before every market open.
"""
markets = context.markets[:]
for market in markets:
if market.end_date < get_datetime():
context.markets.remove(market)
log.info(
'%s stopped trading. Deleted from markets.'
% market.root_symbol
)
assert(len(context.markets) == 14)
def handle_data(context, data):
"""
Process data every minute.
"""
pass
|
c7231cb92a75dc178d068ac9bff16d90032ab270
|
tests/test_utils.py
|
tests/test_utils.py
|
# coding: utf-8
import unittest
from .utils import generate_string
class TestGenerateString(unittest.TestCase):
'''Test string generator function'''
def test_generate(self):
gen_str = generate_string()
self.assertEqual(6, len(gen_str))
self.assertEqual(gen_str, gen_str.lower())
|
# coding: utf-8
import unittest
from utils import generate_string
class TestGenerateString(unittest.TestCase):
'''Test string generator function'''
def test_generate(self):
gen_str = generate_string()
self.assertEqual(6, len(gen_str))
self.assertEqual(gen_str, gen_str.lower())
|
Fix import for test utils
|
Fix import for test utils
|
Python
|
mit
|
sarutobi/Rynda,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/flowofkindness
|
# coding: utf-8
import unittest
from .utils import generate_string
class TestGenerateString(unittest.TestCase):
'''Test string generator function'''
def test_generate(self):
gen_str = generate_string()
self.assertEqual(6, len(gen_str))
self.assertEqual(gen_str, gen_str.lower())
Fix import for test utils
|
# coding: utf-8
import unittest
from utils import generate_string
class TestGenerateString(unittest.TestCase):
'''Test string generator function'''
def test_generate(self):
gen_str = generate_string()
self.assertEqual(6, len(gen_str))
self.assertEqual(gen_str, gen_str.lower())
|
<commit_before># coding: utf-8
import unittest
from .utils import generate_string
class TestGenerateString(unittest.TestCase):
'''Test string generator function'''
def test_generate(self):
gen_str = generate_string()
self.assertEqual(6, len(gen_str))
self.assertEqual(gen_str, gen_str.lower())
<commit_msg>Fix import for test utils<commit_after>
|
# coding: utf-8
import unittest
from utils import generate_string
class TestGenerateString(unittest.TestCase):
'''Test string generator function'''
def test_generate(self):
gen_str = generate_string()
self.assertEqual(6, len(gen_str))
self.assertEqual(gen_str, gen_str.lower())
|
# coding: utf-8
import unittest
from .utils import generate_string
class TestGenerateString(unittest.TestCase):
'''Test string generator function'''
def test_generate(self):
gen_str = generate_string()
self.assertEqual(6, len(gen_str))
self.assertEqual(gen_str, gen_str.lower())
Fix import for test utils# coding: utf-8
import unittest
from utils import generate_string
class TestGenerateString(unittest.TestCase):
'''Test string generator function'''
def test_generate(self):
gen_str = generate_string()
self.assertEqual(6, len(gen_str))
self.assertEqual(gen_str, gen_str.lower())
|
<commit_before># coding: utf-8
import unittest
from .utils import generate_string
class TestGenerateString(unittest.TestCase):
'''Test string generator function'''
def test_generate(self):
gen_str = generate_string()
self.assertEqual(6, len(gen_str))
self.assertEqual(gen_str, gen_str.lower())
<commit_msg>Fix import for test utils<commit_after># coding: utf-8
import unittest
from utils import generate_string
class TestGenerateString(unittest.TestCase):
'''Test string generator function'''
def test_generate(self):
gen_str = generate_string()
self.assertEqual(6, len(gen_str))
self.assertEqual(gen_str, gen_str.lower())
|
832dc359a725ecf1b0319237de4cc13f136b1491
|
docs/extensions/jira.py
|
docs/extensions/jira.py
|
import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(Ticket #" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 1000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 1000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
|
import os
import sys

from docutils import nodes
from docutils.parsers.rst import Directive


def make_link_node(rawtext, app, type, slug, options):
    """Create a link to a JIRA ticket.

    :param rawtext: Text being replaced with link node.
    :param app: Sphinx application context
    :param type: Link type (issue, changeset, etc.)
    :param slug: ID of the ticket to link to
    :param options: Options dictionary passed to role func.
    """
    ticket_url = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-" + slug
    # set_classes(options)
    return nodes.reference(rawtext, "(Ticket #" + slug + ")", refuri=ticket_url,
                           **options)


def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Inline role handler: render ``:jira:`N``` as a link to ticket N.

    Anything that is not an integer no greater than 10000 is reported
    through the docutils error machinery instead of producing a link.
    """
    valid = True
    try:
        ticket_id = int(text)
    except ValueError:
        valid = False
    else:
        if ticket_id > 10000:
            valid = False
    if not valid:
        msg = inliner.reporter.error(
            'HTCondor ticket number must be a number less than or equal to 10000; '
            '"%s" is invalid.' % text, line=lineno)
        prb = inliner.problematic(rawtext, rawtext, msg)
        return [prb], [msg]
    app = inliner.document.settings.env.app
    node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
    return [node], []


def setup(app):
    """Sphinx extension entry point: register the ``jira`` role."""
    app.add_role("jira", ticket_role)
|
Allow Jira tickets over 1000
|
HTCONDOR-1028: Allow Jira tickets over 1000
This used to double check between GitTrac and Jira ticket numbers.
I was tempted to remove the check altogether. However, it would
guard against and unfortunate key bounce. The change is going into
stable, so adding a digit to the number is a minimal change.
|
Python
|
apache-2.0
|
htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor
|
import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(Ticket #" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 1000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 1000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
HTCONDOR-1028: Allow Jira tickets over 1000
This used to double check between GitTrac and Jira ticket numbers.
I was tempted to remove the check altogether. However, it would
guard against and unfortunate key bounce. The change is going into
stable, so adding a digit to the number is a minimal change.
|
import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(Ticket #" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 10000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 10000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
|
<commit_before>import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(Ticket #" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 1000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 1000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
<commit_msg>HTCONDOR-1028: Allow Jira tickets over 1000
This used to double check between GitTrac and Jira ticket numbers.
I was tempted to remove the check altogether. However, it would
guard against and unfortunate key bounce. The change is going into
stable, so adding a digit to the number is a minimal change.<commit_after>
|
import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(Ticket #" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 10000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 10000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
|
import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(Ticket #" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 1000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 1000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
HTCONDOR-1028: Allow Jira tickets over 1000
This used to double check between GitTrac and Jira ticket numbers.
I was tempted to remove the check altogether. However, it would
guard against and unfortunate key bounce. The change is going into
stable, so adding a digit to the number is a minimal change.import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(Ticket #" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 10000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 10000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
|
<commit_before>import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(Ticket #" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 1000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 1000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
<commit_msg>HTCONDOR-1028: Allow Jira tickets over 1000
This used to double check between GitTrac and Jira ticket numbers.
I was tempted to remove the check altogether. However, it would
guard against and unfortunate key bounce. The change is going into
stable, so adding a digit to the number is a minimal change.<commit_after>import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(Ticket #" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 10000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 10000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
|
79d80db7b67c787bf970dd8c593e505bb8915a21
|
bond_analytics_project/bond_analytics_project/urls.py
|
bond_analytics_project/bond_analytics_project/urls.py
|
"""bond_analytics_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(regex='^admin/', view=admin.site.urls),
]
|
"""bond_analytics_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from rest_framework import routers
from views import BondViewSet
admin.autodiscover()
router = routers.DefaultRouter()
router.register(r'bond', BondViewSet)
urlpatterns = patterns(
'',
url(regex='^', view=include(router.urls)),
url(regex='^admin/', view=admin.site.urls),
)
|
Add djangorestframework DefaultRouter instance and registered BondViewSet.
|
Add djangorestframework DefaultRouter instance and registered BondViewSet.
|
Python
|
mit
|
bsmukasa/bond_analytics
|
"""bond_analytics_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(regex='^admin/', view=admin.site.urls),
]
Add djangorestframework DefaultRouter instance and registered BondViewSet.
|
"""bond_analytics_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from rest_framework import routers
from views import BondViewSet
admin.autodiscover()
router = routers.DefaultRouter()
router.register(r'bond', BondViewSet)
urlpatterns = patterns(
'',
url(regex='^', view=include(router.urls)),
url(regex='^admin/', view=admin.site.urls),
)
|
<commit_before>"""bond_analytics_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(regex='^admin/', view=admin.site.urls),
]
<commit_msg>Add djangorestframework DefaultRouter instance and registered BondViewSet.<commit_after>
|
"""bond_analytics_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from rest_framework import routers
from views import BondViewSet
admin.autodiscover()
router = routers.DefaultRouter()
router.register(r'bond', BondViewSet)
urlpatterns = patterns(
'',
url(regex='^', view=include(router.urls)),
url(regex='^admin/', view=admin.site.urls),
)
|
"""bond_analytics_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(regex='^admin/', view=admin.site.urls),
]
Add djangorestframework DefaultRouter instance and registered BondViewSet."""bond_analytics_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from rest_framework import routers
from views import BondViewSet
admin.autodiscover()
router = routers.DefaultRouter()
router.register(r'bond', BondViewSet)
urlpatterns = patterns(
'',
url(regex='^', view=include(router.urls)),
url(regex='^admin/', view=admin.site.urls),
)
|
<commit_before>"""bond_analytics_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(regex='^admin/', view=admin.site.urls),
]
<commit_msg>Add djangorestframework DefaultRouter instance and registered BondViewSet.<commit_after>"""bond_analytics_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from rest_framework import routers
from views import BondViewSet
admin.autodiscover()
router = routers.DefaultRouter()
router.register(r'bond', BondViewSet)
urlpatterns = patterns(
'',
url(regex='^', view=include(router.urls)),
url(regex='^admin/', view=admin.site.urls),
)
|
a951b873885a3747bfb0f664fc6ba89f587b4fd0
|
fake_useragent/settings.py
|
fake_useragent/settings.py
|
from __future__ import absolute_import, unicode_literals
import os
import tempfile
DB = os.path.join(tempfile.gettempdir(), 'fake_useragent.json')
BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'IE': 'Internet Explorer',
}
|
from __future__ import absolute_import, unicode_literals
import os
import tempfile
DB = os.path.join(tempfile.gettempdir(), 'fake_useragent.json')
BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={0}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'IE': 'Internet Explorer',
}
|
Fix Python2.6 format string placeholder.
|
Fix Python2.6 format string placeholder.
|
Python
|
apache-2.0
|
hellysmile/fake-useragent,hellysmile/fake-useragent,mochawich/fake-useragent,hellysmile/fake-useragent
|
from __future__ import absolute_import, unicode_literals
import os
import tempfile
DB = os.path.join(tempfile.gettempdir(), 'fake_useragent.json')
BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'IE': 'Internet Explorer',
}
Fix Python2.6 format string placeholder.
|
from __future__ import absolute_import, unicode_literals
import os
import tempfile
DB = os.path.join(tempfile.gettempdir(), 'fake_useragent.json')
BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={0}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'IE': 'Internet Explorer',
}
|
<commit_before>from __future__ import absolute_import, unicode_literals
import os
import tempfile
DB = os.path.join(tempfile.gettempdir(), 'fake_useragent.json')
BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'IE': 'Internet Explorer',
}
<commit_msg>Fix Python2.6 format string placeholder.<commit_after>
|
from __future__ import absolute_import, unicode_literals
import os
import tempfile
DB = os.path.join(tempfile.gettempdir(), 'fake_useragent.json')
BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={0}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'IE': 'Internet Explorer',
}
|
from __future__ import absolute_import, unicode_literals
import os
import tempfile
DB = os.path.join(tempfile.gettempdir(), 'fake_useragent.json')
BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'IE': 'Internet Explorer',
}
Fix Python2.6 format string placeholder.from __future__ import absolute_import, unicode_literals
import os
import tempfile
DB = os.path.join(tempfile.gettempdir(), 'fake_useragent.json')
BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={0}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'IE': 'Internet Explorer',
}
|
<commit_before>from __future__ import absolute_import, unicode_literals
import os
import tempfile
DB = os.path.join(tempfile.gettempdir(), 'fake_useragent.json')
BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'IE': 'Internet Explorer',
}
<commit_msg>Fix Python2.6 format string placeholder.<commit_after>from __future__ import absolute_import, unicode_literals
import os
import tempfile
DB = os.path.join(tempfile.gettempdir(), 'fake_useragent.json')
BROWSERS_STATS_PAGE = 'http://www.w3schools.com/browsers/browsers_stats.asp'
BROWSER_BASE_PAGE = 'http://useragentstring.com/pages/useragentstring.php?name={0}' # noqa
BROWSERS_COUNT_LIMIT = 50
REPLACEMENTS = {
' ': '',
'_': '',
}
SHORTCUTS = {
'internet explorer': 'internetexplorer',
'ie': 'internetexplorer',
'msie': 'internetexplorer',
'google': 'chrome',
'googlechrome': 'chrome',
'ff': 'firefox',
}
OVERRIDES = {
'IE': 'Internet Explorer',
}
|
7e1ed9cca3e02488d8d189d22e6fca35c0bec108
|
xmantissa/test/test_siteroot.py
|
xmantissa/test/test_siteroot.py
|
from twisted.trial import unittest

from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text

from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin

from zope.interface import implements


class Dummy:
    """Stand-in resource that remembers which prefix created it."""

    def __init__(self, pfx):
        self.pfx = pfx


class PrefixTester(Item, PrefixURLMixin):
    """Minimal ISiteRootPlugin whose resource reports its own prefixURL."""

    implements(ISiteRootPlugin)

    typeName = 'test_prefix_widget'
    schemaVersion = 1

    prefixURL = text()

    def createResource(self):
        return Dummy(self.prefixURL)


class SiteRootTest(unittest.TestCase):

    def testPrefixPriorityMath(self):
        """A concrete prefix beats the catch-all empty prefix."""
        store = Store()
        PrefixTester(store=store, prefixURL=u"hello").installOn(store)
        PrefixTester(store=store, prefixURL=u"").installOn(store)

        website = WebSite(store=store)

        resource, segments = website.locateChild(None, ('hello',))
        self.assertEquals(resource.pfx, 'hello')
        self.assertEquals(segments, ())

        resource, segments = website.locateChild(None, ('',))
        self.assertEquals(resource.pfx, '')
        self.assertEquals(segments, ('',))
|
from twisted.trial import unittest

from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text

from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin

from zope.interface import implements


class Dummy:
    """Stand-in resource that remembers which prefix created it."""

    def __init__(self, pfx):
        self.pfx = pfx


class PrefixTester(Item, PrefixURLMixin):
    """Minimal ISiteRootPlugin whose resource reports its own prefixURL."""

    implements(ISiteRootPlugin)

    typeName = 'test_prefix_widget'
    schemaVersion = 1

    prefixURL = text()

    def createResource(self):
        return Dummy(self.prefixURL)


class SiteRootTest(unittest.TestCase):

    def testPrefixPriorityMath(self):
        """A concrete prefix beats the catch-all empty prefix."""
        store = Store()
        PrefixTester(store=store, prefixURL=u"hello").installOn(store)
        PrefixTester(store=store, prefixURL=u"").installOn(store)

        # The WebSite must itself be installed before it can route children.
        website = WebSite(store=store)
        website.installOn(store)

        resource, segments = website.locateChild(None, ('hello',))
        self.assertEquals(resource.pfx, 'hello')
        self.assertEquals(segments, ())

        resource, segments = website.locateChild(None, ('',))
        self.assertEquals(resource.pfx, '')
        self.assertEquals(segments, ('',))
|
Fix boken test - install WebSite before trying to locateChild
|
Fix boken test - install WebSite before trying to locateChild
|
Python
|
mit
|
twisted/mantissa,twisted/mantissa,twisted/mantissa
|
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
Fix boken test - install WebSite before trying to locateChild
|
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
|
<commit_before>
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
<commit_msg>Fix boken test - install WebSite before trying to locateChild<commit_after>
|
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
|
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
Fix boken test - install WebSite before trying to locateChild
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
|
<commit_before>
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
<commit_msg>Fix boken test - install WebSite before trying to locateChild<commit_after>
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
|
74d9bf3818b2c93e78028b5ff24e4c0b7e231ce1
|
scripts/downgrade-pip-on-pypy.py
|
scripts/downgrade-pip-on-pypy.py
|
"""
Downgrade to pip < 20.2 when running on PyPy because pip 20.2 (the most recent
release at the time of writing) breaks compatibility with PyPy, thereby causing
Travis CI builds of py2deb to fail as well. For more details please refer to
https://github.com/pypa/pip/issues/8653.
"""
import pip
import platform
import subprocess
from distutils.version import LooseVersion
if platform.python_implementation() == 'PyPy':
installed_release = LooseVersion(pip.__version__)
known_bad_release = LooseVersion('20.2')
if installed_release >= known_bad_release:
# Given that pip is broken, we can't use it to downgrade itself!
# Fortunately setuptools provides easy_install which works fine.
subprocess.check_call(['easy_install', 'pip < 20.2'])
|
"""
Downgrade to pip < 20.2 when running on PyPy.
Unfortunately pip 20.2 (the most recent release at the time of writing) breaks
compatibility with PyPy, thereby causing Travis CI builds of py2deb to fail as
well. For details please refer to https://github.com/pypa/pip/issues/8653.
"""
import pip
import platform
import subprocess
from distutils.version import LooseVersion
if platform.python_implementation() == 'PyPy':
installed_release = LooseVersion(pip.__version__)
known_bad_release = LooseVersion('20.2')
if installed_release >= known_bad_release:
# Given that pip is broken, we can't use it to downgrade itself!
# Fortunately setuptools provides easy_install which works fine.
subprocess.check_call(['easy_install', 'pip < 20.2'])
|
Fix flake8 "violations" in new PyPy compensation script
|
Fix flake8 "violations" in new PyPy compensation script
|
Python
|
mit
|
paylogic/py2deb,paylogic/py2deb
|
"""
Downgrade to pip < 20.2 when running on PyPy because pip 20.2 (the most recent
release at the time of writing) breaks compatibility with PyPy, thereby causing
Travis CI builds of py2deb to fail as well. For more details please refer to
https://github.com/pypa/pip/issues/8653.
"""
import pip
import platform
import subprocess
from distutils.version import LooseVersion
if platform.python_implementation() == 'PyPy':
installed_release = LooseVersion(pip.__version__)
known_bad_release = LooseVersion('20.2')
if installed_release >= known_bad_release:
# Given that pip is broken, we can't use it to downgrade itself!
# Fortunately setuptools provides easy_install which works fine.
subprocess.check_call(['easy_install', 'pip < 20.2'])
Fix flake8 "violations" in new PyPy compensation script
|
"""
Downgrade to pip < 20.2 when running on PyPy.
Unfortunately pip 20.2 (the most recent release at the time of writing) breaks
compatibility with PyPy, thereby causing Travis CI builds of py2deb to fail as
well. For details please refer to https://github.com/pypa/pip/issues/8653.
"""
import pip
import platform
import subprocess
from distutils.version import LooseVersion
if platform.python_implementation() == 'PyPy':
installed_release = LooseVersion(pip.__version__)
known_bad_release = LooseVersion('20.2')
if installed_release >= known_bad_release:
# Given that pip is broken, we can't use it to downgrade itself!
# Fortunately setuptools provides easy_install which works fine.
subprocess.check_call(['easy_install', 'pip < 20.2'])
|
<commit_before>"""
Downgrade to pip < 20.2 when running on PyPy because pip 20.2 (the most recent
release at the time of writing) breaks compatibility with PyPy, thereby causing
Travis CI builds of py2deb to fail as well. For more details please refer to
https://github.com/pypa/pip/issues/8653.
"""
import pip
import platform
import subprocess
from distutils.version import LooseVersion
if platform.python_implementation() == 'PyPy':
installed_release = LooseVersion(pip.__version__)
known_bad_release = LooseVersion('20.2')
if installed_release >= known_bad_release:
# Given that pip is broken, we can't use it to downgrade itself!
# Fortunately setuptools provides easy_install which works fine.
subprocess.check_call(['easy_install', 'pip < 20.2'])
<commit_msg>Fix flake8 "violations" in new PyPy compensation script<commit_after>
|
"""
Downgrade to pip < 20.2 when running on PyPy.
Unfortunately pip 20.2 (the most recent release at the time of writing) breaks
compatibility with PyPy, thereby causing Travis CI builds of py2deb to fail as
well. For details please refer to https://github.com/pypa/pip/issues/8653.
"""
import pip
import platform
import subprocess
from distutils.version import LooseVersion
if platform.python_implementation() == 'PyPy':
installed_release = LooseVersion(pip.__version__)
known_bad_release = LooseVersion('20.2')
if installed_release >= known_bad_release:
# Given that pip is broken, we can't use it to downgrade itself!
# Fortunately setuptools provides easy_install which works fine.
subprocess.check_call(['easy_install', 'pip < 20.2'])
|
"""
Downgrade to pip < 20.2 when running on PyPy because pip 20.2 (the most recent
release at the time of writing) breaks compatibility with PyPy, thereby causing
Travis CI builds of py2deb to fail as well. For more details please refer to
https://github.com/pypa/pip/issues/8653.
"""
import pip
import platform
import subprocess
from distutils.version import LooseVersion
if platform.python_implementation() == 'PyPy':
installed_release = LooseVersion(pip.__version__)
known_bad_release = LooseVersion('20.2')
if installed_release >= known_bad_release:
# Given that pip is broken, we can't use it to downgrade itself!
# Fortunately setuptools provides easy_install which works fine.
subprocess.check_call(['easy_install', 'pip < 20.2'])
Fix flake8 "violations" in new PyPy compensation script"""
Downgrade to pip < 20.2 when running on PyPy.
Unfortunately pip 20.2 (the most recent release at the time of writing) breaks
compatibility with PyPy, thereby causing Travis CI builds of py2deb to fail as
well. For details please refer to https://github.com/pypa/pip/issues/8653.
"""
import pip
import platform
import subprocess
from distutils.version import LooseVersion
if platform.python_implementation() == 'PyPy':
installed_release = LooseVersion(pip.__version__)
known_bad_release = LooseVersion('20.2')
if installed_release >= known_bad_release:
# Given that pip is broken, we can't use it to downgrade itself!
# Fortunately setuptools provides easy_install which works fine.
subprocess.check_call(['easy_install', 'pip < 20.2'])
|
<commit_before>"""
Downgrade to pip < 20.2 when running on PyPy because pip 20.2 (the most recent
release at the time of writing) breaks compatibility with PyPy, thereby causing
Travis CI builds of py2deb to fail as well. For more details please refer to
https://github.com/pypa/pip/issues/8653.
"""
import pip
import platform
import subprocess
from distutils.version import LooseVersion
if platform.python_implementation() == 'PyPy':
installed_release = LooseVersion(pip.__version__)
known_bad_release = LooseVersion('20.2')
if installed_release >= known_bad_release:
# Given that pip is broken, we can't use it to downgrade itself!
# Fortunately setuptools provides easy_install which works fine.
subprocess.check_call(['easy_install', 'pip < 20.2'])
<commit_msg>Fix flake8 "violations" in new PyPy compensation script<commit_after>"""
Downgrade to pip < 20.2 when running on PyPy.
Unfortunately pip 20.2 (the most recent release at the time of writing) breaks
compatibility with PyPy, thereby causing Travis CI builds of py2deb to fail as
well. For details please refer to https://github.com/pypa/pip/issues/8653.
"""
import pip
import platform
import subprocess
from distutils.version import LooseVersion
if platform.python_implementation() == 'PyPy':
installed_release = LooseVersion(pip.__version__)
known_bad_release = LooseVersion('20.2')
if installed_release >= known_bad_release:
# Given that pip is broken, we can't use it to downgrade itself!
# Fortunately setuptools provides easy_install which works fine.
subprocess.check_call(['easy_install', 'pip < 20.2'])
|
b428473ba1c80bf0c5502d2c9621117ec206d66a
|
src/pythonModules/fourgp_pipeline/fourgp_pipeline/__init__.py
|
src/pythonModules/fourgp_pipeline/fourgp_pipeline/__init__.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This python module defines the Pipeline implemented by 4GP.
"""
import logging
from numpy import RankWarning
from warnings import simplefilter
from .pipeline_manager import PipelineManager
from .pipeline import Pipeline
from .spectrum_analysis import SpectrumAnalysis
__version__ = "20190301.1"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This python module defines the Pipeline implemented by 4GP.
"""
import logging
from numpy import RankWarning
from warnings import simplefilter
from .pipeline_manager import PipelineManager
from .pipeline import Pipeline
from .pipeline_fgk import PipelineFGK
from .spectrum_analysis import SpectrumAnalysis
__version__ = "20190301.1"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
Make sure that we make PipelineFGK accessible in fourgp_pipeline
|
Make sure that we make PipelineFGK accessible in fourgp_pipeline
|
Python
|
mit
|
dcf21/4most-4gp,dcf21/4most-4gp,dcf21/4most-4gp
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This python module defines the Pipeline implemented by 4GP.
"""
import logging
from numpy import RankWarning
from warnings import simplefilter
from .pipeline_manager import PipelineManager
from .pipeline import Pipeline
from .spectrum_analysis import SpectrumAnalysis
__version__ = "20190301.1"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
Make sure that we make PipelineFGK accessible in fourgp_pipeline
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This python module defines the Pipeline implemented by 4GP.
"""
import logging
from numpy import RankWarning
from warnings import simplefilter
from .pipeline_manager import PipelineManager
from .pipeline import Pipeline
from .pipeline_fgk import PipelineFGK
from .spectrum_analysis import SpectrumAnalysis
__version__ = "20190301.1"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This python module defines the Pipeline implemented by 4GP.
"""
import logging
from numpy import RankWarning
from warnings import simplefilter
from .pipeline_manager import PipelineManager
from .pipeline import Pipeline
from .spectrum_analysis import SpectrumAnalysis
__version__ = "20190301.1"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
<commit_msg>Make sure that we make PipelineFGK accessible in fourgp_pipeline<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This python module defines the Pipeline implemented by 4GP.
"""
import logging
from numpy import RankWarning
from warnings import simplefilter
from .pipeline_manager import PipelineManager
from .pipeline import Pipeline
from .pipeline_fgk import PipelineFGK
from .spectrum_analysis import SpectrumAnalysis
__version__ = "20190301.1"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This python module defines the Pipeline implemented by 4GP.
"""
import logging
from numpy import RankWarning
from warnings import simplefilter
from .pipeline_manager import PipelineManager
from .pipeline import Pipeline
from .spectrum_analysis import SpectrumAnalysis
__version__ = "20190301.1"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
Make sure that we make PipelineFGK accessible in fourgp_pipeline#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This python module defines the Pipeline implemented by 4GP.
"""
import logging
from numpy import RankWarning
from warnings import simplefilter
from .pipeline_manager import PipelineManager
from .pipeline import Pipeline
from .pipeline_fgk import PipelineFGK
from .spectrum_analysis import SpectrumAnalysis
__version__ = "20190301.1"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This python module defines the Pipeline implemented by 4GP.
"""
import logging
from numpy import RankWarning
from warnings import simplefilter
from .pipeline_manager import PipelineManager
from .pipeline import Pipeline
from .spectrum_analysis import SpectrumAnalysis
__version__ = "20190301.1"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
<commit_msg>Make sure that we make PipelineFGK accessible in fourgp_pipeline<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This python module defines the Pipeline implemented by 4GP.
"""
import logging
from numpy import RankWarning
from warnings import simplefilter
from .pipeline_manager import PipelineManager
from .pipeline import Pipeline
from .pipeline_fgk import PipelineFGK
from .spectrum_analysis import SpectrumAnalysis
__version__ = "20190301.1"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
8e2ec397f4e0c66e3f22056202d0d0829cdd64ac
|
publishing/serializers.py
|
publishing/serializers.py
|
from rest_framework import serializers
from models import Page
class PageSerializer(serializers.ModelSerializer):
parent = serializers.PrimaryKeyRelatedField(required=False)
children = serializers.PrimaryKeyRelatedField(many=True, required=False)
class Meta:
model = Page
fields = (
'id',
'parent',
'children',
'title',
'slug',
'url',
'content',
'summary',
'author',
'status',
'publish_date',
'type',
'created',
'modified'
)
|
from rest_framework import serializers
from models import Page
class PageSerializer(serializers.ModelSerializer):
parent = serializers.PrimaryKeyRelatedField(required=False)
children = serializers.PrimaryKeyRelatedField(many=True, required=False, read_only=True)
class Meta:
model = Page
fields = (
'id',
'parent',
'children',
'title',
'slug',
'url',
'content',
'summary',
'author',
'status',
'publish_date',
'type',
'created',
'modified'
)
|
Make the children field in the api read only.
|
Make the children field in the api read only.
|
Python
|
mit
|
olofsj/django-simple-publishing,olofsj/django-simple-publishing
|
from rest_framework import serializers
from models import Page
class PageSerializer(serializers.ModelSerializer):
parent = serializers.PrimaryKeyRelatedField(required=False)
children = serializers.PrimaryKeyRelatedField(many=True, required=False)
class Meta:
model = Page
fields = (
'id',
'parent',
'children',
'title',
'slug',
'url',
'content',
'summary',
'author',
'status',
'publish_date',
'type',
'created',
'modified'
)
Make the children field in the api read only.
|
from rest_framework import serializers
from models import Page
class PageSerializer(serializers.ModelSerializer):
parent = serializers.PrimaryKeyRelatedField(required=False)
children = serializers.PrimaryKeyRelatedField(many=True, required=False, read_only=True)
class Meta:
model = Page
fields = (
'id',
'parent',
'children',
'title',
'slug',
'url',
'content',
'summary',
'author',
'status',
'publish_date',
'type',
'created',
'modified'
)
|
<commit_before>from rest_framework import serializers
from models import Page
class PageSerializer(serializers.ModelSerializer):
parent = serializers.PrimaryKeyRelatedField(required=False)
children = serializers.PrimaryKeyRelatedField(many=True, required=False)
class Meta:
model = Page
fields = (
'id',
'parent',
'children',
'title',
'slug',
'url',
'content',
'summary',
'author',
'status',
'publish_date',
'type',
'created',
'modified'
)
<commit_msg>Make the children field in the api read only.<commit_after>
|
from rest_framework import serializers
from models import Page
class PageSerializer(serializers.ModelSerializer):
parent = serializers.PrimaryKeyRelatedField(required=False)
children = serializers.PrimaryKeyRelatedField(many=True, required=False, read_only=True)
class Meta:
model = Page
fields = (
'id',
'parent',
'children',
'title',
'slug',
'url',
'content',
'summary',
'author',
'status',
'publish_date',
'type',
'created',
'modified'
)
|
from rest_framework import serializers
from models import Page
class PageSerializer(serializers.ModelSerializer):
parent = serializers.PrimaryKeyRelatedField(required=False)
children = serializers.PrimaryKeyRelatedField(many=True, required=False)
class Meta:
model = Page
fields = (
'id',
'parent',
'children',
'title',
'slug',
'url',
'content',
'summary',
'author',
'status',
'publish_date',
'type',
'created',
'modified'
)
Make the children field in the api read only.from rest_framework import serializers
from models import Page
class PageSerializer(serializers.ModelSerializer):
parent = serializers.PrimaryKeyRelatedField(required=False)
children = serializers.PrimaryKeyRelatedField(many=True, required=False, read_only=True)
class Meta:
model = Page
fields = (
'id',
'parent',
'children',
'title',
'slug',
'url',
'content',
'summary',
'author',
'status',
'publish_date',
'type',
'created',
'modified'
)
|
<commit_before>from rest_framework import serializers
from models import Page
class PageSerializer(serializers.ModelSerializer):
parent = serializers.PrimaryKeyRelatedField(required=False)
children = serializers.PrimaryKeyRelatedField(many=True, required=False)
class Meta:
model = Page
fields = (
'id',
'parent',
'children',
'title',
'slug',
'url',
'content',
'summary',
'author',
'status',
'publish_date',
'type',
'created',
'modified'
)
<commit_msg>Make the children field in the api read only.<commit_after>from rest_framework import serializers
from models import Page
class PageSerializer(serializers.ModelSerializer):
parent = serializers.PrimaryKeyRelatedField(required=False)
children = serializers.PrimaryKeyRelatedField(many=True, required=False, read_only=True)
class Meta:
model = Page
fields = (
'id',
'parent',
'children',
'title',
'slug',
'url',
'content',
'summary',
'author',
'status',
'publish_date',
'type',
'created',
'modified'
)
|
e4b9899e8dee8070b2c214becd7b4748b30c6949
|
admin_log_view/middleware.py
|
admin_log_view/middleware.py
|
from models import error
from models import info
from django.conf import settings
class LoggerMiddleware:
def process_request(self, request):
if (request.path.startswith(settings.ADMIN_MEDIA_PREFIX)) or (request.path == '/favicon.ico'):
return
else:
msg = 'user ' + str(request.user) + ' path ' + str(request.path)
info(msg)
def process_exception(self, request, exception):
if hasattr(exception,'value'):
msg = 'user: ' + str(request.user) + ' exception ' + str(type(exception)) + ' value: ' + str(exception.value)
error(msg)
else:
msg = 'user: ' + str(request.user) + ' exception: ' + str(type(exception))
error(msg)
|
from models import error
from models import info
from django.conf import settings
class LoggerMiddleware:
def process_request(self, request):
if (request.path.startswith(settings.ADMIN_MEDIA_PREFIX)) or (request.path == '/favicon.ico'):
return
else:
msg = 'user: ' + str(request.user) + ' path: ' + str(request.path)
info(msg)
def process_exception(self, request, exception):
if hasattr(exception,'value'):
msg = 'user: ' + str(request.user) + ' exception ' + str(type(exception)) + ' value: ' + str(exception.value)
error(msg)
else:
msg = 'user: ' + str(request.user) + ' exception: ' + str(type(exception))
error(msg)
|
Improve the sentence for the log
|
[Logger] Improve the sentence for the log
|
Python
|
agpl-3.0
|
adieu/authentic2,incuna/authentic,pu239ppy/authentic2,BryceLohr/authentic,adieu/authentic2,pu239ppy/authentic2,incuna/authentic,pu239ppy/authentic2,BryceLohr/authentic,incuna/authentic,incuna/authentic,incuna/authentic,pu239ppy/authentic2,BryceLohr/authentic,adieu/authentic2,adieu/authentic2,BryceLohr/authentic
|
from models import error
from models import info
from django.conf import settings
class LoggerMiddleware:
def process_request(self, request):
if (request.path.startswith(settings.ADMIN_MEDIA_PREFIX)) or (request.path == '/favicon.ico'):
return
else:
msg = 'user ' + str(request.user) + ' path ' + str(request.path)
info(msg)
def process_exception(self, request, exception):
if hasattr(exception,'value'):
msg = 'user: ' + str(request.user) + ' exception ' + str(type(exception)) + ' value: ' + str(exception.value)
error(msg)
else:
msg = 'user: ' + str(request.user) + ' exception: ' + str(type(exception))
error(msg)
[Logger] Improve the sentence for the log
|
from models import error
from models import info
from django.conf import settings
class LoggerMiddleware:
def process_request(self, request):
if (request.path.startswith(settings.ADMIN_MEDIA_PREFIX)) or (request.path == '/favicon.ico'):
return
else:
msg = 'user: ' + str(request.user) + ' path: ' + str(request.path)
info(msg)
def process_exception(self, request, exception):
if hasattr(exception,'value'):
msg = 'user: ' + str(request.user) + ' exception ' + str(type(exception)) + ' value: ' + str(exception.value)
error(msg)
else:
msg = 'user: ' + str(request.user) + ' exception: ' + str(type(exception))
error(msg)
|
<commit_before>from models import error
from models import info
from django.conf import settings
class LoggerMiddleware:
def process_request(self, request):
if (request.path.startswith(settings.ADMIN_MEDIA_PREFIX)) or (request.path == '/favicon.ico'):
return
else:
msg = 'user ' + str(request.user) + ' path ' + str(request.path)
info(msg)
def process_exception(self, request, exception):
if hasattr(exception,'value'):
msg = 'user: ' + str(request.user) + ' exception ' + str(type(exception)) + ' value: ' + str(exception.value)
error(msg)
else:
msg = 'user: ' + str(request.user) + ' exception: ' + str(type(exception))
error(msg)
<commit_msg>[Logger] Improve the sentence for the log<commit_after>
|
from models import error
from models import info
from django.conf import settings
class LoggerMiddleware:
def process_request(self, request):
if (request.path.startswith(settings.ADMIN_MEDIA_PREFIX)) or (request.path == '/favicon.ico'):
return
else:
msg = 'user: ' + str(request.user) + ' path: ' + str(request.path)
info(msg)
def process_exception(self, request, exception):
if hasattr(exception,'value'):
msg = 'user: ' + str(request.user) + ' exception ' + str(type(exception)) + ' value: ' + str(exception.value)
error(msg)
else:
msg = 'user: ' + str(request.user) + ' exception: ' + str(type(exception))
error(msg)
|
from models import error
from models import info
from django.conf import settings
class LoggerMiddleware:
def process_request(self, request):
if (request.path.startswith(settings.ADMIN_MEDIA_PREFIX)) or (request.path == '/favicon.ico'):
return
else:
msg = 'user ' + str(request.user) + ' path ' + str(request.path)
info(msg)
def process_exception(self, request, exception):
if hasattr(exception,'value'):
msg = 'user: ' + str(request.user) + ' exception ' + str(type(exception)) + ' value: ' + str(exception.value)
error(msg)
else:
msg = 'user: ' + str(request.user) + ' exception: ' + str(type(exception))
error(msg)
[Logger] Improve the sentence for the logfrom models import error
from models import info
from django.conf import settings
class LoggerMiddleware:
def process_request(self, request):
if (request.path.startswith(settings.ADMIN_MEDIA_PREFIX)) or (request.path == '/favicon.ico'):
return
else:
msg = 'user: ' + str(request.user) + ' path: ' + str(request.path)
info(msg)
def process_exception(self, request, exception):
if hasattr(exception,'value'):
msg = 'user: ' + str(request.user) + ' exception ' + str(type(exception)) + ' value: ' + str(exception.value)
error(msg)
else:
msg = 'user: ' + str(request.user) + ' exception: ' + str(type(exception))
error(msg)
|
<commit_before>from models import error
from models import info
from django.conf import settings
class LoggerMiddleware:
def process_request(self, request):
if (request.path.startswith(settings.ADMIN_MEDIA_PREFIX)) or (request.path == '/favicon.ico'):
return
else:
msg = 'user ' + str(request.user) + ' path ' + str(request.path)
info(msg)
def process_exception(self, request, exception):
if hasattr(exception,'value'):
msg = 'user: ' + str(request.user) + ' exception ' + str(type(exception)) + ' value: ' + str(exception.value)
error(msg)
else:
msg = 'user: ' + str(request.user) + ' exception: ' + str(type(exception))
error(msg)
<commit_msg>[Logger] Improve the sentence for the log<commit_after>from models import error
from models import info
from django.conf import settings
class LoggerMiddleware:
def process_request(self, request):
if (request.path.startswith(settings.ADMIN_MEDIA_PREFIX)) or (request.path == '/favicon.ico'):
return
else:
msg = 'user: ' + str(request.user) + ' path: ' + str(request.path)
info(msg)
def process_exception(self, request, exception):
if hasattr(exception,'value'):
msg = 'user: ' + str(request.user) + ' exception ' + str(type(exception)) + ' value: ' + str(exception.value)
error(msg)
else:
msg = 'user: ' + str(request.user) + ' exception: ' + str(type(exception))
error(msg)
|
f580b793ad74da3338e62ba16d30d986e95dcbc1
|
githubsync.py
|
githubsync.py
|
#!/usr/bin/env python
"""
Grab all of a user's projects from github.
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import os
import sys
import subprocess
import github
def sync(path, user, repo):
p=os.path.join(path, repo.name) + ".git"
print "Syncing %s -> %s" % (repo, p)
if os.path.exists(p):
subprocess.call(['git', '--git-dir=' + p, 'fetch'])
else:
url = "git://github.com/%s/%s" % (user.login, repo.name)
subprocess.call(['git', 'clone', '--bare', url, p])
subprocess.call(['git', '--git-dir=' + p, 'remote', 'add', 'origin',
url])
def usage():
sys.stderr.write("Usage: %s username destination_url\n" % sys.argv[0])
sys.stderr.write(
" Ensures you've got the latest stuff for the given user.\n")
if __name__ == '__main__':
try:
user, path = sys.argv[1:]
except ValueError:
usage()
exit(1)
gh=github.GitHub()
u = gh.user(user)
for repo in u.repos.values():
sync(path, u, repo)
|
#!/usr/bin/env python
"""
Grab all of a user's projects from github.
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import os
import sys
import subprocess
import github
def sync(path, user, repo):
p=os.path.join(path, repo.name) + ".git"
print "Syncing %s -> %s" % (repo, p)
if os.path.exists(p):
subprocess.call(['git', '--git-dir=' + p, 'fetch', '-f'])
else:
url = "git://github.com/%s/%s" % (user.login, repo.name)
subprocess.call(['git', 'clone', '--bare', url, p])
subprocess.call(['git', '--git-dir=' + p, 'remote', 'add', 'origin',
url])
def usage():
sys.stderr.write("Usage: %s username destination_url\n" % sys.argv[0])
sys.stderr.write(
" Ensures you've got the latest stuff for the given user.\n")
if __name__ == '__main__':
try:
user, path = sys.argv[1:]
except ValueError:
usage()
exit(1)
gh=github.GitHub()
u = gh.user(user)
for repo in u.repos.values():
sync(path, u, repo)
|
Add -f to fetch since the goal is to sync down, so we don't care about ff.
|
Add -f to fetch since the goal is to sync down, so we don't care about ff.
|
Python
|
mit
|
dustin/py-github
|
#!/usr/bin/env python
"""
Grab all of a user's projects from github.
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import os
import sys
import subprocess
import github
def sync(path, user, repo):
p=os.path.join(path, repo.name) + ".git"
print "Syncing %s -> %s" % (repo, p)
if os.path.exists(p):
subprocess.call(['git', '--git-dir=' + p, 'fetch'])
else:
url = "git://github.com/%s/%s" % (user.login, repo.name)
subprocess.call(['git', 'clone', '--bare', url, p])
subprocess.call(['git', '--git-dir=' + p, 'remote', 'add', 'origin',
url])
def usage():
sys.stderr.write("Usage: %s username destination_url\n" % sys.argv[0])
sys.stderr.write(
" Ensures you've got the latest stuff for the given user.\n")
if __name__ == '__main__':
try:
user, path = sys.argv[1:]
except ValueError:
usage()
exit(1)
gh=github.GitHub()
u = gh.user(user)
for repo in u.repos.values():
sync(path, u, repo)
Add -f to fetch since the goal is to sync down, so we don't care about ff.
|
#!/usr/bin/env python
"""
Grab all of a user's projects from github.
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import os
import sys
import subprocess
import github
def sync(path, user, repo):
p=os.path.join(path, repo.name) + ".git"
print "Syncing %s -> %s" % (repo, p)
if os.path.exists(p):
subprocess.call(['git', '--git-dir=' + p, 'fetch', '-f'])
else:
url = "git://github.com/%s/%s" % (user.login, repo.name)
subprocess.call(['git', 'clone', '--bare', url, p])
subprocess.call(['git', '--git-dir=' + p, 'remote', 'add', 'origin',
url])
def usage():
sys.stderr.write("Usage: %s username destination_url\n" % sys.argv[0])
sys.stderr.write(
" Ensures you've got the latest stuff for the given user.\n")
if __name__ == '__main__':
try:
user, path = sys.argv[1:]
except ValueError:
usage()
exit(1)
gh=github.GitHub()
u = gh.user(user)
for repo in u.repos.values():
sync(path, u, repo)
|
<commit_before>#!/usr/bin/env python
"""
Grab all of a user's projects from github.
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import os
import sys
import subprocess
import github
def sync(path, user, repo):
p=os.path.join(path, repo.name) + ".git"
print "Syncing %s -> %s" % (repo, p)
if os.path.exists(p):
subprocess.call(['git', '--git-dir=' + p, 'fetch'])
else:
url = "git://github.com/%s/%s" % (user.login, repo.name)
subprocess.call(['git', 'clone', '--bare', url, p])
subprocess.call(['git', '--git-dir=' + p, 'remote', 'add', 'origin',
url])
def usage():
sys.stderr.write("Usage: %s username destination_url\n" % sys.argv[0])
sys.stderr.write(
" Ensures you've got the latest stuff for the given user.\n")
if __name__ == '__main__':
try:
user, path = sys.argv[1:]
except ValueError:
usage()
exit(1)
gh=github.GitHub()
u = gh.user(user)
for repo in u.repos.values():
sync(path, u, repo)
<commit_msg>Add -f to fetch since the goal is to sync down, so we don't care about ff.<commit_after>
|
#!/usr/bin/env python
"""
Grab all of a user's projects from github.
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import os
import sys
import subprocess
import github
def sync(path, user, repo):
p=os.path.join(path, repo.name) + ".git"
print "Syncing %s -> %s" % (repo, p)
if os.path.exists(p):
subprocess.call(['git', '--git-dir=' + p, 'fetch', '-f'])
else:
url = "git://github.com/%s/%s" % (user.login, repo.name)
subprocess.call(['git', 'clone', '--bare', url, p])
subprocess.call(['git', '--git-dir=' + p, 'remote', 'add', 'origin',
url])
def usage():
sys.stderr.write("Usage: %s username destination_url\n" % sys.argv[0])
sys.stderr.write(
" Ensures you've got the latest stuff for the given user.\n")
if __name__ == '__main__':
try:
user, path = sys.argv[1:]
except ValueError:
usage()
exit(1)
gh=github.GitHub()
u = gh.user(user)
for repo in u.repos.values():
sync(path, u, repo)
|
#!/usr/bin/env python
"""
Grab all of a user's projects from github.
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import os
import sys
import subprocess
import github
def sync(path, user, repo):
p=os.path.join(path, repo.name) + ".git"
print "Syncing %s -> %s" % (repo, p)
if os.path.exists(p):
subprocess.call(['git', '--git-dir=' + p, 'fetch'])
else:
url = "git://github.com/%s/%s" % (user.login, repo.name)
subprocess.call(['git', 'clone', '--bare', url, p])
subprocess.call(['git', '--git-dir=' + p, 'remote', 'add', 'origin',
url])
def usage():
sys.stderr.write("Usage: %s username destination_url\n" % sys.argv[0])
sys.stderr.write(
" Ensures you've got the latest stuff for the given user.\n")
if __name__ == '__main__':
try:
user, path = sys.argv[1:]
except ValueError:
usage()
exit(1)
gh=github.GitHub()
u = gh.user(user)
for repo in u.repos.values():
sync(path, u, repo)
Add -f to fetch since the goal is to sync down, so we don't care about ff.#!/usr/bin/env python
"""
Grab all of a user's projects from github.
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import os
import sys
import subprocess
import github
def sync(path, user, repo):
p=os.path.join(path, repo.name) + ".git"
print "Syncing %s -> %s" % (repo, p)
if os.path.exists(p):
subprocess.call(['git', '--git-dir=' + p, 'fetch', '-f'])
else:
url = "git://github.com/%s/%s" % (user.login, repo.name)
subprocess.call(['git', 'clone', '--bare', url, p])
subprocess.call(['git', '--git-dir=' + p, 'remote', 'add', 'origin',
url])
def usage():
sys.stderr.write("Usage: %s username destination_url\n" % sys.argv[0])
sys.stderr.write(
" Ensures you've got the latest stuff for the given user.\n")
if __name__ == '__main__':
try:
user, path = sys.argv[1:]
except ValueError:
usage()
exit(1)
gh=github.GitHub()
u = gh.user(user)
for repo in u.repos.values():
sync(path, u, repo)
|
<commit_before>#!/usr/bin/env python
"""
Grab all of a user's projects from github.
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import os
import sys
import subprocess
import github
def sync(path, user, repo):
p=os.path.join(path, repo.name) + ".git"
print "Syncing %s -> %s" % (repo, p)
if os.path.exists(p):
subprocess.call(['git', '--git-dir=' + p, 'fetch'])
else:
url = "git://github.com/%s/%s" % (user.login, repo.name)
subprocess.call(['git', 'clone', '--bare', url, p])
subprocess.call(['git', '--git-dir=' + p, 'remote', 'add', 'origin',
url])
def usage():
sys.stderr.write("Usage: %s username destination_url\n" % sys.argv[0])
sys.stderr.write(
" Ensures you've got the latest stuff for the given user.\n")
if __name__ == '__main__':
try:
user, path = sys.argv[1:]
except ValueError:
usage()
exit(1)
gh=github.GitHub()
u = gh.user(user)
for repo in u.repos.values():
sync(path, u, repo)
<commit_msg>Add -f to fetch since the goal is to sync down, so we don't care about ff.<commit_after>#!/usr/bin/env python
"""
Grab all of a user's projects from github.
Copyright (c) 2008 Dustin Sallings <dustin@spy.net>
"""
import os
import sys
import subprocess
import github
def sync(path, user, repo):
p=os.path.join(path, repo.name) + ".git"
print "Syncing %s -> %s" % (repo, p)
if os.path.exists(p):
subprocess.call(['git', '--git-dir=' + p, 'fetch', '-f'])
else:
url = "git://github.com/%s/%s" % (user.login, repo.name)
subprocess.call(['git', 'clone', '--bare', url, p])
subprocess.call(['git', '--git-dir=' + p, 'remote', 'add', 'origin',
url])
def usage():
sys.stderr.write("Usage: %s username destination_url\n" % sys.argv[0])
sys.stderr.write(
" Ensures you've got the latest stuff for the given user.\n")
if __name__ == '__main__':
try:
user, path = sys.argv[1:]
except ValueError:
usage()
exit(1)
gh=github.GitHub()
u = gh.user(user)
for repo in u.repos.values():
sync(path, u, repo)
|
442f7d10429ed6964a5558680780b3c79ae12d5b
|
greenquote.py
|
greenquote.py
|
import sys
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
@app.route('/')
def hello():
engine = create_engine('postgresql://postgres:becreative@localhost/greenq')
dfdict = getqf.scraper()
df = dfdict['nsdqct.csv']
df.to_sql(name='entries', con = engine, if_exists = 'replace')
output = pd.read_sql_query('SELECT * FROM entries', engine)
mean = output[[2]].mean()
return u"The mean is :" + str(mean)
if __name__ == "__main__":
app.run(debug=True)
|
import sys
import os
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'HEROKU_POSTGRESQL_GOLD_URL', ''
)
dfdict = getqf.scraper()
engine = create_engine(app.config['DATABASE'])
@app.route('/')
def hello():
df = dfdict['nsdqct.csv']
df.to_sql(name='entries', con = engine, if_exists = 'replace')
output = pd.read_sql_query('SELECT * FROM entries', engine)
mean = output[[2]].mean()
return u"The mean is :" + str(mean)
if __name__ == "__main__":
app.run(debug=True)
|
Fix problem with Heroku database url
|
Fix problem with Heroku database url
|
Python
|
mit
|
caseymacphee/green_quote,caseymacphee/green_quote
|
import sys
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
@app.route('/')
def hello():
engine = create_engine('postgresql://postgres:becreative@localhost/greenq')
dfdict = getqf.scraper()
df = dfdict['nsdqct.csv']
df.to_sql(name='entries', con = engine, if_exists = 'replace')
output = pd.read_sql_query('SELECT * FROM entries', engine)
mean = output[[2]].mean()
return u"The mean is :" + str(mean)
if __name__ == "__main__":
app.run(debug=True)
Fix problem with Heroku database url
|
import sys
import os
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'HEROKU_POSTGRESQL_GOLD_URL', ''
)
dfdict = getqf.scraper()
engine = create_engine(app.config['DATABASE'])
@app.route('/')
def hello():
df = dfdict['nsdqct.csv']
df.to_sql(name='entries', con = engine, if_exists = 'replace')
output = pd.read_sql_query('SELECT * FROM entries', engine)
mean = output[[2]].mean()
return u"The mean is :" + str(mean)
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>import sys
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
@app.route('/')
def hello():
engine = create_engine('postgresql://postgres:becreative@localhost/greenq')
dfdict = getqf.scraper()
df = dfdict['nsdqct.csv']
df.to_sql(name='entries', con = engine, if_exists = 'replace')
output = pd.read_sql_query('SELECT * FROM entries', engine)
mean = output[[2]].mean()
return u"The mean is :" + str(mean)
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Fix problem with Heroku database url<commit_after>
|
import sys
import os
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'HEROKU_POSTGRESQL_GOLD_URL', ''
)
dfdict = getqf.scraper()
engine = create_engine(app.config['DATABASE'])
@app.route('/')
def hello():
df = dfdict['nsdqct.csv']
df.to_sql(name='entries', con = engine, if_exists = 'replace')
output = pd.read_sql_query('SELECT * FROM entries', engine)
mean = output[[2]].mean()
return u"The mean is :" + str(mean)
if __name__ == "__main__":
app.run(debug=True)
|
import sys
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
@app.route('/')
def hello():
engine = create_engine('postgresql://postgres:becreative@localhost/greenq')
dfdict = getqf.scraper()
df = dfdict['nsdqct.csv']
df.to_sql(name='entries', con = engine, if_exists = 'replace')
output = pd.read_sql_query('SELECT * FROM entries', engine)
mean = output[[2]].mean()
return u"The mean is :" + str(mean)
if __name__ == "__main__":
app.run(debug=True)
Fix problem with Heroku database urlimport sys
import os
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'HEROKU_POSTGRESQL_GOLD_URL', ''
)
dfdict = getqf.scraper()
engine = create_engine(app.config['DATABASE'])
@app.route('/')
def hello():
df = dfdict['nsdqct.csv']
df.to_sql(name='entries', con = engine, if_exists = 'replace')
output = pd.read_sql_query('SELECT * FROM entries', engine)
mean = output[[2]].mean()
return u"The mean is :" + str(mean)
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>import sys
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
@app.route('/')
def hello():
engine = create_engine('postgresql://postgres:becreative@localhost/greenq')
dfdict = getqf.scraper()
df = dfdict['nsdqct.csv']
df.to_sql(name='entries', con = engine, if_exists = 'replace')
output = pd.read_sql_query('SELECT * FROM entries', engine)
mean = output[[2]].mean()
return u"The mean is :" + str(mean)
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Fix problem with Heroku database url<commit_after>import sys
import os
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'HEROKU_POSTGRESQL_GOLD_URL', ''
)
dfdict = getqf.scraper()
engine = create_engine(app.config['DATABASE'])
@app.route('/')
def hello():
df = dfdict['nsdqct.csv']
df.to_sql(name='entries', con = engine, if_exists = 'replace')
output = pd.read_sql_query('SELECT * FROM entries', engine)
mean = output[[2]].mean()
return u"The mean is :" + str(mean)
if __name__ == "__main__":
app.run(debug=True)
|
612d54c6f8eace7b4c87d5771059bfb55a4c583f
|
pyspectator/monitoring.py
|
pyspectator/monitoring.py
|
from abc import ABCMeta, abstractmethod
from threading import Timer
class AbcMonitor(metaclass=ABCMeta):
"""Base class for entities, which require repeating event.
Attributes:
monitoring (bool): indicator activity of monitor.
monitoring_latency (int, float): frequency of execution monitor's action.
"""
# region initialization
def __init__(self, monitoring_latency):
self.__monitoring_latency = None
self.monitoring_latency = monitoring_latency
self.__monitoring = False
# endregion
# region properties
@property
def monitoring(self):
return self.__monitoring
@property
def monitoring_latency(self):
return self.__monitoring_latency
@monitoring_latency.setter
def monitoring_latency(self, value):
self.__monitoring_latency = value
# endregion
# region methods
def start_monitoring(self):
"""Enable periodically monitoring.
"""
if self.__monitoring is False:
self.__monitoring = True
self.__monitoring_action()
def stop_monitoring(self):
"""Disable periodically monitoring.
"""
self.__monitoring = False
def __monitoring_action(self):
if self.__monitoring is True:
self._monitoring_action()
Timer(self.monitoring_latency, self.__monitoring_action).start()
@abstractmethod
def _monitoring_action(self):
"""Action, which repeated, when monitoring is enabled.
"""
raise NotImplementedError('Method not implemented by derived class!')
# endregion
pass
|
from abc import ABCMeta, abstractmethod
from threading import Timer
class AbcMonitor(metaclass=ABCMeta):
"""Base class for entities, which require repeating event.
Attributes:
monitoring (bool): indicator activity of monitor.
monitoring_latency (int, float): frequency of execution monitor's action.
"""
# region initialization
def __init__(self, monitoring_latency):
self.__monitoring_latency = None
self.monitoring_latency = monitoring_latency
self.__monitoring = False
# endregion
# region properties
@property
def monitoring(self):
return self.__monitoring
@property
def monitoring_latency(self):
return self.__monitoring_latency
@monitoring_latency.setter
def monitoring_latency(self, value):
self.__monitoring_latency = value
# endregion
# region methods
def start_monitoring(self):
"""Enable periodically monitoring.
"""
if self.__monitoring is False:
self.__monitoring = True
self.__monitoring_action()
def stop_monitoring(self):
"""Disable periodically monitoring.
"""
self.__monitoring = False
def __enter__(self):
self.start_monitoring()
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop_monitoring()
def __monitoring_action(self):
if self.__monitoring is True:
self._monitoring_action()
Timer(self.monitoring_latency, self.__monitoring_action).start()
@abstractmethod
def _monitoring_action(self):
"""Action, which repeated, when monitoring is enabled.
"""
raise NotImplementedError('Method not implemented by derived class!')
# endregion
pass
|
Support of "with" statement in class "AbsMonitor".
|
Support of "with" statement in class "AbsMonitor".
|
Python
|
bsd-3-clause
|
uzumaxy/pyspectator
|
from abc import ABCMeta, abstractmethod
from threading import Timer
class AbcMonitor(metaclass=ABCMeta):
"""Base class for entities, which require repeating event.
Attributes:
monitoring (bool): indicator activity of monitor.
monitoring_latency (int, float): frequency of execution monitor's action.
"""
# region initialization
def __init__(self, monitoring_latency):
self.__monitoring_latency = None
self.monitoring_latency = monitoring_latency
self.__monitoring = False
# endregion
# region properties
@property
def monitoring(self):
return self.__monitoring
@property
def monitoring_latency(self):
return self.__monitoring_latency
@monitoring_latency.setter
def monitoring_latency(self, value):
self.__monitoring_latency = value
# endregion
# region methods
def start_monitoring(self):
"""Enable periodically monitoring.
"""
if self.__monitoring is False:
self.__monitoring = True
self.__monitoring_action()
def stop_monitoring(self):
"""Disable periodically monitoring.
"""
self.__monitoring = False
def __monitoring_action(self):
if self.__monitoring is True:
self._monitoring_action()
Timer(self.monitoring_latency, self.__monitoring_action).start()
@abstractmethod
def _monitoring_action(self):
"""Action, which repeated, when monitoring is enabled.
"""
raise NotImplementedError('Method not implemented by derived class!')
# endregion
passSupport of "with" statement in class "AbsMonitor".
|
from abc import ABCMeta, abstractmethod
from threading import Timer
class AbcMonitor(metaclass=ABCMeta):
"""Base class for entities, which require repeating event.
Attributes:
monitoring (bool): indicator activity of monitor.
monitoring_latency (int, float): frequency of execution monitor's action.
"""
# region initialization
def __init__(self, monitoring_latency):
self.__monitoring_latency = None
self.monitoring_latency = monitoring_latency
self.__monitoring = False
# endregion
# region properties
@property
def monitoring(self):
return self.__monitoring
@property
def monitoring_latency(self):
return self.__monitoring_latency
@monitoring_latency.setter
def monitoring_latency(self, value):
self.__monitoring_latency = value
# endregion
# region methods
def start_monitoring(self):
"""Enable periodically monitoring.
"""
if self.__monitoring is False:
self.__monitoring = True
self.__monitoring_action()
def stop_monitoring(self):
"""Disable periodically monitoring.
"""
self.__monitoring = False
def __enter__(self):
self.start_monitoring()
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop_monitoring()
def __monitoring_action(self):
if self.__monitoring is True:
self._monitoring_action()
Timer(self.monitoring_latency, self.__monitoring_action).start()
@abstractmethod
def _monitoring_action(self):
"""Action, which repeated, when monitoring is enabled.
"""
raise NotImplementedError('Method not implemented by derived class!')
# endregion
pass
|
<commit_before>from abc import ABCMeta, abstractmethod
from threading import Timer
class AbcMonitor(metaclass=ABCMeta):
"""Base class for entities, which require repeating event.
Attributes:
monitoring (bool): indicator activity of monitor.
monitoring_latency (int, float): frequency of execution monitor's action.
"""
# region initialization
def __init__(self, monitoring_latency):
self.__monitoring_latency = None
self.monitoring_latency = monitoring_latency
self.__monitoring = False
# endregion
# region properties
@property
def monitoring(self):
return self.__monitoring
@property
def monitoring_latency(self):
return self.__monitoring_latency
@monitoring_latency.setter
def monitoring_latency(self, value):
self.__monitoring_latency = value
# endregion
# region methods
def start_monitoring(self):
"""Enable periodically monitoring.
"""
if self.__monitoring is False:
self.__monitoring = True
self.__monitoring_action()
def stop_monitoring(self):
"""Disable periodically monitoring.
"""
self.__monitoring = False
def __monitoring_action(self):
if self.__monitoring is True:
self._monitoring_action()
Timer(self.monitoring_latency, self.__monitoring_action).start()
@abstractmethod
def _monitoring_action(self):
"""Action, which repeated, when monitoring is enabled.
"""
raise NotImplementedError('Method not implemented by derived class!')
# endregion
pass<commit_msg>Support of "with" statement in class "AbsMonitor".<commit_after>
|
from abc import ABCMeta, abstractmethod
from threading import Timer
class AbcMonitor(metaclass=ABCMeta):
"""Base class for entities, which require repeating event.
Attributes:
monitoring (bool): indicator activity of monitor.
monitoring_latency (int, float): frequency of execution monitor's action.
"""
# region initialization
def __init__(self, monitoring_latency):
self.__monitoring_latency = None
self.monitoring_latency = monitoring_latency
self.__monitoring = False
# endregion
# region properties
@property
def monitoring(self):
return self.__monitoring
@property
def monitoring_latency(self):
return self.__monitoring_latency
@monitoring_latency.setter
def monitoring_latency(self, value):
self.__monitoring_latency = value
# endregion
# region methods
def start_monitoring(self):
"""Enable periodically monitoring.
"""
if self.__monitoring is False:
self.__monitoring = True
self.__monitoring_action()
def stop_monitoring(self):
"""Disable periodically monitoring.
"""
self.__monitoring = False
def __enter__(self):
self.start_monitoring()
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop_monitoring()
def __monitoring_action(self):
if self.__monitoring is True:
self._monitoring_action()
Timer(self.monitoring_latency, self.__monitoring_action).start()
@abstractmethod
def _monitoring_action(self):
"""Action, which repeated, when monitoring is enabled.
"""
raise NotImplementedError('Method not implemented by derived class!')
# endregion
pass
|
from abc import ABCMeta, abstractmethod
from threading import Timer
class AbcMonitor(metaclass=ABCMeta):
"""Base class for entities, which require repeating event.
Attributes:
monitoring (bool): indicator activity of monitor.
monitoring_latency (int, float): frequency of execution monitor's action.
"""
# region initialization
def __init__(self, monitoring_latency):
self.__monitoring_latency = None
self.monitoring_latency = monitoring_latency
self.__monitoring = False
# endregion
# region properties
@property
def monitoring(self):
return self.__monitoring
@property
def monitoring_latency(self):
return self.__monitoring_latency
@monitoring_latency.setter
def monitoring_latency(self, value):
self.__monitoring_latency = value
# endregion
# region methods
def start_monitoring(self):
"""Enable periodically monitoring.
"""
if self.__monitoring is False:
self.__monitoring = True
self.__monitoring_action()
def stop_monitoring(self):
"""Disable periodically monitoring.
"""
self.__monitoring = False
def __monitoring_action(self):
if self.__monitoring is True:
self._monitoring_action()
Timer(self.monitoring_latency, self.__monitoring_action).start()
@abstractmethod
def _monitoring_action(self):
"""Action, which repeated, when monitoring is enabled.
"""
raise NotImplementedError('Method not implemented by derived class!')
# endregion
passSupport of "with" statement in class "AbsMonitor".from abc import ABCMeta, abstractmethod
from threading import Timer
class AbcMonitor(metaclass=ABCMeta):
"""Base class for entities, which require repeating event.
Attributes:
monitoring (bool): indicator activity of monitor.
monitoring_latency (int, float): frequency of execution monitor's action.
"""
# region initialization
def __init__(self, monitoring_latency):
self.__monitoring_latency = None
self.monitoring_latency = monitoring_latency
self.__monitoring = False
# endregion
# region properties
@property
def monitoring(self):
return self.__monitoring
@property
def monitoring_latency(self):
return self.__monitoring_latency
@monitoring_latency.setter
def monitoring_latency(self, value):
self.__monitoring_latency = value
# endregion
# region methods
def start_monitoring(self):
"""Enable periodically monitoring.
"""
if self.__monitoring is False:
self.__monitoring = True
self.__monitoring_action()
def stop_monitoring(self):
"""Disable periodically monitoring.
"""
self.__monitoring = False
def __enter__(self):
self.start_monitoring()
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop_monitoring()
def __monitoring_action(self):
if self.__monitoring is True:
self._monitoring_action()
Timer(self.monitoring_latency, self.__monitoring_action).start()
@abstractmethod
def _monitoring_action(self):
"""Action, which repeated, when monitoring is enabled.
"""
raise NotImplementedError('Method not implemented by derived class!')
# endregion
pass
|
<commit_before>from abc import ABCMeta, abstractmethod
from threading import Timer
class AbcMonitor(metaclass=ABCMeta):
"""Base class for entities, which require repeating event.
Attributes:
monitoring (bool): indicator activity of monitor.
monitoring_latency (int, float): frequency of execution monitor's action.
"""
# region initialization
def __init__(self, monitoring_latency):
self.__monitoring_latency = None
self.monitoring_latency = monitoring_latency
self.__monitoring = False
# endregion
# region properties
@property
def monitoring(self):
return self.__monitoring
@property
def monitoring_latency(self):
return self.__monitoring_latency
@monitoring_latency.setter
def monitoring_latency(self, value):
self.__monitoring_latency = value
# endregion
# region methods
def start_monitoring(self):
"""Enable periodically monitoring.
"""
if self.__monitoring is False:
self.__monitoring = True
self.__monitoring_action()
def stop_monitoring(self):
"""Disable periodically monitoring.
"""
self.__monitoring = False
def __monitoring_action(self):
if self.__monitoring is True:
self._monitoring_action()
Timer(self.monitoring_latency, self.__monitoring_action).start()
@abstractmethod
def _monitoring_action(self):
"""Action, which repeated, when monitoring is enabled.
"""
raise NotImplementedError('Method not implemented by derived class!')
# endregion
pass<commit_msg>Support of "with" statement in class "AbsMonitor".<commit_after>from abc import ABCMeta, abstractmethod
from threading import Timer
class AbcMonitor(metaclass=ABCMeta):
"""Base class for entities, which require repeating event.
Attributes:
monitoring (bool): indicator activity of monitor.
monitoring_latency (int, float): frequency of execution monitor's action.
"""
# region initialization
def __init__(self, monitoring_latency):
self.__monitoring_latency = None
self.monitoring_latency = monitoring_latency
self.__monitoring = False
# endregion
# region properties
@property
def monitoring(self):
return self.__monitoring
@property
def monitoring_latency(self):
return self.__monitoring_latency
@monitoring_latency.setter
def monitoring_latency(self, value):
self.__monitoring_latency = value
# endregion
# region methods
def start_monitoring(self):
"""Enable periodically monitoring.
"""
if self.__monitoring is False:
self.__monitoring = True
self.__monitoring_action()
def stop_monitoring(self):
"""Disable periodically monitoring.
"""
self.__monitoring = False
def __enter__(self):
self.start_monitoring()
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop_monitoring()
def __monitoring_action(self):
if self.__monitoring is True:
self._monitoring_action()
Timer(self.monitoring_latency, self.__monitoring_action).start()
@abstractmethod
def _monitoring_action(self):
"""Action, which repeated, when monitoring is enabled.
"""
raise NotImplementedError('Method not implemented by derived class!')
# endregion
pass
|
02e7251da64c9a2853c2e05a2b93b862268a840c
|
SixTrack/pytools/fix_cdblocks.py
|
SixTrack/pytools/fix_cdblocks.py
|
#!/usr/bin/env python
import sys
assert len(sys.argv) == 3, "Usage: to_replace filename"
to_replace = sys.argv[1]
filename = sys.argv[2]
lines_in = open(filename,'r').readlines()
#print '+ca '+to_replace
i = 0
num_replaced = 0
while True:
line = lines_in[i]
#print line
if line.startswith('+ca '+to_replace):
#print "found!"
#delete the bad line
del lines_in[i]
#search backwards for the implicit none
while True:
i = i-1
if i < 0:
print "Error, i<0"
exit(1)
line = lines_in[i]
if "implicit none" in line or "IMPLICIT NONE" in line:
break
lines_in.insert(i," use "+to_replace + "\n")
num_replaced += 1
i = i+1
if i >= len(lines_in):
break
file_out = open(filename,'w')
for l in lines_in:
file_out.write(l)
file_out.close()
print num_replaced
|
#!/usr/bin/env python
import sys
assert len(sys.argv) == 3, "Usage: to_replace filename"
to_replace = sys.argv[1]
filename = sys.argv[2]
lines_in = open(filename,'r').readlines()
#print '+ca '+to_replace
i = 0
num_replaced = 0
numspaces = 6 # default value
while True:
line = lines_in[i]
#print line
if line.startswith('+ca '+to_replace):
#print "found!"
#delete the bad line
del lines_in[i]
#search backwards for the implicit none
while True:
i = i-1
if i < 0:
print "Error, i<0"
exit(1)
line = lines_in[i]
if "implicit none" in line or "IMPLICIT NONE" in line:
numspaces = len(line)-len(line.lstrip())
break
lines_in.insert(i,numspaces*" " + "use "+to_replace + "\n")
num_replaced += 1
i = i+1
if i >= len(lines_in):
break
file_out = open(filename,'w')
for l in lines_in:
file_out.write(l)
file_out.close()
print num_replaced
|
Indent the use statements correctly
|
Indent the use statements correctly
|
Python
|
lgpl-2.1
|
SixTrack/SixTrack,SixTrack/SixTrack,SixTrack/SixTrack,SixTrack/SixTrack,SixTrack/SixTrack,SixTrack/SixTrack
|
#!/usr/bin/env python
import sys
assert len(sys.argv) == 3, "Usage: to_replace filename"
to_replace = sys.argv[1]
filename = sys.argv[2]
lines_in = open(filename,'r').readlines()
#print '+ca '+to_replace
i = 0
num_replaced = 0
while True:
line = lines_in[i]
#print line
if line.startswith('+ca '+to_replace):
#print "found!"
#delete the bad line
del lines_in[i]
#search backwards for the implicit none
while True:
i = i-1
if i < 0:
print "Error, i<0"
exit(1)
line = lines_in[i]
if "implicit none" in line or "IMPLICIT NONE" in line:
break
lines_in.insert(i," use "+to_replace + "\n")
num_replaced += 1
i = i+1
if i >= len(lines_in):
break
file_out = open(filename,'w')
for l in lines_in:
file_out.write(l)
file_out.close()
print num_replaced
Indent the use statements correctly
|
#!/usr/bin/env python
import sys
assert len(sys.argv) == 3, "Usage: to_replace filename"
to_replace = sys.argv[1]
filename = sys.argv[2]
lines_in = open(filename,'r').readlines()
#print '+ca '+to_replace
i = 0
num_replaced = 0
numspaces = 6 # default value
while True:
line = lines_in[i]
#print line
if line.startswith('+ca '+to_replace):
#print "found!"
#delete the bad line
del lines_in[i]
#search backwards for the implicit none
while True:
i = i-1
if i < 0:
print "Error, i<0"
exit(1)
line = lines_in[i]
if "implicit none" in line or "IMPLICIT NONE" in line:
numspaces = len(line)-len(line.lstrip())
break
lines_in.insert(i,numspaces*" " + "use "+to_replace + "\n")
num_replaced += 1
i = i+1
if i >= len(lines_in):
break
file_out = open(filename,'w')
for l in lines_in:
file_out.write(l)
file_out.close()
print num_replaced
|
<commit_before>#!/usr/bin/env python
import sys
assert len(sys.argv) == 3, "Usage: to_replace filename"
to_replace = sys.argv[1]
filename = sys.argv[2]
lines_in = open(filename,'r').readlines()
#print '+ca '+to_replace
i = 0
num_replaced = 0
while True:
line = lines_in[i]
#print line
if line.startswith('+ca '+to_replace):
#print "found!"
#delete the bad line
del lines_in[i]
#search backwards for the implicit none
while True:
i = i-1
if i < 0:
print "Error, i<0"
exit(1)
line = lines_in[i]
if "implicit none" in line or "IMPLICIT NONE" in line:
break
lines_in.insert(i," use "+to_replace + "\n")
num_replaced += 1
i = i+1
if i >= len(lines_in):
break
file_out = open(filename,'w')
for l in lines_in:
file_out.write(l)
file_out.close()
print num_replaced
<commit_msg>Indent the use statements correctly<commit_after>
|
#!/usr/bin/env python
import sys
assert len(sys.argv) == 3, "Usage: to_replace filename"
to_replace = sys.argv[1]
filename = sys.argv[2]
lines_in = open(filename,'r').readlines()
#print '+ca '+to_replace
i = 0
num_replaced = 0
numspaces = 6 # default value
while True:
line = lines_in[i]
#print line
if line.startswith('+ca '+to_replace):
#print "found!"
#delete the bad line
del lines_in[i]
#search backwards for the implicit none
while True:
i = i-1
if i < 0:
print "Error, i<0"
exit(1)
line = lines_in[i]
if "implicit none" in line or "IMPLICIT NONE" in line:
numspaces = len(line)-len(line.lstrip())
break
lines_in.insert(i,numspaces*" " + "use "+to_replace + "\n")
num_replaced += 1
i = i+1
if i >= len(lines_in):
break
file_out = open(filename,'w')
for l in lines_in:
file_out.write(l)
file_out.close()
print num_replaced
|
#!/usr/bin/env python
import sys
assert len(sys.argv) == 3, "Usage: to_replace filename"
to_replace = sys.argv[1]
filename = sys.argv[2]
lines_in = open(filename,'r').readlines()
#print '+ca '+to_replace
i = 0
num_replaced = 0
while True:
line = lines_in[i]
#print line
if line.startswith('+ca '+to_replace):
#print "found!"
#delete the bad line
del lines_in[i]
#search backwards for the implicit none
while True:
i = i-1
if i < 0:
print "Error, i<0"
exit(1)
line = lines_in[i]
if "implicit none" in line or "IMPLICIT NONE" in line:
break
lines_in.insert(i," use "+to_replace + "\n")
num_replaced += 1
i = i+1
if i >= len(lines_in):
break
file_out = open(filename,'w')
for l in lines_in:
file_out.write(l)
file_out.close()
print num_replaced
Indent the use statements correctly#!/usr/bin/env python
import sys
assert len(sys.argv) == 3, "Usage: to_replace filename"
to_replace = sys.argv[1]
filename = sys.argv[2]
lines_in = open(filename,'r').readlines()
#print '+ca '+to_replace
i = 0
num_replaced = 0
numspaces = 6 # default value
while True:
line = lines_in[i]
#print line
if line.startswith('+ca '+to_replace):
#print "found!"
#delete the bad line
del lines_in[i]
#search backwards for the implicit none
while True:
i = i-1
if i < 0:
print "Error, i<0"
exit(1)
line = lines_in[i]
if "implicit none" in line or "IMPLICIT NONE" in line:
numspaces = len(line)-len(line.lstrip())
break
lines_in.insert(i,numspaces*" " + "use "+to_replace + "\n")
num_replaced += 1
i = i+1
if i >= len(lines_in):
break
file_out = open(filename,'w')
for l in lines_in:
file_out.write(l)
file_out.close()
print num_replaced
|
<commit_before>#!/usr/bin/env python
import sys
assert len(sys.argv) == 3, "Usage: to_replace filename"
to_replace = sys.argv[1]
filename = sys.argv[2]
lines_in = open(filename,'r').readlines()
#print '+ca '+to_replace
i = 0
num_replaced = 0
while True:
line = lines_in[i]
#print line
if line.startswith('+ca '+to_replace):
#print "found!"
#delete the bad line
del lines_in[i]
#search backwards for the implicit none
while True:
i = i-1
if i < 0:
print "Error, i<0"
exit(1)
line = lines_in[i]
if "implicit none" in line or "IMPLICIT NONE" in line:
break
lines_in.insert(i," use "+to_replace + "\n")
num_replaced += 1
i = i+1
if i >= len(lines_in):
break
file_out = open(filename,'w')
for l in lines_in:
file_out.write(l)
file_out.close()
print num_replaced
<commit_msg>Indent the use statements correctly<commit_after>#!/usr/bin/env python
import sys
assert len(sys.argv) == 3, "Usage: to_replace filename"
to_replace = sys.argv[1]
filename = sys.argv[2]
lines_in = open(filename,'r').readlines()
#print '+ca '+to_replace
i = 0
num_replaced = 0
numspaces = 6 # default value
while True:
line = lines_in[i]
#print line
if line.startswith('+ca '+to_replace):
#print "found!"
#delete the bad line
del lines_in[i]
#search backwards for the implicit none
while True:
i = i-1
if i < 0:
print "Error, i<0"
exit(1)
line = lines_in[i]
if "implicit none" in line or "IMPLICIT NONE" in line:
numspaces = len(line)-len(line.lstrip())
break
lines_in.insert(i,numspaces*" " + "use "+to_replace + "\n")
num_replaced += 1
i = i+1
if i >= len(lines_in):
break
file_out = open(filename,'w')
for l in lines_in:
file_out.write(l)
file_out.close()
print num_replaced
|
71ffcd1ed4447090b43b3424548f46c5230a1045
|
tasks.py
|
tasks.py
|
#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test(cover=False):
# Run tests using nose called with coverage
code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose'])
# Also generate coverage reports when --cover flag is given
if cover and code == 0:
# Add blank line between test report and coverage report
print('')
subprocess.call(['coverage', 'report'])
subprocess.call(['coverage', 'html'])
|
# Project tasks (for use with invoke task runner)
import subprocess
from invoke import task
@task
def test(cover=False):
if cover:
# Run tests via coverage and generate reports if --cover flag is given
code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose'])
# Only show coverage report if all tests have passed
if code == 0:
# Add blank line between test report and coverage report
print('')
subprocess.call(['coverage', 'report'])
subprocess.call(['coverage', 'html'])
else:
# Otherwise, run tests via nose (which is faster)
code = subprocess.call(['nosetests', '--rednose'])
|
Improve efficiency of test task
|
Improve efficiency of test task
|
Python
|
mit
|
caleb531/ssh-wp-backup,caleb531/ssh-wp-backup
|
#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test(cover=False):
# Run tests using nose called with coverage
code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose'])
# Also generate coverage reports when --cover flag is given
if cover and code == 0:
# Add blank line between test report and coverage report
print('')
subprocess.call(['coverage', 'report'])
subprocess.call(['coverage', 'html'])
Improve efficiency of test task
|
# Project tasks (for use with invoke task runner)
import subprocess
from invoke import task
@task
def test(cover=False):
if cover:
# Run tests via coverage and generate reports if --cover flag is given
code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose'])
# Only show coverage report if all tests have passed
if code == 0:
# Add blank line between test report and coverage report
print('')
subprocess.call(['coverage', 'report'])
subprocess.call(['coverage', 'html'])
else:
# Otherwise, run tests via nose (which is faster)
code = subprocess.call(['nosetests', '--rednose'])
|
<commit_before>#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test(cover=False):
# Run tests using nose called with coverage
code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose'])
# Also generate coverage reports when --cover flag is given
if cover and code == 0:
# Add blank line between test report and coverage report
print('')
subprocess.call(['coverage', 'report'])
subprocess.call(['coverage', 'html'])
<commit_msg>Improve efficiency of test task<commit_after>
|
# Project tasks (for use with invoke task runner)
import subprocess
from invoke import task
@task
def test(cover=False):
if cover:
# Run tests via coverage and generate reports if --cover flag is given
code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose'])
# Only show coverage report if all tests have passed
if code == 0:
# Add blank line between test report and coverage report
print('')
subprocess.call(['coverage', 'report'])
subprocess.call(['coverage', 'html'])
else:
# Otherwise, run tests via nose (which is faster)
code = subprocess.call(['nosetests', '--rednose'])
|
#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test(cover=False):
# Run tests using nose called with coverage
code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose'])
# Also generate coverage reports when --cover flag is given
if cover and code == 0:
# Add blank line between test report and coverage report
print('')
subprocess.call(['coverage', 'report'])
subprocess.call(['coverage', 'html'])
Improve efficiency of test task# Project tasks (for use with invoke task runner)
import subprocess
from invoke import task
@task
def test(cover=False):
if cover:
# Run tests via coverage and generate reports if --cover flag is given
code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose'])
# Only show coverage report if all tests have passed
if code == 0:
# Add blank line between test report and coverage report
print('')
subprocess.call(['coverage', 'report'])
subprocess.call(['coverage', 'html'])
else:
# Otherwise, run tests via nose (which is faster)
code = subprocess.call(['nosetests', '--rednose'])
|
<commit_before>#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test(cover=False):
# Run tests using nose called with coverage
code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose'])
# Also generate coverage reports when --cover flag is given
if cover and code == 0:
# Add blank line between test report and coverage report
print('')
subprocess.call(['coverage', 'report'])
subprocess.call(['coverage', 'html'])
<commit_msg>Improve efficiency of test task<commit_after># Project tasks (for use with invoke task runner)
import subprocess
from invoke import task
@task
def test(cover=False):
if cover:
# Run tests via coverage and generate reports if --cover flag is given
code = subprocess.call(['coverage', 'run', '-m', 'nose', '--rednose'])
# Only show coverage report if all tests have passed
if code == 0:
# Add blank line between test report and coverage report
print('')
subprocess.call(['coverage', 'report'])
subprocess.call(['coverage', 'html'])
else:
# Otherwise, run tests via nose (which is faster)
code = subprocess.call(['nosetests', '--rednose'])
|
5fa4bbf781117c20357cf6f477e0298047e11094
|
engine/migrate.py
|
engine/migrate.py
|
import store
rstore = store.RedisStore()
def populate_terminations():
for game in rstore.all_games():
rstore.set_game(game["game_id"], game["game"])
def populate_game_ids():
keys = rstore.rconn.keys("chess:games:*:game")
game_ids = [k.split(":")[-2] for k in keys]
rstore.rconn.sadd("chess:game_ids", *game_ids)
def populate_players():
keys = rstore.rconn.keys("chess:games:*:*:email")
players = set()
for k in keys:
val = rstore.rconn.get(k)
if val:
players.add(k)
rstore.rconn.sadd("chess:players", *players)
|
import store
rstore = store.RedisStore()
def populate_terminations():
for game in rstore.all_games():
rstore.set_game(game["game_id"], game["game"])
def populate_game_ids():
keys = rstore.rconn.keys("chess:games:*:game")
game_ids = [k.split(":")[-2] for k in keys]
rstore.rconn.sadd("chess:game_ids", *game_ids)
def populate_players():
keys = rstore.rconn.keys("chess:games:*:*:email")
players = set()
for k in keys:
val = rstore.rconn.get(k)
if val:
players.add(val)
print players
rstore.rconn.sadd("chess:players", *players)
|
Print player emails in migration script.
|
Print player emails in migration script.
|
Python
|
mit
|
haldean/chess,haldean/chess,haldean/chess
|
import store
rstore = store.RedisStore()
def populate_terminations():
for game in rstore.all_games():
rstore.set_game(game["game_id"], game["game"])
def populate_game_ids():
keys = rstore.rconn.keys("chess:games:*:game")
game_ids = [k.split(":")[-2] for k in keys]
rstore.rconn.sadd("chess:game_ids", *game_ids)
def populate_players():
keys = rstore.rconn.keys("chess:games:*:*:email")
players = set()
for k in keys:
val = rstore.rconn.get(k)
if val:
players.add(k)
rstore.rconn.sadd("chess:players", *players)
Print player emails in migration script.
|
import store
rstore = store.RedisStore()
def populate_terminations():
for game in rstore.all_games():
rstore.set_game(game["game_id"], game["game"])
def populate_game_ids():
keys = rstore.rconn.keys("chess:games:*:game")
game_ids = [k.split(":")[-2] for k in keys]
rstore.rconn.sadd("chess:game_ids", *game_ids)
def populate_players():
keys = rstore.rconn.keys("chess:games:*:*:email")
players = set()
for k in keys:
val = rstore.rconn.get(k)
if val:
players.add(val)
print players
rstore.rconn.sadd("chess:players", *players)
|
<commit_before>import store
rstore = store.RedisStore()
def populate_terminations():
for game in rstore.all_games():
rstore.set_game(game["game_id"], game["game"])
def populate_game_ids():
keys = rstore.rconn.keys("chess:games:*:game")
game_ids = [k.split(":")[-2] for k in keys]
rstore.rconn.sadd("chess:game_ids", *game_ids)
def populate_players():
keys = rstore.rconn.keys("chess:games:*:*:email")
players = set()
for k in keys:
val = rstore.rconn.get(k)
if val:
players.add(k)
rstore.rconn.sadd("chess:players", *players)
<commit_msg>Print player emails in migration script.<commit_after>
|
import store
rstore = store.RedisStore()
def populate_terminations():
for game in rstore.all_games():
rstore.set_game(game["game_id"], game["game"])
def populate_game_ids():
keys = rstore.rconn.keys("chess:games:*:game")
game_ids = [k.split(":")[-2] for k in keys]
rstore.rconn.sadd("chess:game_ids", *game_ids)
def populate_players():
keys = rstore.rconn.keys("chess:games:*:*:email")
players = set()
for k in keys:
val = rstore.rconn.get(k)
if val:
players.add(val)
print players
rstore.rconn.sadd("chess:players", *players)
|
import store
rstore = store.RedisStore()
def populate_terminations():
for game in rstore.all_games():
rstore.set_game(game["game_id"], game["game"])
def populate_game_ids():
keys = rstore.rconn.keys("chess:games:*:game")
game_ids = [k.split(":")[-2] for k in keys]
rstore.rconn.sadd("chess:game_ids", *game_ids)
def populate_players():
keys = rstore.rconn.keys("chess:games:*:*:email")
players = set()
for k in keys:
val = rstore.rconn.get(k)
if val:
players.add(k)
rstore.rconn.sadd("chess:players", *players)
Print player emails in migration script.import store
rstore = store.RedisStore()
def populate_terminations():
for game in rstore.all_games():
rstore.set_game(game["game_id"], game["game"])
def populate_game_ids():
keys = rstore.rconn.keys("chess:games:*:game")
game_ids = [k.split(":")[-2] for k in keys]
rstore.rconn.sadd("chess:game_ids", *game_ids)
def populate_players():
keys = rstore.rconn.keys("chess:games:*:*:email")
players = set()
for k in keys:
val = rstore.rconn.get(k)
if val:
players.add(val)
print players
rstore.rconn.sadd("chess:players", *players)
|
<commit_before>import store
rstore = store.RedisStore()
def populate_terminations():
for game in rstore.all_games():
rstore.set_game(game["game_id"], game["game"])
def populate_game_ids():
keys = rstore.rconn.keys("chess:games:*:game")
game_ids = [k.split(":")[-2] for k in keys]
rstore.rconn.sadd("chess:game_ids", *game_ids)
def populate_players():
keys = rstore.rconn.keys("chess:games:*:*:email")
players = set()
for k in keys:
val = rstore.rconn.get(k)
if val:
players.add(k)
rstore.rconn.sadd("chess:players", *players)
<commit_msg>Print player emails in migration script.<commit_after>import store
rstore = store.RedisStore()
def populate_terminations():
for game in rstore.all_games():
rstore.set_game(game["game_id"], game["game"])
def populate_game_ids():
keys = rstore.rconn.keys("chess:games:*:game")
game_ids = [k.split(":")[-2] for k in keys]
rstore.rconn.sadd("chess:game_ids", *game_ids)
def populate_players():
keys = rstore.rconn.keys("chess:games:*:*:email")
players = set()
for k in keys:
val = rstore.rconn.get(k)
if val:
players.add(val)
print players
rstore.rconn.sadd("chess:players", *players)
|
5805f84500ebca762ec2c7e34f344bf3c2406a8d
|
examples/pywapi-example.py
|
examples/pywapi-example.py
|
#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + string.lower(weather_com_result['current_conditions']['text']) + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York.\n\n"
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
|
#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + weather_com_result['current_conditions']['text'].lower() + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York."
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
|
Fix error in example script
|
Fix error in example script
|
Python
|
mit
|
qetzal/python-weather-api,esteban22x/python-weather-api,dreamable/python-weather-api,alinahid477/python-weather-api,sirspamalot/python-weather-api,gmassei/python-weather-api,handyfreak/python-weather-api,jtasker/python-weather-api
|
#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + string.lower(weather_com_result['current_conditions']['text']) + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York.\n\n"
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
Fix error in example script
|
#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + weather_com_result['current_conditions']['text'].lower() + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York."
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
|
<commit_before>#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + string.lower(weather_com_result['current_conditions']['text']) + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York.\n\n"
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
<commit_msg>Fix error in example script<commit_after>
|
#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + weather_com_result['current_conditions']['text'].lower() + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York."
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
|
#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + string.lower(weather_com_result['current_conditions']['text']) + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York.\n\n"
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
Fix error in example script#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + weather_com_result['current_conditions']['text'].lower() + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York."
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
|
<commit_before>#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + string.lower(weather_com_result['current_conditions']['text']) + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York.\n\n"
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
<commit_msg>Fix error in example script<commit_after>#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + weather_com_result['current_conditions']['text'].lower() + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York."
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
|
f3ec19e0893db4fbbad8848dec8f63a09d7ffd06
|
alg_sum_list.py
|
alg_sum_list.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_recur(a_list):
"""Sum list by recursion.
Time complexity: O(n), where n is the list length.
Space complexity: O(n).
"""
if len(a_list) == 1:
return a_list[0]
else:
return a_list[0] + sum_list_recur(a_list[1:])
def sum_list_dp(a_list):
"""Sum list by bottom-up dynamic programming.
Time complexity: O(n).
Space complexity: O(1).
"""
s = 0
for x in a_list:
s += x
return s
def main():
import time
import random
a_list = [random.randint(0, 1000) for _ in range(100)]
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(a_list)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By DP: {}'.format(sum_list_dp(a_list)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
Add time/space complexity; revise var's
|
Add time/space complexity; revise var's
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
Add time/space complexity; revise var's
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_recur(a_list):
"""Sum list by recursion.
Time complexity: O(n), where n is the list length.
Space complexity: O(n).
"""
if len(a_list) == 1:
return a_list[0]
else:
return a_list[0] + sum_list_recur(a_list[1:])
def sum_list_dp(a_list):
"""Sum list by bottom-up dynamic programming.
Time complexity: O(n).
Space complexity: O(1).
"""
s = 0
for x in a_list:
s += x
return s
def main():
import time
import random
a_list = [random.randint(0, 1000) for _ in range(100)]
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(a_list)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By DP: {}'.format(sum_list_dp(a_list)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
<commit_msg>Add time/space complexity; revise var's<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_recur(a_list):
"""Sum list by recursion.
Time complexity: O(n), where n is the list length.
Space complexity: O(n).
"""
if len(a_list) == 1:
return a_list[0]
else:
return a_list[0] + sum_list_recur(a_list[1:])
def sum_list_dp(a_list):
"""Sum list by bottom-up dynamic programming.
Time complexity: O(n).
Space complexity: O(1).
"""
s = 0
for x in a_list:
s += x
return s
def main():
import time
import random
a_list = [random.randint(0, 1000) for _ in range(100)]
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(a_list)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By DP: {}'.format(sum_list_dp(a_list)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
Add time/space complexity; revise var's
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_recur(a_list):
"""Sum list by recursion.
Time complexity: O(n), where n is the list length.
Space complexity: O(n).
"""
if len(a_list) == 1:
return a_list[0]
else:
return a_list[0] + sum_list_recur(a_list[1:])
def sum_list_dp(a_list):
"""Sum list by bottom-up dynamic programming.
Time complexity: O(n).
Space complexity: O(1).
"""
s = 0
for x in a_list:
s += x
return s
def main():
import time
import random
a_list = [random.randint(0, 1000) for _ in range(100)]
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(a_list)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By DP: {}'.format(sum_list_dp(a_list)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
<commit_before>
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
<commit_msg>Add time/space complexity; revise var's<commit_after>
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_recur(a_list):
"""Sum list by recursion.
Time complexity: O(n), where n is the list length.
Space complexity: O(n).
"""
if len(a_list) == 1:
return a_list[0]
else:
return a_list[0] + sum_list_recur(a_list[1:])
def sum_list_dp(a_list):
"""Sum list by bottom-up dynamic programming.
Time complexity: O(n).
Space complexity: O(1).
"""
s = 0
for x in a_list:
s += x
return s
def main():
import time
import random
a_list = [random.randint(0, 1000) for _ in range(100)]
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(a_list)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By DP: {}'.format(sum_list_dp(a_list)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
ea79df2314afc4bf8e2747800120a7d5d005ece7
|
reviewboard/notifications/templatetags/markdown_email.py
|
reviewboard/notifications/templatetags/markdown_email.py
|
from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
    """Render *text* to XHTML for an e-mail body.

    Plain-text messages (is_rich_text falsy) are returned unchanged;
    rich-text messages are rendered from Markdown and marked safe.
    """
    if not is_rich_text:
        return text

    # We use XHTML1 instead of HTML5 to ensure the results can be parsed by
    # an XML parser. This is actually needed for the main Markdown renderer
    # for the web UI, but consistency is good here.
    return mark_safe(markdown.markdown(
        text,
        output_format='xhtml1',
        extensions=[
            'markdown.extensions.fenced_code',
            'markdown.extensions.codehilite',
            'markdown.extensions.tables',
            'pymdownx.tilde',
            'djblets.markdown.extensions.escape_html',
            'djblets.markdown.extensions.wysiwyg_email',
        ],
        extension_configs={
            # noclasses=True makes codehilite emit inline styles rather
            # than CSS class references.
            'codehilite': {
                'noclasses': True,
            },
        }))
@register.filter
def markdown_email_text(text, is_rich_text):
    """Return the plain-text form of *text* for an e-mail body.

    Rich text has its Markdown escaping removed; plain text is
    returned unchanged.
    """
    if not is_rich_text:
        return text

    return markdown_unescape(text)
|
from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
    """Render *text* to XHTML for an e-mail body.

    Plain-text messages (is_rich_text falsy) are returned unchanged;
    rich-text messages are rendered from Markdown and marked safe.
    This version adds sane_lists and smart_strong, matching the
    extensions used by the web UI renderer.
    """
    if not is_rich_text:
        return text

    # We use XHTML1 instead of HTML5 to ensure the results can be parsed by
    # an XML parser. This is actually needed for the main Markdown renderer
    # for the web UI, but consistency is good here.
    return mark_safe(markdown.markdown(
        text,
        output_format='xhtml1',
        extensions=[
            'markdown.extensions.fenced_code',
            'markdown.extensions.codehilite',
            'markdown.extensions.tables',
            'markdown.extensions.sane_lists',
            'markdown.extensions.smart_strong',
            'pymdownx.tilde',
            'djblets.markdown.extensions.escape_html',
            'djblets.markdown.extensions.wysiwyg_email',
        ],
        extension_configs={
            # noclasses=True makes codehilite emit inline styles rather
            # than CSS class references.
            'codehilite': {
                'noclasses': True,
            },
        }))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
|
Add a couple missing Markdown extensions for e-mail rendering.
|
Add a couple missing Markdown extensions for e-mail rendering.
The Markdown e-mail rendering code wasn't correctly rendering lists or
strings with double-underscores separating words, due to missing a
couple of extensions. This adds those missing extensions, bringing some
consistency.
Testing Done:
Tested the e-mail previews before and after this change. Saw that the
Markdown had rendered incorrectly before in these cases, but rendered
correctly after.
Reviewed at https://reviews.reviewboard.org/r/8992/
|
Python
|
mit
|
reviewboard/reviewboard,chipx86/reviewboard,chipx86/reviewboard,brennie/reviewboard,reviewboard/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,brennie/reviewboard,brennie/reviewboard,reviewboard/reviewboard,brennie/reviewboard,chipx86/reviewboard
|
from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
Add a couple missing Markdown extensions for e-mail rendering.
The Markdown e-mail rendering code wasn't correctly rendering lists or
strings with double-underscores separating words, due to missing a
couple of extensions. This adds those missing extensions, bringing some
consistency.
Testing Done:
Tested the e-mail previews before and after this change. Saw that the
Markdown had rendered incorrectly before in these cases, but rendered
correctly after.
Reviewed at https://reviews.reviewboard.org/r/8992/
|
from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
|
<commit_before>
from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
<commit_msg>Add a couple missing Markdown extensions for e-mail rendering.
The Markdown e-mail rendering code wasn't correctly rendering lists or
strings with double-underscores separating words, due to missing a
couple of extensions. This adds those missing extensions, bringing some
consistency.
Testing Done:
Tested the e-mail previews before and after this change. Saw that the
Markdown had rendered incorrectly before in these cases, but rendered
correctly after.
Reviewed at https://reviews.reviewboard.org/r/8992/<commit_after>
|
from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
|
from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
Add a couple missing Markdown extensions for e-mail rendering.
The Markdown e-mail rendering code wasn't correctly rendering lists or
strings with double-underscores separating words, due to missing a
couple of extensions. This adds those missing extensions, bringing some
consistency.
Testing Done:
Tested the e-mail previews before and after this change. Saw that the
Markdown had rendered incorrectly before in these cases, but rendered
correctly after.
Reviewed at https://reviews.reviewboard.org/r/8992/
from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
|
<commit_before>
from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
<commit_msg>Add a couple missing Markdown extensions for e-mail rendering.
The Markdown e-mail rendering code wasn't correctly rendering lists or
strings with double-underscores separating words, due to missing a
couple of extensions. This adds those missing extensions, bringing some
consistency.
Testing Done:
Tested the e-mail previews before and after this change. Saw that the
Markdown had rendered incorrectly before in these cases, but rendered
correctly after.
Reviewed at https://reviews.reviewboard.org/r/8992/<commit_after>
from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
|
e8548c26df021d9eff7c056338e3442beeed9397
|
cactusbot/handlers/spam.py
|
cactusbot/handlers/spam.py
|
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
    """Spam handler.

    Flags a message as spam when it has too many capital letters, too
    many emotes, or contains links while links are disallowed.
    """

    # Maximum caps score: sum over characters of (+1 upper, -1 lower).
    MAX_SCORE = 16
    # Maximum number of emote chunks allowed per message.
    MAX_EMOTES = 6
    # Whether link chunks are permitted at all.
    ALLOW_LINKS = False

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def on_message(self, packet):
        """Handle message events.

        *packet* is iterated several times, so it is assumed to be a
        sequence (not a one-shot generator) of chunk mappings carrying
        a "type" key ("text", "emote", "link") and, for text chunks, a
        "text" key.  Returns True when the message counts as spam.
        """
        # Concatenate only the text chunks for the caps check.
        built_message = ""
        for chunk in packet:
            if chunk["type"] == "text":
                built_message += chunk["text"]

        exceeds_caps = self.check_caps(built_message)
        contains_emotes = self.check_emotes(packet)
        has_links = self.check_links(packet)

        if exceeds_caps or contains_emotes or has_links:
            return True
        else:
            return False

    def check_links(self, packet):
        """Check for links in the message."""
        return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)

    def check_emotes(self, packet):
        """Check for emotes in the message."""
        return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES

    def check_caps(self, message):
        """Check for caps in the message."""
        # isupper()/islower() are bools, so each character scores
        # +1 (upper), -1 (lower) or 0 (neither).
        return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
    """Spam handler.

    Flags a message as spam when it has too many capital letters, too
    many emotes, or contains links while links are disallowed.
    """

    # Maximum caps score: sum over characters of (+1 upper, -1 lower).
    MAX_SCORE = 16
    # Maximum number of emote chunks allowed per message.
    MAX_EMOTES = 6
    # Whether link chunks are permitted at all.
    ALLOW_LINKS = False

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def on_message(self, packet):
        """Handle message events.

        Returns True when the message counts as spam.
        """
        # BUG FIX: text chunks are tagged with type "text"; the refactor
        # to a generator expression compared chunk["type"] against
        # chunk["text"] (the chunk's content), so the caps check always
        # received an empty string.
        exceeds_caps = self.check_caps(''.join(
            chunk["text"] for chunk in packet if chunk["type"] == "text"
        ))
        contains_emotes = self.check_emotes(packet)
        has_links = self.check_links(packet)

        if exceeds_caps or contains_emotes or has_links:
            return True
        else:
            return False

    def check_links(self, packet):
        """Check for links in the message."""
        return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)

    def check_emotes(self, packet):
        """Check for emotes in the message."""
        return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES

    def check_caps(self, message):
        """Check for caps in the message."""
        # isupper()/islower() are bools: +1 per uppercase, -1 per lowercase.
        return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
Move for loop to generator comprehension
|
Move for loop to generator comprehension
|
Python
|
mit
|
CactusDev/CactusBot
|
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
built_message = ""
for chunk in packet:
if chunk["type"] == "text":
built_message += chunk["text"]
exceeds_caps = self.check_caps(built_message)
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
"""Check for links in the message."""
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
"""Check for emotes in the message."""
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
"""Check for caps in the message."""
return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
Move for loop to generator comprehension
|
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
    """Spam handler.

    Flags a message as spam when it has too many capital letters, too
    many emotes, or contains links while links are disallowed.
    """

    # Maximum caps score: sum over characters of (+1 upper, -1 lower).
    MAX_SCORE = 16
    # Maximum number of emote chunks allowed per message.
    MAX_EMOTES = 6
    # Whether link chunks are permitted at all.
    ALLOW_LINKS = False

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def on_message(self, packet):
        """Handle message events.

        Returns True when the message counts as spam.
        """
        # BUG FIX: text chunks are tagged with type "text"; the refactor
        # to a generator expression compared chunk["type"] against
        # chunk["text"] (the chunk's content), so the caps check always
        # received an empty string.
        exceeds_caps = self.check_caps(''.join(
            chunk["text"] for chunk in packet if chunk["type"] == "text"
        ))
        contains_emotes = self.check_emotes(packet)
        has_links = self.check_links(packet)

        if exceeds_caps or contains_emotes or has_links:
            return True
        else:
            return False

    def check_links(self, packet):
        """Check for links in the message."""
        return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)

    def check_emotes(self, packet):
        """Check for emotes in the message."""
        return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES

    def check_caps(self, message):
        """Check for caps in the message."""
        # isupper()/islower() are bools: +1 per uppercase, -1 per lowercase.
        return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
<commit_before>"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
built_message = ""
for chunk in packet:
if chunk["type"] == "text":
built_message += chunk["text"]
exceeds_caps = self.check_caps(built_message)
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
"""Check for links in the message."""
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
"""Check for emotes in the message."""
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
"""Check for caps in the message."""
return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
<commit_msg>Move for loop to generator comprehension<commit_after>
|
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
    """Spam handler.

    Flags a message as spam when it has too many capital letters, too
    many emotes, or contains links while links are disallowed.
    """

    # Maximum caps score: sum over characters of (+1 upper, -1 lower).
    MAX_SCORE = 16
    # Maximum number of emote chunks allowed per message.
    MAX_EMOTES = 6
    # Whether link chunks are permitted at all.
    ALLOW_LINKS = False

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def on_message(self, packet):
        """Handle message events.

        Returns True when the message counts as spam.
        """
        # BUG FIX: text chunks are tagged with type "text"; the refactor
        # to a generator expression compared chunk["type"] against
        # chunk["text"] (the chunk's content), so the caps check always
        # received an empty string.
        exceeds_caps = self.check_caps(''.join(
            chunk["text"] for chunk in packet if chunk["type"] == "text"
        ))
        contains_emotes = self.check_emotes(packet)
        has_links = self.check_links(packet)

        if exceeds_caps or contains_emotes or has_links:
            return True
        else:
            return False

    def check_links(self, packet):
        """Check for links in the message."""
        return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)

    def check_emotes(self, packet):
        """Check for emotes in the message."""
        return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES

    def check_caps(self, message):
        """Check for caps in the message."""
        # isupper()/islower() are bools: +1 per uppercase, -1 per lowercase.
        return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
built_message = ""
for chunk in packet:
if chunk["type"] == "text":
built_message += chunk["text"]
exceeds_caps = self.check_caps(built_message)
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
"""Check for links in the message."""
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
"""Check for emotes in the message."""
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
"""Check for caps in the message."""
return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
Move for loop to generator comprehension"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
    """Spam handler.

    Flags a message as spam when it has too many capital letters, too
    many emotes, or contains links while links are disallowed.
    """

    # Maximum caps score: sum over characters of (+1 upper, -1 lower).
    MAX_SCORE = 16
    # Maximum number of emote chunks allowed per message.
    MAX_EMOTES = 6
    # Whether link chunks are permitted at all.
    ALLOW_LINKS = False

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def on_message(self, packet):
        """Handle message events.

        Returns True when the message counts as spam.
        """
        # BUG FIX: text chunks are tagged with type "text"; the refactor
        # to a generator expression compared chunk["type"] against
        # chunk["text"] (the chunk's content), so the caps check always
        # received an empty string.
        exceeds_caps = self.check_caps(''.join(
            chunk["text"] for chunk in packet if chunk["type"] == "text"
        ))
        contains_emotes = self.check_emotes(packet)
        has_links = self.check_links(packet)

        if exceeds_caps or contains_emotes or has_links:
            return True
        else:
            return False

    def check_links(self, packet):
        """Check for links in the message."""
        return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)

    def check_emotes(self, packet):
        """Check for emotes in the message."""
        return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES

    def check_caps(self, message):
        """Check for caps in the message."""
        # isupper()/islower() are bools: +1 per uppercase, -1 per lowercase.
        return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
<commit_before>"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
built_message = ""
for chunk in packet:
if chunk["type"] == "text":
built_message += chunk["text"]
exceeds_caps = self.check_caps(built_message)
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
"""Check for links in the message."""
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
"""Check for emotes in the message."""
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
"""Check for caps in the message."""
return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
<commit_msg>Move for loop to generator comprehension<commit_after>
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
    """Spam handler.

    Flags a message as spam when it has too many capital letters, too
    many emotes, or contains links while links are disallowed.
    """

    # Maximum caps score: sum over characters of (+1 upper, -1 lower).
    MAX_SCORE = 16
    # Maximum number of emote chunks allowed per message.
    MAX_EMOTES = 6
    # Whether link chunks are permitted at all.
    ALLOW_LINKS = False

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def on_message(self, packet):
        """Handle message events.

        Returns True when the message counts as spam.
        """
        # BUG FIX: text chunks are tagged with type "text"; the refactor
        # to a generator expression compared chunk["type"] against
        # chunk["text"] (the chunk's content), so the caps check always
        # received an empty string.
        exceeds_caps = self.check_caps(''.join(
            chunk["text"] for chunk in packet if chunk["type"] == "text"
        ))
        contains_emotes = self.check_emotes(packet)
        has_links = self.check_links(packet)

        if exceeds_caps or contains_emotes or has_links:
            return True
        else:
            return False

    def check_links(self, packet):
        """Check for links in the message."""
        return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)

    def check_emotes(self, packet):
        """Check for emotes in the message."""
        return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES

    def check_caps(self, message):
        """Check for caps in the message."""
        # isupper()/islower() are bools: +1 per uppercase, -1 per lowercase.
        return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
3c49598aaaceaa73b7aeb033d4dffd21a14ecf7c
|
src/account.py
|
src/account.py
|
#!/usr/bin/env python3
import sqlite3
import os
from os.path import expanduser
def connect_db():
    """Open (creating if needed) the sqlite database under ~/.config/becon."""
    # get home dir
    dir_config = expanduser("~") + "/.config/becon"
    # check if config dir exists
    if not (os.path.exists(dir_config)):
        os.makedirs(dir_config)
    # connexion to db
    # NOTE(review): connexion and cursor are locals of this function and
    # are never returned, yet config_db() below reads names of the same
    # spelling -- presumably these were meant to be returned or stored at
    # module level; verify against the callers.
    database = dir_config + "/storage.sq3"
    connexion = sqlite3.connect(database)
    cursor = connexion.cursor()
def config_db(t1, t2):
    """Create the users/password tables when they do not exist yet.

    NOTE(review): t1 is never used and t2 is interpolated twice into the
    same query (name='t2' AND name='t2'); the second occurrence was
    presumably meant to be t1 -- confirm the intended table names.
    NOTE(review): the query is built by string concatenation; if t1/t2
    can ever come from outside the program, switch to sqlite3
    parameterized queries.
    NOTE(review): relies on module/global names cursor and connexion --
    see connect_db() above.
    """
    for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
        print(row)
        if ("users" not in row):
            cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
        elif ("password" not in row):
            cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
    connexion.commit()
    connexion.close()
|
#!/usr/bin/env python3
# coding: utf8
import sqlite3
import os
import sys
import hashlib
import getpass
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
def create_user():
    """Interactively collect a new user's details from stdin.

    NOTE(review): an unsalted sha224 digest is not a suitable password
    hash; prefer a dedicated KDF such as hashlib.pbkdf2_hmac.
    NOTE(review): the collected values are not yet persisted anywhere
    in this block.
    """
    nickname = input("Nickname: ")
    fullname = input("Fullname: ")
    # getpass hides the typed password; hash the UTF-8 bytes.
    password = hashlib.sha224(getpass.getpass().encode("utf-8")).hexdigest()
    email = input("Email: ")
|
Create user, read input from stdin and hash password with sha
|
Create user, read input from stdin and hash password with sha
|
Python
|
mit
|
cboin/becon
|
#!/usr/bin/env python3
import sqlite3
import os
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
connexion.close()
Create user, read input from stdin and hash password with sha
|
#!/usr/bin/env python3
# coding: utf8
import sqlite3
import os
import sys
import hashlib
import getpass
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
def create_user():
nickname = input("Nickname: ")
fullname = input("Fullname: ")
password = hashlib.sha224(getpass.getpass().encode("utf-8")).hexdigest()
email = input("Email: ")
|
<commit_before>#!/usr/bin/env python3
import sqlite3
import os
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
connexion.close()
<commit_msg>Create user, read input from stdin and hash password with sha<commit_after>
|
#!/usr/bin/env python3
# coding: utf8
import sqlite3
import os
import sys
import hashlib
import getpass
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
def create_user():
nickname = input("Nickname: ")
fullname = input("Fullname: ")
password = hashlib.sha224(getpass.getpass().encode("utf-8")).hexdigest()
email = input("Email: ")
|
#!/usr/bin/env python3
import sqlite3
import os
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
connexion.close()
Create user, read input from stdin and hash password with sha#!/usr/bin/env python3
# coding: utf8
import sqlite3
import os
import sys
import hashlib
import getpass
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
def create_user():
nickname = input("Nickname: ")
fullname = input("Fullname: ")
password = hashlib.sha224(getpass.getpass().encode("utf-8")).hexdigest()
email = input("Email: ")
|
<commit_before>#!/usr/bin/env python3
import sqlite3
import os
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
connexion.close()
<commit_msg>Create user, read input from stdin and hash password with sha<commit_after>#!/usr/bin/env python3
# coding: utf8
import sqlite3
import os
import sys
import hashlib
import getpass
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
def create_user():
nickname = input("Nickname: ")
fullname = input("Fullname: ")
password = hashlib.sha224(getpass.getpass().encode("utf-8")).hexdigest()
email = input("Email: ")
|
96300cfe78916e7ddc65a48d85cece55ef34ea01
|
scrapers/examples/test.py
|
scrapers/examples/test.py
|
# This line keeps pyflakes from getting mad when it can't find the `scheduler`
# object declared in narcissa.py.
scheduler = globals()['scheduler']
# Write everything inside one giant function so that function can be scheduled
# for later execution.
def scrape_test():
"""
This scraper illustrates the following:
* How to access Narcissa's config
* How to store and access local config variables
* How to schedule a scraper
* How to run a scraper immediately
"""
# Config usually comes first so the user sees it right away.
MY_NAME = 'Lil B the Based God'
# Imports usually come next.
import config
from datetime import datetime
# Program logic goes here. Whatever you use in a normal Python script will
# work as long as it's inside this function.
class MyClass:
def __init__(self):
self.greeting = 'Hello!'
def get_my_name():
return MY_NAME
c = MyClass()
print(c.greeting + ' My name is ' + get_my_name())
print('DB_URI: %s' % config.DB_URI)
print('Right now: %s' % datetime.now())
# Schedule this task to run every 3 seconds.
# It will run immediately as well.
scheduler.every(3).seconds.do(scrape_test)
|
# This line keeps pyflakes from getting mad when it can't find the `scheduler`
# object declared in narcissa.py.
scheduler = globals()['scheduler']
# Write everything inside one giant function so that function can be scheduled
# for later execution.
# This function MUST be named uniquely so it doesn't interfere with other
# scrapers or Narcissa functions. One safe way to name functions is to use the
# scrape_ prefix with the filename of the scraper.
def scrape_test():
"""
This scraper illustrates the following:
* How to access Narcissa's config
* How to store and access local config variables
* How to schedule a scraper
* How to run a scraper immediately
"""
# Config usually comes first so the user sees it right away.
MY_NAME = 'Lil B the Based God'
# Imports usually come next.
import config
from datetime import datetime
# Program logic goes here. Whatever you use in a normal Python script will
# work as long as it's inside this function.
class MyClass:
def __init__(self):
self.greeting = 'Hello!'
def get_my_name():
return MY_NAME
c = MyClass()
print(c.greeting + ' My name is ' + get_my_name())
print('DB_URI: %s' % config.DB_URI)
print('Right now: %s' % datetime.now())
# Schedule this task to run every 3 seconds.
# It will run immediately as well.
scheduler.every(3).seconds.do(scrape_test)
|
Add note about naming functions uniquely
|
Add note about naming functions uniquely
|
Python
|
mit
|
mplewis/narcissa
|
# This line keeps pyflakes from getting mad when it can't find the `scheduler`
# object declared in narcissa.py.
scheduler = globals()['scheduler']
# Write everything inside one giant function so that function can be scheduled
# for later execution.
def scrape_test():
"""
This scraper illustrates the following:
* How to access Narcissa's config
* How to store and access local config variables
* How to schedule a scraper
* How to run a scraper immediately
"""
# Config usually comes first so the user sees it right away.
MY_NAME = 'Lil B the Based God'
# Imports usually come next.
import config
from datetime import datetime
# Program logic goes here. Whatever you use in a normal Python script will
# work as long as it's inside this function.
class MyClass:
def __init__(self):
self.greeting = 'Hello!'
def get_my_name():
return MY_NAME
c = MyClass()
print(c.greeting + ' My name is ' + get_my_name())
print('DB_URI: %s' % config.DB_URI)
print('Right now: %s' % datetime.now())
# Schedule this task to run every 3 seconds.
# It will run immediately as well.
scheduler.every(3).seconds.do(scrape_test)
Add note about naming functions uniquely
|
# This line keeps pyflakes from getting mad when it can't find the `scheduler`
# object declared in narcissa.py.
scheduler = globals()['scheduler']
# Write everything inside one giant function so that function can be scheduled
# for later execution.
# This function MUST be named uniquely so it doesn't interfere with other
# scrapers or Narcissa functions. One safe way to name functions is to use the
# scrape_ prefix with the filename of the scraper.
def scrape_test():
"""
This scraper illustrates the following:
* How to access Narcissa's config
* How to store and access local config variables
* How to schedule a scraper
* How to run a scraper immediately
"""
# Config usually comes first so the user sees it right away.
MY_NAME = 'Lil B the Based God'
# Imports usually come next.
import config
from datetime import datetime
# Program logic goes here. Whatever you use in a normal Python script will
# work as long as it's inside this function.
class MyClass:
def __init__(self):
self.greeting = 'Hello!'
def get_my_name():
return MY_NAME
c = MyClass()
print(c.greeting + ' My name is ' + get_my_name())
print('DB_URI: %s' % config.DB_URI)
print('Right now: %s' % datetime.now())
# Schedule this task to run every 3 seconds.
# It will run immediately as well.
scheduler.every(3).seconds.do(scrape_test)
|
<commit_before># This line keeps pyflakes from getting mad when it can't find the `scheduler`
# object declared in narcissa.py.
scheduler = globals()['scheduler']
# Write everything inside one giant function so that function can be scheduled
# for later execution.
def scrape_test():
"""
This scraper illustrates the following:
* How to access Narcissa's config
* How to store and access local config variables
* How to schedule a scraper
* How to run a scraper immediately
"""
# Config usually comes first so the user sees it right away.
MY_NAME = 'Lil B the Based God'
# Imports usually come next.
import config
from datetime import datetime
# Program logic goes here. Whatever you use in a normal Python script will
# work as long as it's inside this function.
class MyClass:
def __init__(self):
self.greeting = 'Hello!'
def get_my_name():
return MY_NAME
c = MyClass()
print(c.greeting + ' My name is ' + get_my_name())
print('DB_URI: %s' % config.DB_URI)
print('Right now: %s' % datetime.now())
# Schedule this task to run every 3 seconds.
# It will run immediately as well.
scheduler.every(3).seconds.do(scrape_test)
<commit_msg>Add note about naming functions uniquely<commit_after>
|
# This line keeps pyflakes from getting mad when it can't find the `scheduler`
# object declared in narcissa.py.
scheduler = globals()['scheduler']
# Write everything inside one giant function so that function can be scheduled
# for later execution.
# This function MUST be named uniquely so it doesn't interfere with other
# scrapers or Narcissa functions. One safe way to name functions is to use the
# scrape_ prefix with the filename of the scraper.
def scrape_test():
"""
This scraper illustrates the following:
* How to access Narcissa's config
* How to store and access local config variables
* How to schedule a scraper
* How to run a scraper immediately
"""
# Config usually comes first so the user sees it right away.
MY_NAME = 'Lil B the Based God'
# Imports usually come next.
import config
from datetime import datetime
# Program logic goes here. Whatever you use in a normal Python script will
# work as long as it's inside this function.
class MyClass:
def __init__(self):
self.greeting = 'Hello!'
def get_my_name():
return MY_NAME
c = MyClass()
print(c.greeting + ' My name is ' + get_my_name())
print('DB_URI: %s' % config.DB_URI)
print('Right now: %s' % datetime.now())
# Schedule this task to run every 3 seconds.
# It will run immediately as well.
scheduler.every(3).seconds.do(scrape_test)
|
# This line keeps pyflakes from getting mad when it can't find the `scheduler`
# object declared in narcissa.py.
scheduler = globals()['scheduler']
# Write everything inside one giant function so that function can be scheduled
# for later execution.
def scrape_test():
"""
This scraper illustrates the following:
* How to access Narcissa's config
* How to store and access local config variables
* How to schedule a scraper
* How to run a scraper immediately
"""
# Config usually comes first so the user sees it right away.
MY_NAME = 'Lil B the Based God'
# Imports usually come next.
import config
from datetime import datetime
# Program logic goes here. Whatever you use in a normal Python script will
# work as long as it's inside this function.
class MyClass:
def __init__(self):
self.greeting = 'Hello!'
def get_my_name():
return MY_NAME
c = MyClass()
print(c.greeting + ' My name is ' + get_my_name())
print('DB_URI: %s' % config.DB_URI)
print('Right now: %s' % datetime.now())
# Schedule this task to run every 3 seconds.
# It will run immediately as well.
scheduler.every(3).seconds.do(scrape_test)
Add note about naming functions uniquely# This line keeps pyflakes from getting mad when it can't find the `scheduler`
# object declared in narcissa.py.
scheduler = globals()['scheduler']
# Write everything inside one giant function so that function can be scheduled
# for later execution.
# This function MUST be named uniquely so it doesn't interfere with other
# scrapers or Narcissa functions. One safe way to name functions is to use the
# scrape_ prefix with the filename of the scraper.
def scrape_test():
"""
This scraper illustrates the following:
* How to access Narcissa's config
* How to store and access local config variables
* How to schedule a scraper
* How to run a scraper immediately
"""
# Config usually comes first so the user sees it right away.
MY_NAME = 'Lil B the Based God'
# Imports usually come next.
import config
from datetime import datetime
# Program logic goes here. Whatever you use in a normal Python script will
# work as long as it's inside this function.
class MyClass:
def __init__(self):
self.greeting = 'Hello!'
def get_my_name():
return MY_NAME
c = MyClass()
print(c.greeting + ' My name is ' + get_my_name())
print('DB_URI: %s' % config.DB_URI)
print('Right now: %s' % datetime.now())
# Schedule this task to run every 3 seconds.
# It will run immediately as well.
scheduler.every(3).seconds.do(scrape_test)
|
<commit_before># This line keeps pyflakes from getting mad when it can't find the `scheduler`
# object declared in narcissa.py.
scheduler = globals()['scheduler']
# Write everything inside one giant function so that function can be scheduled
# for later execution.
def scrape_test():
"""
This scraper illustrates the following:
* How to access Narcissa's config
* How to store and access local config variables
* How to schedule a scraper
* How to run a scraper immediately
"""
# Config usually comes first so the user sees it right away.
MY_NAME = 'Lil B the Based God'
# Imports usually come next.
import config
from datetime import datetime
# Program logic goes here. Whatever you use in a normal Python script will
# work as long as it's inside this function.
class MyClass:
def __init__(self):
self.greeting = 'Hello!'
def get_my_name():
return MY_NAME
c = MyClass()
print(c.greeting + ' My name is ' + get_my_name())
print('DB_URI: %s' % config.DB_URI)
print('Right now: %s' % datetime.now())
# Schedule this task to run every 3 seconds.
# It will run immediately as well.
scheduler.every(3).seconds.do(scrape_test)
<commit_msg>Add note about naming functions uniquely<commit_after># This line keeps pyflakes from getting mad when it can't find the `scheduler`
# object declared in narcissa.py.
scheduler = globals()['scheduler']
# Write everything inside one giant function so that function can be scheduled
# for later execution.
# This function MUST be named uniquely so it doesn't interfere with other
# scrapers or Narcissa functions. One safe way to name functions is to use the
# scrape_ prefix with the filename of the scraper.
def scrape_test():
"""
This scraper illustrates the following:
* How to access Narcissa's config
* How to store and access local config variables
* How to schedule a scraper
* How to run a scraper immediately
"""
# Config usually comes first so the user sees it right away.
MY_NAME = 'Lil B the Based God'
# Imports usually come next.
import config
from datetime import datetime
# Program logic goes here. Whatever you use in a normal Python script will
# work as long as it's inside this function.
class MyClass:
def __init__(self):
self.greeting = 'Hello!'
def get_my_name():
return MY_NAME
c = MyClass()
print(c.greeting + ' My name is ' + get_my_name())
print('DB_URI: %s' % config.DB_URI)
print('Right now: %s' % datetime.now())
# Schedule this task to run every 3 seconds.
# It will run immediately as well.
scheduler.every(3).seconds.do(scrape_test)
|
7d98adbfd08cbb72b6a9cc4ffe585756203b4e43
|
app/__init__.py
|
app/__init__.py
|
# -*- coding: utf-8 -*-
import logging
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
|
# -*- coding: utf-8 -*-
import logging
from os import environ
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
# Override config if needed
if 'POSIO_SETTINGS' in environ:
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
|
Check if custom config file is set
|
Check if custom config file is set
|
Python
|
mit
|
abrenaut/posio,abrenaut/posio,abrenaut/posio
|
# -*- coding: utf-8 -*-
import logging
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
Check if custom config file is set
|
# -*- coding: utf-8 -*-
import logging
from os import environ
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
# Override config if needed
if 'POSIO_SETTINGS' in environ:
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
|
<commit_before># -*- coding: utf-8 -*-
import logging
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
<commit_msg>Check if custom config file is set<commit_after>
|
# -*- coding: utf-8 -*-
import logging
from os import environ
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
# Override config if needed
if 'POSIO_SETTINGS' in environ:
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
|
# -*- coding: utf-8 -*-
import logging
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
Check if custom config file is set# -*- coding: utf-8 -*-
import logging
from os import environ
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
# Override config if needed
if 'POSIO_SETTINGS' in environ:
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
|
<commit_before># -*- coding: utf-8 -*-
import logging
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
<commit_msg>Check if custom config file is set<commit_after># -*- coding: utf-8 -*-
import logging
from os import environ
from flask import Flask
from flask_socketio import SocketIO
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
app.config.from_object('config')
# Override config if needed
if 'POSIO_SETTINGS' in environ:
app.config.from_envvar('POSIO_SETTINGS')
socketio = SocketIO(app)
from app import views
file_handler = RotatingFileHandler('posio.log', 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('Startup')
|
035fb3211fca0a1627db08f7e91f27cd1addeef6
|
froide/publicbody/forms.py
|
froide/publicbody/forms.py
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
def get_publicbodies(self):
if self.is_valid():
return [self.cleaned_data['publicbody']]
return []
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
def get_publicbodies(self):
if self.is_valid():
return self.cleaned_data['publicbody']
return []
|
Add uniform API for public body widgets
|
Add uniform API for public body widgets
|
Python
|
mit
|
fin/froide,fin/froide,stefanw/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
Add uniform API for public body widgets
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
def get_publicbodies(self):
if self.is_valid():
return [self.cleaned_data['publicbody']]
return []
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
def get_publicbodies(self):
if self.is_valid():
return self.cleaned_data['publicbody']
return []
|
<commit_before>from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
<commit_msg>Add uniform API for public body widgets<commit_after>
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
def get_publicbodies(self):
if self.is_valid():
return [self.cleaned_data['publicbody']]
return []
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
def get_publicbodies(self):
if self.is_valid():
return self.cleaned_data['publicbody']
return []
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
Add uniform API for public body widgetsfrom django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
def get_publicbodies(self):
if self.is_valid():
return [self.cleaned_data['publicbody']]
return []
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
def get_publicbodies(self):
if self.is_valid():
return self.cleaned_data['publicbody']
return []
|
<commit_before>from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
<commit_msg>Add uniform API for public body widgets<commit_after>from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
def get_publicbodies(self):
if self.is_valid():
return [self.cleaned_data['publicbody']]
return []
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
def get_publicbodies(self):
if self.is_valid():
return self.cleaned_data['publicbody']
return []
|
50bab0199e2d209dc177f5e3b5f193330048e403
|
blinktCP.py
|
blinktCP.py
|
#!/usr/bin/env python
# Blue Dot Blinkt Colour Picker
# 02/06/2017
# David Glaude
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
for x in range(blinkt.NUM_PIXELS):
blinkt.set_pixel(x, r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
|
#!/usr/bin/env python
# Blue Dot Blinkt Colour Picker
# 02/06/2017
# David Glaude
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
# for x in range(blinkt.NUM_PIXELS):
# blinkt.set_pixel(x, r, g, b)
blinkt.set_all(r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
|
Use the Blinkt! library set_all rather than to loop on 8 pixels.
|
Use the Blinkt! library set_all rather than to loop on 8 pixels.
|
Python
|
mit
|
dglaude/Blue-Dot-Colour-Picker
|
#!/usr/bin/env python
# Blue Dot Blinkt Colour Picker
# 02/06/2017
# David Glaude
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
for x in range(blinkt.NUM_PIXELS):
blinkt.set_pixel(x, r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
Use the Blinkt! library set_all rather than to loop on 8 pixels.
|
#!/usr/bin/env python
# Blue Dot Blinkt Colour Picker
# 02/06/2017
# David Glaude
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
# for x in range(blinkt.NUM_PIXELS):
# blinkt.set_pixel(x, r, g, b)
blinkt.set_all(r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
|
<commit_before>#!/usr/bin/env python
# Blue Dot Blinkt Colour Picker
# 02/06/2017
# David Glaude
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
for x in range(blinkt.NUM_PIXELS):
blinkt.set_pixel(x, r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
<commit_msg>Use the Blinkt! library set_all rather than to loop on 8 pixels.<commit_after>
|
#!/usr/bin/env python
# Blue Dot Blinkt Colour Picker
# 02/06/2017
# David Glaude
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
# for x in range(blinkt.NUM_PIXELS):
# blinkt.set_pixel(x, r, g, b)
blinkt.set_all(r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
|
#!/usr/bin/env python
# Blue Dot Blinkt Colour Picker
# 02/06/2017
# David Glaude
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
for x in range(blinkt.NUM_PIXELS):
blinkt.set_pixel(x, r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
Use the Blinkt! library set_all rather than to loop on 8 pixels.#!/usr/bin/env python
# Blue Dot Blinkt Colour Picker
# 02/06/2017
# David Glaude
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
# for x in range(blinkt.NUM_PIXELS):
# blinkt.set_pixel(x, r, g, b)
blinkt.set_all(r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
|
<commit_before>#!/usr/bin/env python
# Blue Dot Blinkt Colour Picker
# 02/06/2017
# David Glaude
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
for x in range(blinkt.NUM_PIXELS):
blinkt.set_pixel(x, r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
<commit_msg>Use the Blinkt! library set_all rather than to loop on 8 pixels.<commit_after>#!/usr/bin/env python
# Blue Dot Blinkt Colour Picker
# 02/06/2017
# David Glaude
from bluedot import BlueDot
import colorsys
import time
import blinkt
last_time = time.time()
def setall(r,g,b):
# for x in range(blinkt.NUM_PIXELS):
# blinkt.set_pixel(x, r, g, b)
blinkt.set_all(r, g, b)
blinkt.show()
def move(pos):
h=((pos.angle+180) % 360) / 360
s=pos.distance
v=1.0
r, g, b = [int(c*255) for c in colorsys.hsv_to_rgb(h, s, v)]
setall(r,g,b)
def rmove(pos):
global last_time
current_time=time.time()
delta = current_time-last_time
last_time = current_time
if (delta<0.3) :
setall(0,0,0)
blinkt.set_brightness(0.1)
blinkt.set_clear_on_exit()
bd = BlueDot()
bd.wait_for_press()
bd.when_pressed = move
bd.when_moved = move
bd.when_released = rmove
while True:
time.sleep(1)
|
3ef3d9944c905a2c46ebb205d094350b768f00c9
|
googlebot/middleware.py
|
googlebot/middleware.py
|
import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if request.META.get('HTTP_USER_AGENT'):
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
|
import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if request.META.get('HTTP_USER_AGENT'):
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user, created = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
|
Correct way to use get or create
|
Correct way to use get or create
|
Python
|
bsd-3-clause
|
macropin/django-googlebot
|
import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if request.META.get('HTTP_USER_AGENT'):
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
Correct way to use get or create
|
import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if request.META.get('HTTP_USER_AGENT'):
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user, created = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
|
<commit_before>import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if request.META.get('HTTP_USER_AGENT'):
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
<commit_msg>Correct way to use get or create<commit_after>
|
import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if request.META.get('HTTP_USER_AGENT'):
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user, created = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
|
import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if request.META.get('HTTP_USER_AGENT'):
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
Correct way to use get or createimport socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if request.META.get('HTTP_USER_AGENT'):
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user, created = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
|
<commit_before>import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if request.META.get('HTTP_USER_AGENT'):
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
<commit_msg>Correct way to use get or create<commit_after>import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if request.META.get('HTTP_USER_AGENT'):
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user, created = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.