commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
45e3a01380cd5c4487a241aad14d69c88649d96e
|
bcelldb_init.py
|
bcelldb_init.py
|
#!/usr/bin/env python-2.7
"""
Module for common processes in bcelldb computing:
get information from config file
"""
import re
def get_config():
"""
Look for config file in . and than ../
Return config key value pairs in dictionary conf[].
"""
# try to open config file in .
try:
config_file = open("config","r")
except IOError:
# try from ../ directory
try:
config_file = open("../config", "r")
except IOError:
print "no config file found"
# global dictionary conf that will be exported
conf = dict()
# read lines of config
for line in config_file:
# every line with # is used as a comment line
if re.search('=', line) and not re.match('\s?#', line):
# split entries into key-value
[key, value] = re.split("=", line)
# get rid of new line
conf[key] = value[:-1]
# return conf[]
return conf
|
#!/usr/bin/env python-2.7
"""
Module for common processes in bcelldb computing:
get information from config file
"""
import re
re_key_value = re.compile("^\s*([_A-Za-z][_0-9A-Za-z]+)=(.*?)\s*;?\s*$")
re_inline_comment = re.compile("^(.*?)(?<!\\\\)#.*")
def get_config():
"""
Look for config file in . and than ../
Return config key value pairs in dictionary conf[].
"""
# try to open config file in .
try:
config_file = open("config","r")
except IOError:
# try from ../ directory
try:
config_file = open("../config", "r")
except IOError:
print "no config file found"
# global dictionary conf that will be exported
conf = dict()
# read lines of config
for line in config_file:
line = line.rstrip()
if not re.match("^\s*$", line) and not re.match("^\s*#", line):
# Split entries into key-value.
line = re_inline_comment.sub('\g<1>', line)
key, value = re_key_value.match(line).group(1,2)
conf[key] = value
return conf
|
Fix handling of separator characters by Python modules
|
Fix handling of separator characters by Python modules
Currently the Python modules do not tolerate more than one equal
sign ("=") in each line of the config file. However REs with
look-ahead functionality require this character. Introduce new
RE-based line-splitting mechanism. Fix handling of in-line
comments.
|
Python
|
agpl-3.0
|
b-cell-immunology/sciReptor,b-cell-immunology/sciReptor,b-cell-immunology/sciReptor,b-cell-immunology/sciReptor
|
#!/usr/bin/env python-2.7
"""
Module for common processes in bcelldb computing:
get information from config file
"""
import re
def get_config():
"""
Look for config file in . and than ../
Return config key value pairs in dictionary conf[].
"""
# try to open config file in .
try:
config_file = open("config","r")
except IOError:
# try from ../ directory
try:
config_file = open("../config", "r")
except IOError:
print "no config file found"
# global dictionary conf that will be exported
conf = dict()
# read lines of config
for line in config_file:
# every line with # is used as a comment line
if re.search('=', line) and not re.match('\s?#', line):
# split entries into key-value
[key, value] = re.split("=", line)
# get rid of new line
conf[key] = value[:-1]
# return conf[]
return conf
Fix handling of separator characters by Python modules
Currently the Python modules do not tolerate more than one equal
sign ("=") in each line of the config file. However REs with
look-ahead functionality require this character. Introduce new
RE-based line-splitting mechanism. Fix handling of in-line
comments.
|
#!/usr/bin/env python-2.7
"""
Module for common processes in bcelldb computing:
get information from config file
"""
import re
re_key_value = re.compile("^\s*([_A-Za-z][_0-9A-Za-z]+)=(.*?)\s*;?\s*$")
re_inline_comment = re.compile("^(.*?)(?<!\\\\)#.*")
def get_config():
"""
Look for config file in . and than ../
Return config key value pairs in dictionary conf[].
"""
# try to open config file in .
try:
config_file = open("config","r")
except IOError:
# try from ../ directory
try:
config_file = open("../config", "r")
except IOError:
print "no config file found"
# global dictionary conf that will be exported
conf = dict()
# read lines of config
for line in config_file:
line = line.rstrip()
if not re.match("^\s*$", line) and not re.match("^\s*#", line):
# Split entries into key-value.
line = re_inline_comment.sub('\g<1>', line)
key, value = re_key_value.match(line).group(1,2)
conf[key] = value
return conf
|
<commit_before>#!/usr/bin/env python-2.7
"""
Module for common processes in bcelldb computing:
get information from config file
"""
import re
def get_config():
"""
Look for config file in . and than ../
Return config key value pairs in dictionary conf[].
"""
# try to open config file in .
try:
config_file = open("config","r")
except IOError:
# try from ../ directory
try:
config_file = open("../config", "r")
except IOError:
print "no config file found"
# global dictionary conf that will be exported
conf = dict()
# read lines of config
for line in config_file:
# every line with # is used as a comment line
if re.search('=', line) and not re.match('\s?#', line):
# split entries into key-value
[key, value] = re.split("=", line)
# get rid of new line
conf[key] = value[:-1]
# return conf[]
return conf
<commit_msg>Fix handling of separator characters by Python modules
Currently the Python modules do not tolerate more than one equal
sign ("=") in each line of the config file. However REs with
look-ahead functionality require this character. Introduce new
RE-based line-splitting mechanism. Fix handling of in-line
comments.<commit_after>
|
#!/usr/bin/env python-2.7
"""
Module for common processes in bcelldb computing:
get information from config file
"""
import re
re_key_value = re.compile("^\s*([_A-Za-z][_0-9A-Za-z]+)=(.*?)\s*;?\s*$")
re_inline_comment = re.compile("^(.*?)(?<!\\\\)#.*")
def get_config():
"""
Look for config file in . and than ../
Return config key value pairs in dictionary conf[].
"""
# try to open config file in .
try:
config_file = open("config","r")
except IOError:
# try from ../ directory
try:
config_file = open("../config", "r")
except IOError:
print "no config file found"
# global dictionary conf that will be exported
conf = dict()
# read lines of config
for line in config_file:
line = line.rstrip()
if not re.match("^\s*$", line) and not re.match("^\s*#", line):
# Split entries into key-value.
line = re_inline_comment.sub('\g<1>', line)
key, value = re_key_value.match(line).group(1,2)
conf[key] = value
return conf
|
#!/usr/bin/env python-2.7
"""
Module for common processes in bcelldb computing:
get information from config file
"""
import re
def get_config():
"""
Look for config file in . and than ../
Return config key value pairs in dictionary conf[].
"""
# try to open config file in .
try:
config_file = open("config","r")
except IOError:
# try from ../ directory
try:
config_file = open("../config", "r")
except IOError:
print "no config file found"
# global dictionary conf that will be exported
conf = dict()
# read lines of config
for line in config_file:
# every line with # is used as a comment line
if re.search('=', line) and not re.match('\s?#', line):
# split entries into key-value
[key, value] = re.split("=", line)
# get rid of new line
conf[key] = value[:-1]
# return conf[]
return conf
Fix handling of separator characters by Python modules
Currently the Python modules do not tolerate more than one equal
sign ("=") in each line of the config file. However REs with
look-ahead functionality require this character. Introduce new
RE-based line-splitting mechanism. Fix handling of in-line
comments.#!/usr/bin/env python-2.7
"""
Module for common processes in bcelldb computing:
get information from config file
"""
import re
re_key_value = re.compile("^\s*([_A-Za-z][_0-9A-Za-z]+)=(.*?)\s*;?\s*$")
re_inline_comment = re.compile("^(.*?)(?<!\\\\)#.*")
def get_config():
"""
Look for config file in . and than ../
Return config key value pairs in dictionary conf[].
"""
# try to open config file in .
try:
config_file = open("config","r")
except IOError:
# try from ../ directory
try:
config_file = open("../config", "r")
except IOError:
print "no config file found"
# global dictionary conf that will be exported
conf = dict()
# read lines of config
for line in config_file:
line = line.rstrip()
if not re.match("^\s*$", line) and not re.match("^\s*#", line):
# Split entries into key-value.
line = re_inline_comment.sub('\g<1>', line)
key, value = re_key_value.match(line).group(1,2)
conf[key] = value
return conf
|
<commit_before>#!/usr/bin/env python-2.7
"""
Module for common processes in bcelldb computing:
get information from config file
"""
import re
def get_config():
"""
Look for config file in . and than ../
Return config key value pairs in dictionary conf[].
"""
# try to open config file in .
try:
config_file = open("config","r")
except IOError:
# try from ../ directory
try:
config_file = open("../config", "r")
except IOError:
print "no config file found"
# global dictionary conf that will be exported
conf = dict()
# read lines of config
for line in config_file:
# every line with # is used as a comment line
if re.search('=', line) and not re.match('\s?#', line):
# split entries into key-value
[key, value] = re.split("=", line)
# get rid of new line
conf[key] = value[:-1]
# return conf[]
return conf
<commit_msg>Fix handling of separator characters by Python modules
Currently the Python modules do not tolerate more than one equal
sign ("=") in each line of the config file. However REs with
look-ahead functionality require this character. Introduce new
RE-based line-splitting mechanism. Fix handling of in-line
comments.<commit_after>#!/usr/bin/env python-2.7
"""
Module for common processes in bcelldb computing:
get information from config file
"""
import re
re_key_value = re.compile("^\s*([_A-Za-z][_0-9A-Za-z]+)=(.*?)\s*;?\s*$")
re_inline_comment = re.compile("^(.*?)(?<!\\\\)#.*")
def get_config():
"""
Look for config file in . and than ../
Return config key value pairs in dictionary conf[].
"""
# try to open config file in .
try:
config_file = open("config","r")
except IOError:
# try from ../ directory
try:
config_file = open("../config", "r")
except IOError:
print "no config file found"
# global dictionary conf that will be exported
conf = dict()
# read lines of config
for line in config_file:
line = line.rstrip()
if not re.match("^\s*$", line) and not re.match("^\s*#", line):
# Split entries into key-value.
line = re_inline_comment.sub('\g<1>', line)
key, value = re_key_value.match(line).group(1,2)
conf[key] = value
return conf
|
5a66aaa7b7640ef616bf1817a9e2999e10d97404
|
tests/test_wsgi_graphql.py
|
tests/test_wsgi_graphql.py
|
from webtest import TestApp as Client
from wsgi_graphql import wsgi_graphql
from graphql.core.type import (
GraphQLEnumType,
GraphQLEnumValue,
GraphQLInterfaceType,
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLList,
GraphQLNonNull,
GraphQLSchema,
GraphQLString,
)
def raises(*_):
raise Exception("Raises!")
TestSchema = GraphQLSchema(
query=GraphQLObjectType(
'Root',
fields=lambda: {
'test': GraphQLField(
GraphQLString,
args={
'who': GraphQLArgument(
type=GraphQLString
)
},
resolver=lambda root, args, *_: 'Hello ' + (args['who'] or 'World')
),
'thrower': GraphQLField(
GraphQLNonNull(GraphQLString),
resolver=raises
)
}
)
)
def test_basic():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/?query={test}')
assert response.json == {
'data': {
'test': 'Hello World'
}
}
|
import json
from webtest import TestApp as Client
from wsgi_graphql import wsgi_graphql
from graphql.core.type import (
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLNonNull,
GraphQLSchema,
GraphQLString,
)
def raises(*_):
raise Exception("Raises!")
TestSchema = GraphQLSchema(
query=GraphQLObjectType(
'Root',
fields=lambda: {
'test': GraphQLField(
GraphQLString,
args={
'who': GraphQLArgument(
type=GraphQLString
)
},
resolver=lambda root, args, *_: 'Hello ' + (args['who'] or 'World')
),
'thrower': GraphQLField(
GraphQLNonNull(GraphQLString),
resolver=raises
)
}
)
)
def test_allows_GET_with_query_param():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/', {'query': '{test}'})
assert response.json == {
'data': {
'test': 'Hello World'
}
}
def test_allows_GET_with_variable_values():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/', {
'query': 'query helloWho($who: String){ test(who: $who) }',
'variables': json.dumps({'who': 'Dolly'})
})
assert response.json == {
'data': {
'test': 'Hello Dolly'
}
}
|
Expand test to cover variables.
|
Expand test to cover variables.
|
Python
|
mit
|
ecreall/graphql-wsgi,faassen/graphql-wsgi,faassen/wsgi_graphql
|
from webtest import TestApp as Client
from wsgi_graphql import wsgi_graphql
from graphql.core.type import (
GraphQLEnumType,
GraphQLEnumValue,
GraphQLInterfaceType,
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLList,
GraphQLNonNull,
GraphQLSchema,
GraphQLString,
)
def raises(*_):
raise Exception("Raises!")
TestSchema = GraphQLSchema(
query=GraphQLObjectType(
'Root',
fields=lambda: {
'test': GraphQLField(
GraphQLString,
args={
'who': GraphQLArgument(
type=GraphQLString
)
},
resolver=lambda root, args, *_: 'Hello ' + (args['who'] or 'World')
),
'thrower': GraphQLField(
GraphQLNonNull(GraphQLString),
resolver=raises
)
}
)
)
def test_basic():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/?query={test}')
assert response.json == {
'data': {
'test': 'Hello World'
}
}
Expand test to cover variables.
|
import json
from webtest import TestApp as Client
from wsgi_graphql import wsgi_graphql
from graphql.core.type import (
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLNonNull,
GraphQLSchema,
GraphQLString,
)
def raises(*_):
raise Exception("Raises!")
TestSchema = GraphQLSchema(
query=GraphQLObjectType(
'Root',
fields=lambda: {
'test': GraphQLField(
GraphQLString,
args={
'who': GraphQLArgument(
type=GraphQLString
)
},
resolver=lambda root, args, *_: 'Hello ' + (args['who'] or 'World')
),
'thrower': GraphQLField(
GraphQLNonNull(GraphQLString),
resolver=raises
)
}
)
)
def test_allows_GET_with_query_param():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/', {'query': '{test}'})
assert response.json == {
'data': {
'test': 'Hello World'
}
}
def test_allows_GET_with_variable_values():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/', {
'query': 'query helloWho($who: String){ test(who: $who) }',
'variables': json.dumps({'who': 'Dolly'})
})
assert response.json == {
'data': {
'test': 'Hello Dolly'
}
}
|
<commit_before>from webtest import TestApp as Client
from wsgi_graphql import wsgi_graphql
from graphql.core.type import (
GraphQLEnumType,
GraphQLEnumValue,
GraphQLInterfaceType,
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLList,
GraphQLNonNull,
GraphQLSchema,
GraphQLString,
)
def raises(*_):
raise Exception("Raises!")
TestSchema = GraphQLSchema(
query=GraphQLObjectType(
'Root',
fields=lambda: {
'test': GraphQLField(
GraphQLString,
args={
'who': GraphQLArgument(
type=GraphQLString
)
},
resolver=lambda root, args, *_: 'Hello ' + (args['who'] or 'World')
),
'thrower': GraphQLField(
GraphQLNonNull(GraphQLString),
resolver=raises
)
}
)
)
def test_basic():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/?query={test}')
assert response.json == {
'data': {
'test': 'Hello World'
}
}
<commit_msg>Expand test to cover variables.<commit_after>
|
import json
from webtest import TestApp as Client
from wsgi_graphql import wsgi_graphql
from graphql.core.type import (
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLNonNull,
GraphQLSchema,
GraphQLString,
)
def raises(*_):
raise Exception("Raises!")
TestSchema = GraphQLSchema(
query=GraphQLObjectType(
'Root',
fields=lambda: {
'test': GraphQLField(
GraphQLString,
args={
'who': GraphQLArgument(
type=GraphQLString
)
},
resolver=lambda root, args, *_: 'Hello ' + (args['who'] or 'World')
),
'thrower': GraphQLField(
GraphQLNonNull(GraphQLString),
resolver=raises
)
}
)
)
def test_allows_GET_with_query_param():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/', {'query': '{test}'})
assert response.json == {
'data': {
'test': 'Hello World'
}
}
def test_allows_GET_with_variable_values():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/', {
'query': 'query helloWho($who: String){ test(who: $who) }',
'variables': json.dumps({'who': 'Dolly'})
})
assert response.json == {
'data': {
'test': 'Hello Dolly'
}
}
|
from webtest import TestApp as Client
from wsgi_graphql import wsgi_graphql
from graphql.core.type import (
GraphQLEnumType,
GraphQLEnumValue,
GraphQLInterfaceType,
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLList,
GraphQLNonNull,
GraphQLSchema,
GraphQLString,
)
def raises(*_):
raise Exception("Raises!")
TestSchema = GraphQLSchema(
query=GraphQLObjectType(
'Root',
fields=lambda: {
'test': GraphQLField(
GraphQLString,
args={
'who': GraphQLArgument(
type=GraphQLString
)
},
resolver=lambda root, args, *_: 'Hello ' + (args['who'] or 'World')
),
'thrower': GraphQLField(
GraphQLNonNull(GraphQLString),
resolver=raises
)
}
)
)
def test_basic():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/?query={test}')
assert response.json == {
'data': {
'test': 'Hello World'
}
}
Expand test to cover variables.import json
from webtest import TestApp as Client
from wsgi_graphql import wsgi_graphql
from graphql.core.type import (
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLNonNull,
GraphQLSchema,
GraphQLString,
)
def raises(*_):
raise Exception("Raises!")
TestSchema = GraphQLSchema(
query=GraphQLObjectType(
'Root',
fields=lambda: {
'test': GraphQLField(
GraphQLString,
args={
'who': GraphQLArgument(
type=GraphQLString
)
},
resolver=lambda root, args, *_: 'Hello ' + (args['who'] or 'World')
),
'thrower': GraphQLField(
GraphQLNonNull(GraphQLString),
resolver=raises
)
}
)
)
def test_allows_GET_with_query_param():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/', {'query': '{test}'})
assert response.json == {
'data': {
'test': 'Hello World'
}
}
def test_allows_GET_with_variable_values():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/', {
'query': 'query helloWho($who: String){ test(who: $who) }',
'variables': json.dumps({'who': 'Dolly'})
})
assert response.json == {
'data': {
'test': 'Hello Dolly'
}
}
|
<commit_before>from webtest import TestApp as Client
from wsgi_graphql import wsgi_graphql
from graphql.core.type import (
GraphQLEnumType,
GraphQLEnumValue,
GraphQLInterfaceType,
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLList,
GraphQLNonNull,
GraphQLSchema,
GraphQLString,
)
def raises(*_):
raise Exception("Raises!")
TestSchema = GraphQLSchema(
query=GraphQLObjectType(
'Root',
fields=lambda: {
'test': GraphQLField(
GraphQLString,
args={
'who': GraphQLArgument(
type=GraphQLString
)
},
resolver=lambda root, args, *_: 'Hello ' + (args['who'] or 'World')
),
'thrower': GraphQLField(
GraphQLNonNull(GraphQLString),
resolver=raises
)
}
)
)
def test_basic():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/?query={test}')
assert response.json == {
'data': {
'test': 'Hello World'
}
}
<commit_msg>Expand test to cover variables.<commit_after>import json
from webtest import TestApp as Client
from wsgi_graphql import wsgi_graphql
from graphql.core.type import (
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLNonNull,
GraphQLSchema,
GraphQLString,
)
def raises(*_):
raise Exception("Raises!")
TestSchema = GraphQLSchema(
query=GraphQLObjectType(
'Root',
fields=lambda: {
'test': GraphQLField(
GraphQLString,
args={
'who': GraphQLArgument(
type=GraphQLString
)
},
resolver=lambda root, args, *_: 'Hello ' + (args['who'] or 'World')
),
'thrower': GraphQLField(
GraphQLNonNull(GraphQLString),
resolver=raises
)
}
)
)
def test_allows_GET_with_query_param():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/', {'query': '{test}'})
assert response.json == {
'data': {
'test': 'Hello World'
}
}
def test_allows_GET_with_variable_values():
wsgi = wsgi_graphql(TestSchema)
c = Client(wsgi)
response = c.get('/', {
'query': 'query helloWho($who: String){ test(who: $who) }',
'variables': json.dumps({'who': 'Dolly'})
})
assert response.json == {
'data': {
'test': 'Hello Dolly'
}
}
|
f3937c77366dc5df4a1eb3b62a2f3452c539dbc4
|
cms/models/settingmodels.py
|
cms/models/settingmodels.py
|
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
@python_2_unicode_compatible
class UserSettings(models.Model):
user = models.ForeignKey(User, editable=False)
language = models.CharField(_("Language"), max_length=10, choices=settings.LANGUAGES,
help_text=_("The language for the admin interface and toolbar"))
clipboard = models.ForeignKey('cms.Placeholder', blank=True, null=True, editable=False)
class Meta:
verbose_name = _('user setting')
verbose_name_plural = _('user settings')
app_label = 'cms'
def __str__(self):
return force_unicode(self.user)
|
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
@python_2_unicode_compatible
class UserSettings(models.Model):
user = models.ForeignKey(User, editable=False, related_name='djangocms_usersettings')
language = models.CharField(_("Language"), max_length=10, choices=settings.LANGUAGES,
help_text=_("The language for the admin interface and toolbar"))
clipboard = models.ForeignKey('cms.Placeholder', blank=True, null=True, editable=False)
class Meta:
verbose_name = _('user setting')
verbose_name_plural = _('user settings')
app_label = 'cms'
def __str__(self):
return force_unicode(self.user)
|
Define a custom related_name in UserSettings->User relation
|
Define a custom related_name in UserSettings->User relation
|
Python
|
bsd-3-clause
|
vxsx/django-cms,netzkolchose/django-cms,owers19856/django-cms,MagicSolutions/django-cms,intip/django-cms,czpython/django-cms,Vegasvikk/django-cms,farhaadila/django-cms,saintbird/django-cms,astagi/django-cms,SachaMPS/django-cms,cyberintruder/django-cms,qnub/django-cms,stefanfoulis/django-cms,bittner/django-cms,AlexProfi/django-cms,sznekol/django-cms,DylannCordel/django-cms,yakky/django-cms,vstoykov/django-cms,stefanw/django-cms,jproffitt/django-cms,nostalgiaz/django-cms,MagicSolutions/django-cms,stefanfoulis/django-cms,rsalmaso/django-cms,evildmp/django-cms,wyg3958/django-cms,rscnt/django-cms,philippze/django-cms,keimlink/django-cms,youprofit/django-cms,selecsosi/django-cms,ScholzVolkmer/django-cms,SofiaReis/django-cms,divio/django-cms,petecummings/django-cms,sznekol/django-cms,youprofit/django-cms,jeffreylu9/django-cms,selecsosi/django-cms,irudayarajisawa/django-cms,bittner/django-cms,youprofit/django-cms,chkir/django-cms,frnhr/django-cms,360youlun/django-cms,rryan/django-cms,divio/django-cms,liuyisiyisi/django-cms,jrclaramunt/django-cms,rscnt/django-cms,yakky/django-cms,isotoma/django-cms,jeffreylu9/django-cms,mkoistinen/django-cms,takeshineshiro/django-cms,bittner/django-cms,vstoykov/django-cms,andyzsf/django-cms,netzkolchose/django-cms,andyzsf/django-cms,astagi/django-cms,jproffitt/django-cms,liuyisiyisi/django-cms,nostalgiaz/django-cms,wyg3958/django-cms,wuzhihui1123/django-cms,AlexProfi/django-cms,takeshineshiro/django-cms,isotoma/django-cms,memnonila/django-cms,evildmp/django-cms,bittner/django-cms,Jaccorot/django-cms,rsalmaso/django-cms,keimlink/django-cms,FinalAngel/django-cms,jsma/django-cms,rryan/django-cms,saintbird/django-cms,nimbis/django-cms,stefanw/django-cms,leture/django-cms,datakortet/django-cms,keimlink/django-cms,sephii/django-cms,Jaccorot/django-cms,kk9599/django-cms,intip/django-cms,Vegasvikk/django-cms,selecsosi/django-cms,robmagee/django-cms,jeffreylu9/django-cms,Vegasvikk/django-cms,jrief/django-cms,rsalmaso/django-cms,benzkji/django-cms,jpr
offitt/django-cms,philippze/django-cms,robmagee/django-cms,webu/django-cms,vxsx/django-cms,stefanw/django-cms,FinalAngel/django-cms,andyzsf/django-cms,jproffitt/django-cms,timgraham/django-cms,360youlun/django-cms,SmithsonianEnterprises/django-cms,iddqd1/django-cms,vxsx/django-cms,leture/django-cms,irudayarajisawa/django-cms,netzkolchose/django-cms,mkoistinen/django-cms,intgr/django-cms,SofiaReis/django-cms,cyberintruder/django-cms,datakortet/django-cms,Jaccorot/django-cms,intgr/django-cms,vad/django-cms,yakky/django-cms,owers19856/django-cms,czpython/django-cms,robmagee/django-cms,Livefyre/django-cms,nostalgiaz/django-cms,vxsx/django-cms,takeshineshiro/django-cms,AlexProfi/django-cms,saintbird/django-cms,owers19856/django-cms,jsma/django-cms,memnonila/django-cms,petecummings/django-cms,SmithsonianEnterprises/django-cms,stefanw/django-cms,josjevv/django-cms,jeffreylu9/django-cms,farhaadila/django-cms,nostalgiaz/django-cms,andyzsf/django-cms,petecummings/django-cms,chmberl/django-cms,intip/django-cms,rryan/django-cms,vad/django-cms,kk9599/django-cms,chmberl/django-cms,datakortet/django-cms,intgr/django-cms,sephii/django-cms,vstoykov/django-cms,chmberl/django-cms,chkir/django-cms,donce/django-cms,rryan/django-cms,wuzhihui1123/django-cms,liuyisiyisi/django-cms,wyg3958/django-cms,josjevv/django-cms,FinalAngel/django-cms,webu/django-cms,stefanfoulis/django-cms,evildmp/django-cms,SmithsonianEnterprises/django-cms,benzkji/django-cms,FinalAngel/django-cms,frnhr/django-cms,nimbis/django-cms,webu/django-cms,wuzhihui1123/django-cms,jrief/django-cms,donce/django-cms,SofiaReis/django-cms,jrclaramunt/django-cms,sephii/django-cms,timgraham/django-cms,vad/django-cms,philippze/django-cms,dhorelik/django-cms,divio/django-cms,donce/django-cms,ScholzVolkmer/django-cms,Livefyre/django-cms,vad/django-cms,selecsosi/django-cms,dhorelik/django-cms,benzkji/django-cms,jsma/django-cms,benzkji/django-cms,frnhr/django-cms,mkoistinen/django-cms,josjevv/django-cms,MagicSolutions/django-cms,timgrah
am/django-cms,mkoistinen/django-cms,Livefyre/django-cms,qnub/django-cms,farhaadila/django-cms,astagi/django-cms,datakortet/django-cms,DylannCordel/django-cms,intip/django-cms,memnonila/django-cms,nimbis/django-cms,isotoma/django-cms,irudayarajisawa/django-cms,netzkolchose/django-cms,360youlun/django-cms,SachaMPS/django-cms,stefanfoulis/django-cms,chkir/django-cms,qnub/django-cms,sznekol/django-cms,divio/django-cms,jrief/django-cms,sephii/django-cms,rsalmaso/django-cms,SachaMPS/django-cms,czpython/django-cms,kk9599/django-cms,iddqd1/django-cms,iddqd1/django-cms,nimbis/django-cms,ScholzVolkmer/django-cms,jrief/django-cms,cyberintruder/django-cms,DylannCordel/django-cms,dhorelik/django-cms,evildmp/django-cms,leture/django-cms,jsma/django-cms,czpython/django-cms,Livefyre/django-cms,jrclaramunt/django-cms,rscnt/django-cms,yakky/django-cms,frnhr/django-cms,intgr/django-cms,isotoma/django-cms,wuzhihui1123/django-cms
|
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
@python_2_unicode_compatible
class UserSettings(models.Model):
user = models.ForeignKey(User, editable=False)
language = models.CharField(_("Language"), max_length=10, choices=settings.LANGUAGES,
help_text=_("The language for the admin interface and toolbar"))
clipboard = models.ForeignKey('cms.Placeholder', blank=True, null=True, editable=False)
class Meta:
verbose_name = _('user setting')
verbose_name_plural = _('user settings')
app_label = 'cms'
def __str__(self):
return force_unicode(self.user)
Define a custom related_name in UserSettings->User relation
|
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
@python_2_unicode_compatible
class UserSettings(models.Model):
user = models.ForeignKey(User, editable=False, related_name='djangocms_usersettings')
language = models.CharField(_("Language"), max_length=10, choices=settings.LANGUAGES,
help_text=_("The language for the admin interface and toolbar"))
clipboard = models.ForeignKey('cms.Placeholder', blank=True, null=True, editable=False)
class Meta:
verbose_name = _('user setting')
verbose_name_plural = _('user settings')
app_label = 'cms'
def __str__(self):
return force_unicode(self.user)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
@python_2_unicode_compatible
class UserSettings(models.Model):
user = models.ForeignKey(User, editable=False)
language = models.CharField(_("Language"), max_length=10, choices=settings.LANGUAGES,
help_text=_("The language for the admin interface and toolbar"))
clipboard = models.ForeignKey('cms.Placeholder', blank=True, null=True, editable=False)
class Meta:
verbose_name = _('user setting')
verbose_name_plural = _('user settings')
app_label = 'cms'
def __str__(self):
return force_unicode(self.user)
<commit_msg>Define a custom related_name in UserSettings->User relation<commit_after>
|
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
@python_2_unicode_compatible
class UserSettings(models.Model):
user = models.ForeignKey(User, editable=False, related_name='djangocms_usersettings')
language = models.CharField(_("Language"), max_length=10, choices=settings.LANGUAGES,
help_text=_("The language for the admin interface and toolbar"))
clipboard = models.ForeignKey('cms.Placeholder', blank=True, null=True, editable=False)
class Meta:
verbose_name = _('user setting')
verbose_name_plural = _('user settings')
app_label = 'cms'
def __str__(self):
return force_unicode(self.user)
|
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
@python_2_unicode_compatible
class UserSettings(models.Model):
user = models.ForeignKey(User, editable=False)
language = models.CharField(_("Language"), max_length=10, choices=settings.LANGUAGES,
help_text=_("The language for the admin interface and toolbar"))
clipboard = models.ForeignKey('cms.Placeholder', blank=True, null=True, editable=False)
class Meta:
verbose_name = _('user setting')
verbose_name_plural = _('user settings')
app_label = 'cms'
def __str__(self):
return force_unicode(self.user)
Define a custom related_name in UserSettings->User relation# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
@python_2_unicode_compatible
class UserSettings(models.Model):
user = models.ForeignKey(User, editable=False, related_name='djangocms_usersettings')
language = models.CharField(_("Language"), max_length=10, choices=settings.LANGUAGES,
help_text=_("The language for the admin interface and toolbar"))
clipboard = models.ForeignKey('cms.Placeholder', blank=True, null=True, editable=False)
class Meta:
verbose_name = _('user setting')
verbose_name_plural = _('user settings')
app_label = 'cms'
def __str__(self):
return force_unicode(self.user)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
@python_2_unicode_compatible
class UserSettings(models.Model):
user = models.ForeignKey(User, editable=False)
language = models.CharField(_("Language"), max_length=10, choices=settings.LANGUAGES,
help_text=_("The language for the admin interface and toolbar"))
clipboard = models.ForeignKey('cms.Placeholder', blank=True, null=True, editable=False)
class Meta:
verbose_name = _('user setting')
verbose_name_plural = _('user settings')
app_label = 'cms'
def __str__(self):
return force_unicode(self.user)
<commit_msg>Define a custom related_name in UserSettings->User relation<commit_after># -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.utils.compat.dj import force_unicode, python_2_unicode_compatible
@python_2_unicode_compatible
class UserSettings(models.Model):
user = models.ForeignKey(User, editable=False, related_name='djangocms_usersettings')
language = models.CharField(_("Language"), max_length=10, choices=settings.LANGUAGES,
help_text=_("The language for the admin interface and toolbar"))
clipboard = models.ForeignKey('cms.Placeholder', blank=True, null=True, editable=False)
class Meta:
verbose_name = _('user setting')
verbose_name_plural = _('user settings')
app_label = 'cms'
def __str__(self):
return force_unicode(self.user)
|
2aef104da6bf6ce98619fe5bac5718533e2e7530
|
yunity/utils/tests/misc.py
|
yunity/utils/tests/misc.py
|
from importlib import import_module
from json import dumps as dump_json
from json import loads as load_json
def json_stringify(data):
return dump_json(data, sort_keys=True, separators=(',', ':')).encode("utf-8") if data else None
def content_json(response):
return load_json(response.content.decode("utf-8"))
def is_test_resource(resource):
return resource.startswith('test_')
def maybe_import(resource):
try:
return import_module(resource)
except ImportError:
return None
|
from importlib import import_module
from json import dumps as dump_json
from json import loads as load_json
def json_stringify(data):
return dump_json(data, sort_keys=True, separators=(',', ':')).encode("utf-8") if data else None
def content_json(response):
try:
return load_json(response.content.decode("utf-8"))
except ValueError:
raise ValueError('invalid json content in response')
def is_test_resource(resource):
return resource.startswith('test_')
def maybe_import(resource):
try:
return import_module(resource)
except ImportError:
return None
|
Improve error on invalid JSON response content
|
Improve error on invalid JSON response content
with @NerdyProjects
|
Python
|
agpl-3.0
|
yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend
|
from importlib import import_module
from json import dumps as dump_json
from json import loads as load_json
def json_stringify(data):
return dump_json(data, sort_keys=True, separators=(',', ':')).encode("utf-8") if data else None
def content_json(response):
return load_json(response.content.decode("utf-8"))
def is_test_resource(resource):
return resource.startswith('test_')
def maybe_import(resource):
try:
return import_module(resource)
except ImportError:
return None
Improve error on invalid JSON response content
with @NerdyProjects
|
from importlib import import_module
from json import dumps as dump_json
from json import loads as load_json
def json_stringify(data):
return dump_json(data, sort_keys=True, separators=(',', ':')).encode("utf-8") if data else None
def content_json(response):
try:
return load_json(response.content.decode("utf-8"))
except ValueError:
raise ValueError('invalid json content in response')
def is_test_resource(resource):
return resource.startswith('test_')
def maybe_import(resource):
try:
return import_module(resource)
except ImportError:
return None
|
<commit_before>from importlib import import_module
from json import dumps as dump_json
from json import loads as load_json
def json_stringify(data):
return dump_json(data, sort_keys=True, separators=(',', ':')).encode("utf-8") if data else None
def content_json(response):
return load_json(response.content.decode("utf-8"))
def is_test_resource(resource):
return resource.startswith('test_')
def maybe_import(resource):
try:
return import_module(resource)
except ImportError:
return None
<commit_msg>Improve error on invalid JSON response content
with @NerdyProjects<commit_after>
|
from importlib import import_module
from json import dumps as dump_json
from json import loads as load_json
def json_stringify(data):
return dump_json(data, sort_keys=True, separators=(',', ':')).encode("utf-8") if data else None
def content_json(response):
try:
return load_json(response.content.decode("utf-8"))
except ValueError:
raise ValueError('invalid json content in response')
def is_test_resource(resource):
return resource.startswith('test_')
def maybe_import(resource):
try:
return import_module(resource)
except ImportError:
return None
|
from importlib import import_module
from json import dumps as dump_json
from json import loads as load_json
def json_stringify(data):
return dump_json(data, sort_keys=True, separators=(',', ':')).encode("utf-8") if data else None
def content_json(response):
return load_json(response.content.decode("utf-8"))
def is_test_resource(resource):
return resource.startswith('test_')
def maybe_import(resource):
try:
return import_module(resource)
except ImportError:
return None
Improve error on invalid JSON response content
with @NerdyProjectsfrom importlib import import_module
from json import dumps as dump_json
from json import loads as load_json
def json_stringify(data):
return dump_json(data, sort_keys=True, separators=(',', ':')).encode("utf-8") if data else None
def content_json(response):
try:
return load_json(response.content.decode("utf-8"))
except ValueError:
raise ValueError('invalid json content in response')
def is_test_resource(resource):
return resource.startswith('test_')
def maybe_import(resource):
try:
return import_module(resource)
except ImportError:
return None
|
<commit_before>from importlib import import_module
from json import dumps as dump_json
from json import loads as load_json
def json_stringify(data):
return dump_json(data, sort_keys=True, separators=(',', ':')).encode("utf-8") if data else None
def content_json(response):
return load_json(response.content.decode("utf-8"))
def is_test_resource(resource):
return resource.startswith('test_')
def maybe_import(resource):
try:
return import_module(resource)
except ImportError:
return None
<commit_msg>Improve error on invalid JSON response content
with @NerdyProjects<commit_after>from importlib import import_module
from json import dumps as dump_json
from json import loads as load_json
def json_stringify(data):
return dump_json(data, sort_keys=True, separators=(',', ':')).encode("utf-8") if data else None
def content_json(response):
try:
return load_json(response.content.decode("utf-8"))
except ValueError:
raise ValueError('invalid json content in response')
def is_test_resource(resource):
return resource.startswith('test_')
def maybe_import(resource):
try:
return import_module(resource)
except ImportError:
return None
|
991b0da117956e4d523732edc03bc287edf6a680
|
identity/app.py
|
identity/app.py
|
from __future__ import unicode_literals, absolute_import
import os
from identity import app
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0',
port=port,
debug=os.environ.get('DEBUG', False))
|
from __future__ import unicode_literals, absolute_import
import os
if os.environ.get('ENV') and os.path.exists(os.environ['ENV']):
for line in open(os.environ['ENV']):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
from identity import app
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0',
port=port,
debug=os.environ.get('DEBUG', False))
|
Load env-vars from a .env file
|
Load env-vars from a .env file
|
Python
|
mit
|
ErinCall/identity,ErinCall/identity
|
from __future__ import unicode_literals, absolute_import
import os
from identity import app
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0',
port=port,
debug=os.environ.get('DEBUG', False))
Load env-vars from a .env file
|
from __future__ import unicode_literals, absolute_import
import os
if os.environ.get('ENV') and os.path.exists(os.environ['ENV']):
for line in open(os.environ['ENV']):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
from identity import app
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0',
port=port,
debug=os.environ.get('DEBUG', False))
|
<commit_before>from __future__ import unicode_literals, absolute_import
import os
from identity import app
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0',
port=port,
debug=os.environ.get('DEBUG', False))
<commit_msg>Load env-vars from a .env file<commit_after>
|
from __future__ import unicode_literals, absolute_import
import os
if os.environ.get('ENV') and os.path.exists(os.environ['ENV']):
for line in open(os.environ['ENV']):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
from identity import app
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0',
port=port,
debug=os.environ.get('DEBUG', False))
|
from __future__ import unicode_literals, absolute_import
import os
from identity import app
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0',
port=port,
debug=os.environ.get('DEBUG', False))
Load env-vars from a .env filefrom __future__ import unicode_literals, absolute_import
import os
if os.environ.get('ENV') and os.path.exists(os.environ['ENV']):
for line in open(os.environ['ENV']):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
from identity import app
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0',
port=port,
debug=os.environ.get('DEBUG', False))
|
<commit_before>from __future__ import unicode_literals, absolute_import
import os
from identity import app
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0',
port=port,
debug=os.environ.get('DEBUG', False))
<commit_msg>Load env-vars from a .env file<commit_after>from __future__ import unicode_literals, absolute_import
import os
if os.environ.get('ENV') and os.path.exists(os.environ['ENV']):
for line in open(os.environ['ENV']):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
from identity import app
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0',
port=port,
debug=os.environ.get('DEBUG', False))
|
d9d9270f0577a6969f7cb2ccf48a8c0aa859b44a
|
circular-buffer/circular_buffer.py
|
circular-buffer/circular_buffer.py
|
# File: circular_buffer.py
# Purpose: A data structure that uses a single, fixed-size buffer
# as if it were connected end-to-end.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 29 September 2016, 10:48 PM
class CircularBuffer(object):
def __init__(self, size_max):
self.max = bytearray(size_max) # bytearray represents a mutable sequence of bytes.
self.read_head, self.write_head = 0
|
# File: circular_buffer.py
# Purpose: A data structure that uses a single, fixed-size buffer
# as if it were connected end-to-end.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 29 September 2016, 10:48 PM
class CircularBuffer(object):
def __init__(self, size_max):
self.maxBuffer = bytearray(size_max) # bytearray represents a mutable sequence of bytes.
self.readHead, self.writeHead = 0
def insert_data(self, value):
self.maxBuffer[self.writeHead] = value
def clean(self):
self.maxBuffer = bytearray(len(self.maxBuffer))
|
Add functions to insert and clear data
|
Add functions to insert and clear data
|
Python
|
mit
|
amalshehu/exercism-python
|
# File: circular_buffer.py
# Purpose: A data structure that uses a single, fixed-size buffer
# as if it were connected end-to-end.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 29 September 2016, 10:48 PM
class CircularBuffer(object):
def __init__(self, size_max):
self.max = bytearray(size_max) # bytearray represents a mutable sequence of bytes.
self.read_head, self.write_head = 0
Add functions to insert and clear data
|
# File: circular_buffer.py
# Purpose: A data structure that uses a single, fixed-size buffer
# as if it were connected end-to-end.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 29 September 2016, 10:48 PM
class CircularBuffer(object):
def __init__(self, size_max):
self.maxBuffer = bytearray(size_max) # bytearray represents a mutable sequence of bytes.
self.readHead, self.writeHead = 0
def insert_data(self, value):
self.maxBuffer[self.writeHead] = value
def clean(self):
self.maxBuffer = bytearray(len(self.maxBuffer))
|
<commit_before># File: circular_buffer.py
# Purpose: A data structure that uses a single, fixed-size buffer
# as if it were connected end-to-end.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 29 September 2016, 10:48 PM
class CircularBuffer(object):
def __init__(self, size_max):
self.max = bytearray(size_max) # bytearray represents a mutable sequence of bytes.
self.read_head, self.write_head = 0
<commit_msg>Add functions to insert and clear data<commit_after>
|
# File: circular_buffer.py
# Purpose: A data structure that uses a single, fixed-size buffer
# as if it were connected end-to-end.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 29 September 2016, 10:48 PM
class CircularBuffer(object):
def __init__(self, size_max):
self.maxBuffer = bytearray(size_max) # bytearray represents a mutable sequence of bytes.
self.readHead, self.writeHead = 0
def insert_data(self, value):
self.maxBuffer[self.writeHead] = value
def clean(self):
self.maxBuffer = bytearray(len(self.maxBuffer))
|
# File: circular_buffer.py
# Purpose: A data structure that uses a single, fixed-size buffer
# as if it were connected end-to-end.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 29 September 2016, 10:48 PM
class CircularBuffer(object):
def __init__(self, size_max):
self.max = bytearray(size_max) # bytearray represents a mutable sequence of bytes.
self.read_head, self.write_head = 0
Add functions to insert and clear data# File: circular_buffer.py
# Purpose: A data structure that uses a single, fixed-size buffer
# as if it were connected end-to-end.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 29 September 2016, 10:48 PM
class CircularBuffer(object):
def __init__(self, size_max):
self.maxBuffer = bytearray(size_max) # bytearray represents a mutable sequence of bytes.
self.readHead, self.writeHead = 0
def insert_data(self, value):
self.maxBuffer[self.writeHead] = value
def clean(self):
self.maxBuffer = bytearray(len(self.maxBuffer))
|
<commit_before># File: circular_buffer.py
# Purpose: A data structure that uses a single, fixed-size buffer
# as if it were connected end-to-end.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 29 September 2016, 10:48 PM
class CircularBuffer(object):
def __init__(self, size_max):
self.max = bytearray(size_max) # bytearray represents a mutable sequence of bytes.
self.read_head, self.write_head = 0
<commit_msg>Add functions to insert and clear data<commit_after># File: circular_buffer.py
# Purpose: A data structure that uses a single, fixed-size buffer
# as if it were connected end-to-end.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Thursday 29 September 2016, 10:48 PM
class CircularBuffer(object):
def __init__(self, size_max):
self.maxBuffer = bytearray(size_max) # bytearray represents a mutable sequence of bytes.
self.readHead, self.writeHead = 0
def insert_data(self, value):
self.maxBuffer[self.writeHead] = value
def clean(self):
self.maxBuffer = bytearray(len(self.maxBuffer))
|
d850f4785340f73a417653f46c4de275a6eeeb8c
|
utilities/ticker-update.py
|
utilities/ticker-update.py
|
import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
securities = ['bgcp', 'cvx', 'f', 'ge', 'intc', 'lumn', 'src', 't']
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
|
import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
|
Read securities from conf file
|
Read securities from conf file
|
Python
|
mit
|
daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various
|
import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
securities = ['bgcp', 'cvx', 'f', 'ge', 'intc', 'lumn', 'src', 't']
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
Read securities from conf file
|
import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
|
<commit_before>import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
securities = ['bgcp', 'cvx', 'f', 'ge', 'intc', 'lumn', 'src', 't']
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
<commit_msg>Read securities from conf file<commit_after>
|
import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
|
import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
securities = ['bgcp', 'cvx', 'f', 'ge', 'intc', 'lumn', 'src', 't']
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
Read securities from conf fileimport requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
|
<commit_before>import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
securities = ['bgcp', 'cvx', 'f', 'ge', 'intc', 'lumn', 'src', 't']
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
<commit_msg>Read securities from conf file<commit_after>import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
|
4c5b6217015610fe7cf3064b59e1b8de1fa41575
|
PyFloraBook/input_output/data_coordinator.py
|
PyFloraBook/input_output/data_coordinator.py
|
from pathlib import Path
import json
import inspect
import sys
import PyFloraBook
OBSERVATIONS_FOLDER = "observation_data"
RAW_DATA_FOLDER = "raw"
def locate_project_folder() -> Path:
"""Locate top-level project folder
Returns:
Path of the project folder
"""
source_path = Path(inspect.getsourcefile(PyFloraBook)).parent
# This assumes that the highest-level project __init__ file is contained
# in a sub-folder of the project folder
return source_path.parent
def locate_data_folder() -> Path:
"""Return path of the data IO folder
Returns:
Path of data IO folder
"""
return Path(load_configuration()["data_folder"])
def locate_raw_data_folder() -> Path:
"""Return path of the raw data folder
Returns:
Path of raw data folder
"""
return locate_data_folder() / OBSERVATIONS_FOLDER / RAW_DATA_FOLDER
def load_configuration() -> dict:
"""Load project configuration info
Returns:
Dictionary of configuration info.
"""
configuration = Path(locate_project_folder() / "configuration.json")
with configuration.open() as config_file:
return json.load(config_file)
def locate_current_script_folder() -> Path:
"""Return path of the currently running script
Returns:
Path of current script
"""
return Path(sys.path[0])
|
from pathlib import Path
import json
import inspect
import sys
import PyFloraBook
# Globals
OBSERVATIONS_FOLDER = "observation_data"
RAW_OBSERVATIONS_FOLDER = "raw_observations"
RAW_COUNTS_FOLDER = "raw_counts"
def locate_project_folder() -> Path:
"""Locate top-level project folder
Returns:
Path of the project folder
"""
source_path = Path(inspect.getsourcefile(PyFloraBook)).parent
# This assumes that the highest-level project __init__ file is contained
# in a sub-folder of the project folder
return source_path.parent
def locate_data_folder() -> Path:
"""Return path of the data IO folder
Returns:
Path of data IO folder
"""
return Path(load_configuration()["data_folder"])
def locate_raw_observations_folder() -> Path:
"""Return path of the raw observations data folder
Returns:
Path of raw observations data folder
"""
return (locate_data_folder() / OBSERVATIONS_FOLDER /
RAW_OBSERVATIONS_FOLDER)
def locate_raw_counts_folder() -> Path:
"""Return path of the raw counts data folder
Returns:
Path of raw counts data folder
"""
return locate_data_folder() / OBSERVATIONS_FOLDER / RAW_COUNTS_FOLDER
def load_configuration() -> dict:
"""Load project configuration info
Returns:
Dictionary of configuration info.
"""
configuration = Path(locate_project_folder() / "configuration.json")
with configuration.open() as config_file:
return json.load(config_file)
def locate_current_script_folder() -> Path:
"""Return path of the currently running script
Returns:
Path of current script
"""
return Path(sys.path[0])
|
Support separate folders for raw observations and raw counts
|
Support separate folders for raw observations and raw counts
|
Python
|
mit
|
jnfrye/local_plants_book
|
from pathlib import Path
import json
import inspect
import sys
import PyFloraBook
OBSERVATIONS_FOLDER = "observation_data"
RAW_DATA_FOLDER = "raw"
def locate_project_folder() -> Path:
"""Locate top-level project folder
Returns:
Path of the project folder
"""
source_path = Path(inspect.getsourcefile(PyFloraBook)).parent
# This assumes that the highest-level project __init__ file is contained
# in a sub-folder of the project folder
return source_path.parent
def locate_data_folder() -> Path:
"""Return path of the data IO folder
Returns:
Path of data IO folder
"""
return Path(load_configuration()["data_folder"])
def locate_raw_data_folder() -> Path:
"""Return path of the raw data folder
Returns:
Path of raw data folder
"""
return locate_data_folder() / OBSERVATIONS_FOLDER / RAW_DATA_FOLDER
def load_configuration() -> dict:
"""Load project configuration info
Returns:
Dictionary of configuration info.
"""
configuration = Path(locate_project_folder() / "configuration.json")
with configuration.open() as config_file:
return json.load(config_file)
def locate_current_script_folder() -> Path:
"""Return path of the currently running script
Returns:
Path of current script
"""
return Path(sys.path[0])
Support separate folders for raw observations and raw counts
|
from pathlib import Path
import json
import inspect
import sys
import PyFloraBook
# Globals
OBSERVATIONS_FOLDER = "observation_data"
RAW_OBSERVATIONS_FOLDER = "raw_observations"
RAW_COUNTS_FOLDER = "raw_counts"
def locate_project_folder() -> Path:
"""Locate top-level project folder
Returns:
Path of the project folder
"""
source_path = Path(inspect.getsourcefile(PyFloraBook)).parent
# This assumes that the highest-level project __init__ file is contained
# in a sub-folder of the project folder
return source_path.parent
def locate_data_folder() -> Path:
"""Return path of the data IO folder
Returns:
Path of data IO folder
"""
return Path(load_configuration()["data_folder"])
def locate_raw_observations_folder() -> Path:
"""Return path of the raw observations data folder
Returns:
Path of raw observations data folder
"""
return (locate_data_folder() / OBSERVATIONS_FOLDER /
RAW_OBSERVATIONS_FOLDER)
def locate_raw_counts_folder() -> Path:
"""Return path of the raw counts data folder
Returns:
Path of raw counts data folder
"""
return locate_data_folder() / OBSERVATIONS_FOLDER / RAW_COUNTS_FOLDER
def load_configuration() -> dict:
"""Load project configuration info
Returns:
Dictionary of configuration info.
"""
configuration = Path(locate_project_folder() / "configuration.json")
with configuration.open() as config_file:
return json.load(config_file)
def locate_current_script_folder() -> Path:
"""Return path of the currently running script
Returns:
Path of current script
"""
return Path(sys.path[0])
|
<commit_before>from pathlib import Path
import json
import inspect
import sys
import PyFloraBook
OBSERVATIONS_FOLDER = "observation_data"
RAW_DATA_FOLDER = "raw"
def locate_project_folder() -> Path:
"""Locate top-level project folder
Returns:
Path of the project folder
"""
source_path = Path(inspect.getsourcefile(PyFloraBook)).parent
# This assumes that the highest-level project __init__ file is contained
# in a sub-folder of the project folder
return source_path.parent
def locate_data_folder() -> Path:
"""Return path of the data IO folder
Returns:
Path of data IO folder
"""
return Path(load_configuration()["data_folder"])
def locate_raw_data_folder() -> Path:
"""Return path of the raw data folder
Returns:
Path of raw data folder
"""
return locate_data_folder() / OBSERVATIONS_FOLDER / RAW_DATA_FOLDER
def load_configuration() -> dict:
"""Load project configuration info
Returns:
Dictionary of configuration info.
"""
configuration = Path(locate_project_folder() / "configuration.json")
with configuration.open() as config_file:
return json.load(config_file)
def locate_current_script_folder() -> Path:
"""Return path of the currently running script
Returns:
Path of current script
"""
return Path(sys.path[0])
<commit_msg>Support separate folders for raw observations and raw counts<commit_after>
|
from pathlib import Path
import json
import inspect
import sys
import PyFloraBook
# Globals
OBSERVATIONS_FOLDER = "observation_data"
RAW_OBSERVATIONS_FOLDER = "raw_observations"
RAW_COUNTS_FOLDER = "raw_counts"
def locate_project_folder() -> Path:
"""Locate top-level project folder
Returns:
Path of the project folder
"""
source_path = Path(inspect.getsourcefile(PyFloraBook)).parent
# This assumes that the highest-level project __init__ file is contained
# in a sub-folder of the project folder
return source_path.parent
def locate_data_folder() -> Path:
"""Return path of the data IO folder
Returns:
Path of data IO folder
"""
return Path(load_configuration()["data_folder"])
def locate_raw_observations_folder() -> Path:
"""Return path of the raw observations data folder
Returns:
Path of raw observations data folder
"""
return (locate_data_folder() / OBSERVATIONS_FOLDER /
RAW_OBSERVATIONS_FOLDER)
def locate_raw_counts_folder() -> Path:
"""Return path of the raw counts data folder
Returns:
Path of raw counts data folder
"""
return locate_data_folder() / OBSERVATIONS_FOLDER / RAW_COUNTS_FOLDER
def load_configuration() -> dict:
"""Load project configuration info
Returns:
Dictionary of configuration info.
"""
configuration = Path(locate_project_folder() / "configuration.json")
with configuration.open() as config_file:
return json.load(config_file)
def locate_current_script_folder() -> Path:
"""Return path of the currently running script
Returns:
Path of current script
"""
return Path(sys.path[0])
|
from pathlib import Path
import json
import inspect
import sys
import PyFloraBook
OBSERVATIONS_FOLDER = "observation_data"
RAW_DATA_FOLDER = "raw"
def locate_project_folder() -> Path:
"""Locate top-level project folder
Returns:
Path of the project folder
"""
source_path = Path(inspect.getsourcefile(PyFloraBook)).parent
# This assumes that the highest-level project __init__ file is contained
# in a sub-folder of the project folder
return source_path.parent
def locate_data_folder() -> Path:
"""Return path of the data IO folder
Returns:
Path of data IO folder
"""
return Path(load_configuration()["data_folder"])
def locate_raw_data_folder() -> Path:
"""Return path of the raw data folder
Returns:
Path of raw data folder
"""
return locate_data_folder() / OBSERVATIONS_FOLDER / RAW_DATA_FOLDER
def load_configuration() -> dict:
"""Load project configuration info
Returns:
Dictionary of configuration info.
"""
configuration = Path(locate_project_folder() / "configuration.json")
with configuration.open() as config_file:
return json.load(config_file)
def locate_current_script_folder() -> Path:
"""Return path of the currently running script
Returns:
Path of current script
"""
return Path(sys.path[0])
Support separate folders for raw observations and raw countsfrom pathlib import Path
import json
import inspect
import sys
import PyFloraBook
# Globals
OBSERVATIONS_FOLDER = "observation_data"
RAW_OBSERVATIONS_FOLDER = "raw_observations"
RAW_COUNTS_FOLDER = "raw_counts"
def locate_project_folder() -> Path:
"""Locate top-level project folder
Returns:
Path of the project folder
"""
source_path = Path(inspect.getsourcefile(PyFloraBook)).parent
# This assumes that the highest-level project __init__ file is contained
# in a sub-folder of the project folder
return source_path.parent
def locate_data_folder() -> Path:
"""Return path of the data IO folder
Returns:
Path of data IO folder
"""
return Path(load_configuration()["data_folder"])
def locate_raw_observations_folder() -> Path:
"""Return path of the raw observations data folder
Returns:
Path of raw observations data folder
"""
return (locate_data_folder() / OBSERVATIONS_FOLDER /
RAW_OBSERVATIONS_FOLDER)
def locate_raw_counts_folder() -> Path:
"""Return path of the raw counts data folder
Returns:
Path of raw counts data folder
"""
return locate_data_folder() / OBSERVATIONS_FOLDER / RAW_COUNTS_FOLDER
def load_configuration() -> dict:
"""Load project configuration info
Returns:
Dictionary of configuration info.
"""
configuration = Path(locate_project_folder() / "configuration.json")
with configuration.open() as config_file:
return json.load(config_file)
def locate_current_script_folder() -> Path:
"""Return path of the currently running script
Returns:
Path of current script
"""
return Path(sys.path[0])
|
<commit_before>from pathlib import Path
import json
import inspect
import sys
import PyFloraBook
OBSERVATIONS_FOLDER = "observation_data"
RAW_DATA_FOLDER = "raw"
def locate_project_folder() -> Path:
"""Locate top-level project folder
Returns:
Path of the project folder
"""
source_path = Path(inspect.getsourcefile(PyFloraBook)).parent
# This assumes that the highest-level project __init__ file is contained
# in a sub-folder of the project folder
return source_path.parent
def locate_data_folder() -> Path:
"""Return path of the data IO folder
Returns:
Path of data IO folder
"""
return Path(load_configuration()["data_folder"])
def locate_raw_data_folder() -> Path:
"""Return path of the raw data folder
Returns:
Path of raw data folder
"""
return locate_data_folder() / OBSERVATIONS_FOLDER / RAW_DATA_FOLDER
def load_configuration() -> dict:
"""Load project configuration info
Returns:
Dictionary of configuration info.
"""
configuration = Path(locate_project_folder() / "configuration.json")
with configuration.open() as config_file:
return json.load(config_file)
def locate_current_script_folder() -> Path:
"""Return path of the currently running script
Returns:
Path of current script
"""
return Path(sys.path[0])
<commit_msg>Support separate folders for raw observations and raw counts<commit_after>from pathlib import Path
import json
import inspect
import sys
import PyFloraBook
# Globals
OBSERVATIONS_FOLDER = "observation_data"
RAW_OBSERVATIONS_FOLDER = "raw_observations"
RAW_COUNTS_FOLDER = "raw_counts"
def locate_project_folder() -> Path:
"""Locate top-level project folder
Returns:
Path of the project folder
"""
source_path = Path(inspect.getsourcefile(PyFloraBook)).parent
# This assumes that the highest-level project __init__ file is contained
# in a sub-folder of the project folder
return source_path.parent
def locate_data_folder() -> Path:
"""Return path of the data IO folder
Returns:
Path of data IO folder
"""
return Path(load_configuration()["data_folder"])
def locate_raw_observations_folder() -> Path:
"""Return path of the raw observations data folder
Returns:
Path of raw observations data folder
"""
return (locate_data_folder() / OBSERVATIONS_FOLDER /
RAW_OBSERVATIONS_FOLDER)
def locate_raw_counts_folder() -> Path:
"""Return path of the raw counts data folder
Returns:
Path of raw counts data folder
"""
return locate_data_folder() / OBSERVATIONS_FOLDER / RAW_COUNTS_FOLDER
def load_configuration() -> dict:
"""Load project configuration info
Returns:
Dictionary of configuration info.
"""
configuration = Path(locate_project_folder() / "configuration.json")
with configuration.open() as config_file:
return json.load(config_file)
def locate_current_script_folder() -> Path:
"""Return path of the currently running script
Returns:
Path of current script
"""
return Path(sys.path[0])
|
db32ee58b5247dbc281d5f4633f5b9c2fe704ad1
|
metaci/testresults/utils.py
|
metaci/testresults/utils.py
|
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from metaci.build.models import Build
from metaci.build.models import BuildFlow
def find_buildflow(request, build_id, flow):
""" given a build_id and flow name, find a single BuildFlow (ala tests/ urls patterns). """
build = get_object_or_404(Build, id=build_id)
if not request.user.has_perm('plan.view_builds', build):
raise PermissionDenied()
query = {'build_id': build_id, 'flow': flow}
if build.current_rebuild:
query['rebuild_id'] = build.current_rebuild
build_flow = get_object_or_404(BuildFlow, **query)
return build_flow
|
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from metaci.build.models import Build
from metaci.build.models import BuildFlow
def find_buildflow(request, build_id, flow):
""" given a build_id and flow name, find a single BuildFlow (ala tests/ urls patterns). """
build = get_object_or_404(Build, id=build_id)
if not request.user.has_perm('plan.view_builds', build.planrepo):
raise PermissionDenied()
query = {'build_id': build_id, 'flow': flow}
if build.current_rebuild:
query['rebuild_id'] = build.current_rebuild
build_flow = get_object_or_404(BuildFlow, **query)
return build_flow
|
Use PlanRepository as the object for permission check instead of Build
|
Use PlanRepository as the object for permission check instead of Build
|
Python
|
bsd-3-clause
|
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
|
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from metaci.build.models import Build
from metaci.build.models import BuildFlow
def find_buildflow(request, build_id, flow):
""" given a build_id and flow name, find a single BuildFlow (ala tests/ urls patterns). """
build = get_object_or_404(Build, id=build_id)
if not request.user.has_perm('plan.view_builds', build):
raise PermissionDenied()
query = {'build_id': build_id, 'flow': flow}
if build.current_rebuild:
query['rebuild_id'] = build.current_rebuild
build_flow = get_object_or_404(BuildFlow, **query)
return build_flow
Use PlanRepository as the object for permission check instead of Build
|
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from metaci.build.models import Build
from metaci.build.models import BuildFlow
def find_buildflow(request, build_id, flow):
""" given a build_id and flow name, find a single BuildFlow (ala tests/ urls patterns). """
build = get_object_or_404(Build, id=build_id)
if not request.user.has_perm('plan.view_builds', build.planrepo):
raise PermissionDenied()
query = {'build_id': build_id, 'flow': flow}
if build.current_rebuild:
query['rebuild_id'] = build.current_rebuild
build_flow = get_object_or_404(BuildFlow, **query)
return build_flow
|
<commit_before>from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from metaci.build.models import Build
from metaci.build.models import BuildFlow
def find_buildflow(request, build_id, flow):
""" given a build_id and flow name, find a single BuildFlow (ala tests/ urls patterns). """
build = get_object_or_404(Build, id=build_id)
if not request.user.has_perm('plan.view_builds', build):
raise PermissionDenied()
query = {'build_id': build_id, 'flow': flow}
if build.current_rebuild:
query['rebuild_id'] = build.current_rebuild
build_flow = get_object_or_404(BuildFlow, **query)
return build_flow
<commit_msg>Use PlanRepository as the object for permission check instead of Build<commit_after>
|
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from metaci.build.models import Build
from metaci.build.models import BuildFlow
def find_buildflow(request, build_id, flow):
""" given a build_id and flow name, find a single BuildFlow (ala tests/ urls patterns). """
build = get_object_or_404(Build, id=build_id)
if not request.user.has_perm('plan.view_builds', build.planrepo):
raise PermissionDenied()
query = {'build_id': build_id, 'flow': flow}
if build.current_rebuild:
query['rebuild_id'] = build.current_rebuild
build_flow = get_object_or_404(BuildFlow, **query)
return build_flow
|
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from metaci.build.models import Build
from metaci.build.models import BuildFlow
def find_buildflow(request, build_id, flow):
""" given a build_id and flow name, find a single BuildFlow (ala tests/ urls patterns). """
build = get_object_or_404(Build, id=build_id)
if not request.user.has_perm('plan.view_builds', build):
raise PermissionDenied()
query = {'build_id': build_id, 'flow': flow}
if build.current_rebuild:
query['rebuild_id'] = build.current_rebuild
build_flow = get_object_or_404(BuildFlow, **query)
return build_flow
Use PlanRepository as the object for permission check instead of Buildfrom django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from metaci.build.models import Build
from metaci.build.models import BuildFlow
def find_buildflow(request, build_id, flow):
""" given a build_id and flow name, find a single BuildFlow (ala tests/ urls patterns). """
build = get_object_or_404(Build, id=build_id)
if not request.user.has_perm('plan.view_builds', build.planrepo):
raise PermissionDenied()
query = {'build_id': build_id, 'flow': flow}
if build.current_rebuild:
query['rebuild_id'] = build.current_rebuild
build_flow = get_object_or_404(BuildFlow, **query)
return build_flow
|
<commit_before>from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from metaci.build.models import Build
from metaci.build.models import BuildFlow
def find_buildflow(request, build_id, flow):
""" given a build_id and flow name, find a single BuildFlow (ala tests/ urls patterns). """
build = get_object_or_404(Build, id=build_id)
if not request.user.has_perm('plan.view_builds', build):
raise PermissionDenied()
query = {'build_id': build_id, 'flow': flow}
if build.current_rebuild:
query['rebuild_id'] = build.current_rebuild
build_flow = get_object_or_404(BuildFlow, **query)
return build_flow
<commit_msg>Use PlanRepository as the object for permission check instead of Build<commit_after>from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from metaci.build.models import Build
from metaci.build.models import BuildFlow
def find_buildflow(request, build_id, flow):
""" given a build_id and flow name, find a single BuildFlow (ala tests/ urls patterns). """
build = get_object_or_404(Build, id=build_id)
if not request.user.has_perm('plan.view_builds', build.planrepo):
raise PermissionDenied()
query = {'build_id': build_id, 'flow': flow}
if build.current_rebuild:
query['rebuild_id'] = build.current_rebuild
build_flow = get_object_or_404(BuildFlow, **query)
return build_flow
|
ff5cc4bc97999572dfb1db5731fca307d32fb1a3
|
infi/pyutils/decorators.py
|
infi/pyutils/decorators.py
|
import functools
import inspect
def wraps(wrapped):
""" a convenience function on top of functools.wraps:
- adds the original function to the wrapped function as __wrapped__ attribute."""
def new_decorator(f):
returned = functools.wraps(wrapped)(f)
returned.__wrapped__ = wrapped
return returned
return new_decorator
def getargspec(func):
"""calls inspect's getargspec with func.__wrapped__ if exists, else with func"""
wrapped = getattr(func, "__wrapped__", None)
if wrapped is not None:
return getargspec(wrapped)
return inspect._getargspec(func)
def monkeypatch_inspect():
"""applies getarspec monkeypatch on inspect"""
inspect._getargspec = inspect.getargspec
inspect.getargspec = getargspec
inspect.__patched_by_infi__ = True
if not getattr(inspect, "__patched_by_infi__", False):
monkeypatch_inspect()
|
import functools
import inspect
def wraps(wrapped):
""" a convenience function on top of functools.wraps:
- adds the original function to the wrapped function as __wrapped__ attribute."""
def new_decorator(f):
returned = functools.wraps(wrapped)(f)
returned.__wrapped__ = wrapped
return returned
return new_decorator
def inspect_getargspec_patch(func):
"""calls inspect's getargspec with func.__wrapped__ if exists, else with func"""
return inspect._infi_patched_getargspec(_get_innner_func(func))
def ipython_getargspec_patch(func):
return _ipython_inspect_module._infi_patched_getargspec(_get_innner_func(func))
def _get_innner_func(f):
while True:
wrapped = getattr(f, "__wrapped__", None)
if wrapped is None:
return f
f = wrapped
_PATCHED_NAME_PREFIX = "_infi_patched_"
def monkey_patch(module, name, replacement):
original_name = _PATCHED_NAME_PREFIX + name
if getattr(module, original_name, None) is None:
setattr(module, original_name, getattr(module, name))
setattr(module, name, replacement)
monkey_patch(inspect, "getargspec", inspect_getargspec_patch)
_ipython_inspect_module = None
try:
# ipython 0.11
from IPython.core import oinspect as _ipython_inspect_module
except ImportError:
try:
# ipython 0.10.2
from IPython import OInspect as _ipython_inspect_module
except ImportError:
pass
if _ipython_inspect_module is not None:
monkey_patch(_ipython_inspect_module, "getargspec", ipython_getargspec_patch)
|
Support introspection hack for IPython
|
Support introspection hack for IPython
|
Python
|
bsd-3-clause
|
Infinidat/infi.pyutils
|
import functools
import inspect
def wraps(wrapped):
""" a convenience function on top of functools.wraps:
- adds the original function to the wrapped function as __wrapped__ attribute."""
def new_decorator(f):
returned = functools.wraps(wrapped)(f)
returned.__wrapped__ = wrapped
return returned
return new_decorator
def getargspec(func):
"""calls inspect's getargspec with func.__wrapped__ if exists, else with func"""
wrapped = getattr(func, "__wrapped__", None)
if wrapped is not None:
return getargspec(wrapped)
return inspect._getargspec(func)
def monkeypatch_inspect():
"""applies getarspec monkeypatch on inspect"""
inspect._getargspec = inspect.getargspec
inspect.getargspec = getargspec
inspect.__patched_by_infi__ = True
if not getattr(inspect, "__patched_by_infi__", False):
monkeypatch_inspect()
Support introspection hack for IPython
|
import functools
import inspect
def wraps(wrapped):
""" a convenience function on top of functools.wraps:
- adds the original function to the wrapped function as __wrapped__ attribute."""
def new_decorator(f):
returned = functools.wraps(wrapped)(f)
returned.__wrapped__ = wrapped
return returned
return new_decorator
def inspect_getargspec_patch(func):
"""calls inspect's getargspec with func.__wrapped__ if exists, else with func"""
return inspect._infi_patched_getargspec(_get_innner_func(func))
def ipython_getargspec_patch(func):
return _ipython_inspect_module._infi_patched_getargspec(_get_innner_func(func))
def _get_innner_func(f):
while True:
wrapped = getattr(f, "__wrapped__", None)
if wrapped is None:
return f
f = wrapped
_PATCHED_NAME_PREFIX = "_infi_patched_"
def monkey_patch(module, name, replacement):
original_name = _PATCHED_NAME_PREFIX + name
if getattr(module, original_name, None) is None:
setattr(module, original_name, getattr(module, name))
setattr(module, name, replacement)
monkey_patch(inspect, "getargspec", inspect_getargspec_patch)
_ipython_inspect_module = None
try:
# ipython 0.11
from IPython.core import oinspect as _ipython_inspect_module
except ImportError:
try:
# ipython 0.10.2
from IPython import OInspect as _ipython_inspect_module
except ImportError:
pass
if _ipython_inspect_module is not None:
monkey_patch(_ipython_inspect_module, "getargspec", ipython_getargspec_patch)
|
<commit_before>import functools
import inspect
def wraps(wrapped):
""" a convenience function on top of functools.wraps:
- adds the original function to the wrapped function as __wrapped__ attribute."""
def new_decorator(f):
returned = functools.wraps(wrapped)(f)
returned.__wrapped__ = wrapped
return returned
return new_decorator
def getargspec(func):
"""calls inspect's getargspec with func.__wrapped__ if exists, else with func"""
wrapped = getattr(func, "__wrapped__", None)
if wrapped is not None:
return getargspec(wrapped)
return inspect._getargspec(func)
def monkeypatch_inspect():
"""applies getarspec monkeypatch on inspect"""
inspect._getargspec = inspect.getargspec
inspect.getargspec = getargspec
inspect.__patched_by_infi__ = True
if not getattr(inspect, "__patched_by_infi__", False):
monkeypatch_inspect()
<commit_msg>Support introspection hack for IPython<commit_after>
|
import functools
import inspect
def wraps(wrapped):
""" a convenience function on top of functools.wraps:
- adds the original function to the wrapped function as __wrapped__ attribute."""
def new_decorator(f):
returned = functools.wraps(wrapped)(f)
returned.__wrapped__ = wrapped
return returned
return new_decorator
def inspect_getargspec_patch(func):
"""calls inspect's getargspec with func.__wrapped__ if exists, else with func"""
return inspect._infi_patched_getargspec(_get_innner_func(func))
def ipython_getargspec_patch(func):
return _ipython_inspect_module._infi_patched_getargspec(_get_innner_func(func))
def _get_innner_func(f):
while True:
wrapped = getattr(f, "__wrapped__", None)
if wrapped is None:
return f
f = wrapped
_PATCHED_NAME_PREFIX = "_infi_patched_"
def monkey_patch(module, name, replacement):
original_name = _PATCHED_NAME_PREFIX + name
if getattr(module, original_name, None) is None:
setattr(module, original_name, getattr(module, name))
setattr(module, name, replacement)
monkey_patch(inspect, "getargspec", inspect_getargspec_patch)
_ipython_inspect_module = None
try:
# ipython 0.11
from IPython.core import oinspect as _ipython_inspect_module
except ImportError:
try:
# ipython 0.10.2
from IPython import OInspect as _ipython_inspect_module
except ImportError:
pass
if _ipython_inspect_module is not None:
monkey_patch(_ipython_inspect_module, "getargspec", ipython_getargspec_patch)
|
import functools
import inspect
def wraps(wrapped):
""" a convenience function on top of functools.wraps:
- adds the original function to the wrapped function as __wrapped__ attribute."""
def new_decorator(f):
returned = functools.wraps(wrapped)(f)
returned.__wrapped__ = wrapped
return returned
return new_decorator
def getargspec(func):
"""calls inspect's getargspec with func.__wrapped__ if exists, else with func"""
wrapped = getattr(func, "__wrapped__", None)
if wrapped is not None:
return getargspec(wrapped)
return inspect._getargspec(func)
def monkeypatch_inspect():
"""applies getarspec monkeypatch on inspect"""
inspect._getargspec = inspect.getargspec
inspect.getargspec = getargspec
inspect.__patched_by_infi__ = True
if not getattr(inspect, "__patched_by_infi__", False):
monkeypatch_inspect()
Support introspection hack for IPythonimport functools
import inspect
def wraps(wrapped):
""" a convenience function on top of functools.wraps:
- adds the original function to the wrapped function as __wrapped__ attribute."""
def new_decorator(f):
returned = functools.wraps(wrapped)(f)
returned.__wrapped__ = wrapped
return returned
return new_decorator
def inspect_getargspec_patch(func):
"""calls inspect's getargspec with func.__wrapped__ if exists, else with func"""
return inspect._infi_patched_getargspec(_get_innner_func(func))
def ipython_getargspec_patch(func):
return _ipython_inspect_module._infi_patched_getargspec(_get_innner_func(func))
def _get_innner_func(f):
while True:
wrapped = getattr(f, "__wrapped__", None)
if wrapped is None:
return f
f = wrapped
_PATCHED_NAME_PREFIX = "_infi_patched_"
def monkey_patch(module, name, replacement):
original_name = _PATCHED_NAME_PREFIX + name
if getattr(module, original_name, None) is None:
setattr(module, original_name, getattr(module, name))
setattr(module, name, replacement)
monkey_patch(inspect, "getargspec", inspect_getargspec_patch)
_ipython_inspect_module = None
try:
# ipython 0.11
from IPython.core import oinspect as _ipython_inspect_module
except ImportError:
try:
# ipython 0.10.2
from IPython import OInspect as _ipython_inspect_module
except ImportError:
pass
if _ipython_inspect_module is not None:
monkey_patch(_ipython_inspect_module, "getargspec", ipython_getargspec_patch)
|
<commit_before>import functools
import inspect
def wraps(wrapped):
""" a convenience function on top of functools.wraps:
- adds the original function to the wrapped function as __wrapped__ attribute."""
def new_decorator(f):
returned = functools.wraps(wrapped)(f)
returned.__wrapped__ = wrapped
return returned
return new_decorator
def getargspec(func):
"""calls inspect's getargspec with func.__wrapped__ if exists, else with func"""
wrapped = getattr(func, "__wrapped__", None)
if wrapped is not None:
return getargspec(wrapped)
return inspect._getargspec(func)
def monkeypatch_inspect():
"""applies getarspec monkeypatch on inspect"""
inspect._getargspec = inspect.getargspec
inspect.getargspec = getargspec
inspect.__patched_by_infi__ = True
if not getattr(inspect, "__patched_by_infi__", False):
monkeypatch_inspect()
<commit_msg>Support introspection hack for IPython<commit_after>import functools
import inspect
def wraps(wrapped):
""" a convenience function on top of functools.wraps:
- adds the original function to the wrapped function as __wrapped__ attribute."""
def new_decorator(f):
returned = functools.wraps(wrapped)(f)
returned.__wrapped__ = wrapped
return returned
return new_decorator
def inspect_getargspec_patch(func):
"""calls inspect's getargspec with func.__wrapped__ if exists, else with func"""
return inspect._infi_patched_getargspec(_get_innner_func(func))
def ipython_getargspec_patch(func):
return _ipython_inspect_module._infi_patched_getargspec(_get_innner_func(func))
def _get_innner_func(f):
while True:
wrapped = getattr(f, "__wrapped__", None)
if wrapped is None:
return f
f = wrapped
_PATCHED_NAME_PREFIX = "_infi_patched_"
def monkey_patch(module, name, replacement):
original_name = _PATCHED_NAME_PREFIX + name
if getattr(module, original_name, None) is None:
setattr(module, original_name, getattr(module, name))
setattr(module, name, replacement)
monkey_patch(inspect, "getargspec", inspect_getargspec_patch)
_ipython_inspect_module = None
try:
# ipython 0.11
from IPython.core import oinspect as _ipython_inspect_module
except ImportError:
try:
# ipython 0.10.2
from IPython import OInspect as _ipython_inspect_module
except ImportError:
pass
if _ipython_inspect_module is not None:
monkey_patch(_ipython_inspect_module, "getargspec", ipython_getargspec_patch)
|
90ca5fdd66d11cb0d746fb4ab006445ded860d69
|
modoboa_webmail/__init__.py
|
modoboa_webmail/__init__.py
|
# -*- coding: utf-8 -*-
"""DMARC related tools for Modoboa."""
from __future__ import unicode_literals
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
pass
default_app_config = "modoboa_webmail.apps.WebmailConfig"
|
# -*- coding: utf-8 -*-
"""DMARC related tools for Modoboa."""
from __future__ import unicode_literals
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
__version__ = '9.9.9'
default_app_config = "modoboa_webmail.apps.WebmailConfig"
|
Fix crash in development mode with python 3
|
Fix crash in development mode with python 3
|
Python
|
mit
|
modoboa/modoboa-webmail,modoboa/modoboa-webmail,modoboa/modoboa-webmail
|
# -*- coding: utf-8 -*-
"""DMARC related tools for Modoboa."""
from __future__ import unicode_literals
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
pass
default_app_config = "modoboa_webmail.apps.WebmailConfig"
Fix crash in development mode with python 3
|
# -*- coding: utf-8 -*-
"""DMARC related tools for Modoboa."""
from __future__ import unicode_literals
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
__version__ = '9.9.9'
default_app_config = "modoboa_webmail.apps.WebmailConfig"
|
<commit_before># -*- coding: utf-8 -*-
"""DMARC related tools for Modoboa."""
from __future__ import unicode_literals
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
pass
default_app_config = "modoboa_webmail.apps.WebmailConfig"
<commit_msg>Fix crash in development mode with python 3<commit_after>
|
# -*- coding: utf-8 -*-
"""DMARC related tools for Modoboa."""
from __future__ import unicode_literals
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
__version__ = '9.9.9'
default_app_config = "modoboa_webmail.apps.WebmailConfig"
|
# -*- coding: utf-8 -*-
"""DMARC related tools for Modoboa."""
from __future__ import unicode_literals
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
pass
default_app_config = "modoboa_webmail.apps.WebmailConfig"
Fix crash in development mode with python 3# -*- coding: utf-8 -*-
"""DMARC related tools for Modoboa."""
from __future__ import unicode_literals
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
__version__ = '9.9.9'
default_app_config = "modoboa_webmail.apps.WebmailConfig"
|
<commit_before># -*- coding: utf-8 -*-
"""DMARC related tools for Modoboa."""
from __future__ import unicode_literals
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
pass
default_app_config = "modoboa_webmail.apps.WebmailConfig"
<commit_msg>Fix crash in development mode with python 3<commit_after># -*- coding: utf-8 -*-
"""DMARC related tools for Modoboa."""
from __future__ import unicode_literals
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
__version__ = '9.9.9'
default_app_config = "modoboa_webmail.apps.WebmailConfig"
|
a0c63a21114cd0d42a18235e7d6c9249b05e571e
|
availsim/dht/__init__.py
|
availsim/dht/__init__.py
|
import chord, dhash, oracle, totalrecall
# For automatic use by availsim
known_types = {
'chord': chord,
'dhash': dhash.dhash,
'fragments': dhash.dhash_fragments,
'replica': dhash.dhash_replica,
'cates': dhash.dhash_cates,
'replica_durability_oracle': oracle.durability_oracle,
'replica_availability_oracle': oracle.availability_oracle,
'total_recall_replica': totalrecall.totalrecall_lazy_replica
}
|
import chord, dhash, oracle, totalrecall
# For automatic use by availsim
known_types = {
'chord': chord,
'dhash': dhash.dhash,
'fragments': dhash.dhash_fragments,
'replica': dhash.dhash_replica,
'cates': dhash.dhash_cates,
'durability_oracle_replica': oracle.durability_oracle,
'availability_oracle_replica': oracle.availability_oracle,
'total_recall_replica': totalrecall.totalrecall_lazy_replica
}
|
Rename oracle command line names for consistency and prefixfreeness.
|
Rename oracle command line names for consistency and prefixfreeness.
|
Python
|
mit
|
weidezhang/dht,sit/dht,sit/dht,sit/dht,weidezhang/dht,weidezhang/dht,sit/dht,weidezhang/dht,sit/dht,weidezhang/dht
|
import chord, dhash, oracle, totalrecall
# For automatic use by availsim
known_types = {
'chord': chord,
'dhash': dhash.dhash,
'fragments': dhash.dhash_fragments,
'replica': dhash.dhash_replica,
'cates': dhash.dhash_cates,
'replica_durability_oracle': oracle.durability_oracle,
'replica_availability_oracle': oracle.availability_oracle,
'total_recall_replica': totalrecall.totalrecall_lazy_replica
}
Rename oracle command line names for consistency and prefixfreeness.
|
import chord, dhash, oracle, totalrecall
# For automatic use by availsim
known_types = {
'chord': chord,
'dhash': dhash.dhash,
'fragments': dhash.dhash_fragments,
'replica': dhash.dhash_replica,
'cates': dhash.dhash_cates,
'durability_oracle_replica': oracle.durability_oracle,
'availability_oracle_replica': oracle.availability_oracle,
'total_recall_replica': totalrecall.totalrecall_lazy_replica
}
|
<commit_before>import chord, dhash, oracle, totalrecall
# For automatic use by availsim
known_types = {
'chord': chord,
'dhash': dhash.dhash,
'fragments': dhash.dhash_fragments,
'replica': dhash.dhash_replica,
'cates': dhash.dhash_cates,
'replica_durability_oracle': oracle.durability_oracle,
'replica_availability_oracle': oracle.availability_oracle,
'total_recall_replica': totalrecall.totalrecall_lazy_replica
}
<commit_msg>Rename oracle command line names for consistency and prefixfreeness.<commit_after>
|
import chord, dhash, oracle, totalrecall
# For automatic use by availsim
known_types = {
'chord': chord,
'dhash': dhash.dhash,
'fragments': dhash.dhash_fragments,
'replica': dhash.dhash_replica,
'cates': dhash.dhash_cates,
'durability_oracle_replica': oracle.durability_oracle,
'availability_oracle_replica': oracle.availability_oracle,
'total_recall_replica': totalrecall.totalrecall_lazy_replica
}
|
import chord, dhash, oracle, totalrecall
# For automatic use by availsim
known_types = {
'chord': chord,
'dhash': dhash.dhash,
'fragments': dhash.dhash_fragments,
'replica': dhash.dhash_replica,
'cates': dhash.dhash_cates,
'replica_durability_oracle': oracle.durability_oracle,
'replica_availability_oracle': oracle.availability_oracle,
'total_recall_replica': totalrecall.totalrecall_lazy_replica
}
Rename oracle command line names for consistency and prefixfreeness.import chord, dhash, oracle, totalrecall
# For automatic use by availsim
known_types = {
'chord': chord,
'dhash': dhash.dhash,
'fragments': dhash.dhash_fragments,
'replica': dhash.dhash_replica,
'cates': dhash.dhash_cates,
'durability_oracle_replica': oracle.durability_oracle,
'availability_oracle_replica': oracle.availability_oracle,
'total_recall_replica': totalrecall.totalrecall_lazy_replica
}
|
<commit_before>import chord, dhash, oracle, totalrecall
# For automatic use by availsim
known_types = {
'chord': chord,
'dhash': dhash.dhash,
'fragments': dhash.dhash_fragments,
'replica': dhash.dhash_replica,
'cates': dhash.dhash_cates,
'replica_durability_oracle': oracle.durability_oracle,
'replica_availability_oracle': oracle.availability_oracle,
'total_recall_replica': totalrecall.totalrecall_lazy_replica
}
<commit_msg>Rename oracle command line names for consistency and prefixfreeness.<commit_after>import chord, dhash, oracle, totalrecall
# For automatic use by availsim
known_types = {
'chord': chord,
'dhash': dhash.dhash,
'fragments': dhash.dhash_fragments,
'replica': dhash.dhash_replica,
'cates': dhash.dhash_cates,
'durability_oracle_replica': oracle.durability_oracle,
'availability_oracle_replica': oracle.availability_oracle,
'total_recall_replica': totalrecall.totalrecall_lazy_replica
}
|
129f85bda12d97ad5b51daa9a43e0990619ea496
|
fore/database.py
|
fore/database.py
|
import apikeys
import psycopg2
import utils
_conn = psycopg2.connect(apikeys.db_connect_string)
class Track(object):
def __init__(self, id, filename):
self.id = id
self.filename = filename
self.obj = utils.Magic_Anything("Track_"+str(id))
def get_mp3(some_specifier):
with _conn.cursor():
# TODO: Fetch an MP3 and return its raw data
pass
def get_many_mp3():
with _conn.cursor() as cur:
cur.execute("select id,filename from tracks order by id")
return [Track(int(row[0]),row[1]) for row in cur.fetchall()]
# Etcetera.
|
import apikeys
import psycopg2
import utils
_conn = psycopg2.connect(apikeys.db_connect_string)
class Track(object):
def __init__(self, id, filename):
self.id = id
self.filename = filename
# Add some stubby metadata (in an attribute that desperately
# wants to be renamed to something mildly useful)
self.obj = {
'id': id,
'artist': 'Picasso',
'title': 'Your Majesty',
}
def get_mp3(some_specifier):
with _conn.cursor():
# TODO: Fetch an MP3 and return its raw data
pass
def get_many_mp3():
with _conn.cursor() as cur:
cur.execute("select id,filename from tracks order by id")
return [Track(int(row[0]),row[1]) for row in cur.fetchall()]
# Etcetera.
|
Change the metadata object from a Magic_Anything to a straight dict
|
Change the metadata object from a Magic_Anything to a straight dict
Lose the magic! Lose the magic! (That is *so* not what Anna said.)
|
Python
|
artistic-2.0
|
MikeiLL/appension,MikeiLL/appension,Rosuav/appension,Rosuav/appension,Rosuav/appension,Rosuav/appension,MikeiLL/appension,MikeiLL/appension
|
import apikeys
import psycopg2
import utils
_conn = psycopg2.connect(apikeys.db_connect_string)
class Track(object):
def __init__(self, id, filename):
self.id = id
self.filename = filename
self.obj = utils.Magic_Anything("Track_"+str(id))
def get_mp3(some_specifier):
with _conn.cursor():
# TODO: Fetch an MP3 and return its raw data
pass
def get_many_mp3():
with _conn.cursor() as cur:
cur.execute("select id,filename from tracks order by id")
return [Track(int(row[0]),row[1]) for row in cur.fetchall()]
# Etcetera.
Change the metadata object from a Magic_Anything to a straight dict
Lose the magic! Lose the magic! (That is *so* not what Anna said.)
|
import apikeys
import psycopg2
import utils
_conn = psycopg2.connect(apikeys.db_connect_string)
class Track(object):
def __init__(self, id, filename):
self.id = id
self.filename = filename
# Add some stubby metadata (in an attribute that desperately
# wants to be renamed to something mildly useful)
self.obj = {
'id': id,
'artist': 'Picasso',
'title': 'Your Majesty',
}
def get_mp3(some_specifier):
with _conn.cursor():
# TODO: Fetch an MP3 and return its raw data
pass
def get_many_mp3():
with _conn.cursor() as cur:
cur.execute("select id,filename from tracks order by id")
return [Track(int(row[0]),row[1]) for row in cur.fetchall()]
# Etcetera.
|
<commit_before>import apikeys
import psycopg2
import utils
_conn = psycopg2.connect(apikeys.db_connect_string)
class Track(object):
def __init__(self, id, filename):
self.id = id
self.filename = filename
self.obj = utils.Magic_Anything("Track_"+str(id))
def get_mp3(some_specifier):
with _conn.cursor():
# TODO: Fetch an MP3 and return its raw data
pass
def get_many_mp3():
with _conn.cursor() as cur:
cur.execute("select id,filename from tracks order by id")
return [Track(int(row[0]),row[1]) for row in cur.fetchall()]
# Etcetera.
<commit_msg>Change the metadata object from a Magic_Anything to a straight dict
Lose the magic! Lose the magic! (That is *so* not what Anna said.)<commit_after>
|
import apikeys
import psycopg2
import utils
_conn = psycopg2.connect(apikeys.db_connect_string)
class Track(object):
def __init__(self, id, filename):
self.id = id
self.filename = filename
# Add some stubby metadata (in an attribute that desperately
# wants to be renamed to something mildly useful)
self.obj = {
'id': id,
'artist': 'Picasso',
'title': 'Your Majesty',
}
def get_mp3(some_specifier):
with _conn.cursor():
# TODO: Fetch an MP3 and return its raw data
pass
def get_many_mp3():
with _conn.cursor() as cur:
cur.execute("select id,filename from tracks order by id")
return [Track(int(row[0]),row[1]) for row in cur.fetchall()]
# Etcetera.
|
import apikeys
import psycopg2
import utils
_conn = psycopg2.connect(apikeys.db_connect_string)
class Track(object):
def __init__(self, id, filename):
self.id = id
self.filename = filename
self.obj = utils.Magic_Anything("Track_"+str(id))
def get_mp3(some_specifier):
with _conn.cursor():
# TODO: Fetch an MP3 and return its raw data
pass
def get_many_mp3():
with _conn.cursor() as cur:
cur.execute("select id,filename from tracks order by id")
return [Track(int(row[0]),row[1]) for row in cur.fetchall()]
# Etcetera.
Change the metadata object from a Magic_Anything to a straight dict
Lose the magic! Lose the magic! (That is *so* not what Anna said.)import apikeys
import psycopg2
import utils
_conn = psycopg2.connect(apikeys.db_connect_string)
class Track(object):
def __init__(self, id, filename):
self.id = id
self.filename = filename
# Add some stubby metadata (in an attribute that desperately
# wants to be renamed to something mildly useful)
self.obj = {
'id': id,
'artist': 'Picasso',
'title': 'Your Majesty',
}
def get_mp3(some_specifier):
with _conn.cursor():
# TODO: Fetch an MP3 and return its raw data
pass
def get_many_mp3():
with _conn.cursor() as cur:
cur.execute("select id,filename from tracks order by id")
return [Track(int(row[0]),row[1]) for row in cur.fetchall()]
# Etcetera.
|
<commit_before>import apikeys
import psycopg2
import utils
_conn = psycopg2.connect(apikeys.db_connect_string)
class Track(object):
def __init__(self, id, filename):
self.id = id
self.filename = filename
self.obj = utils.Magic_Anything("Track_"+str(id))
def get_mp3(some_specifier):
with _conn.cursor():
# TODO: Fetch an MP3 and return its raw data
pass
def get_many_mp3():
with _conn.cursor() as cur:
cur.execute("select id,filename from tracks order by id")
return [Track(int(row[0]),row[1]) for row in cur.fetchall()]
# Etcetera.
<commit_msg>Change the metadata object from a Magic_Anything to a straight dict
Lose the magic! Lose the magic! (That is *so* not what Anna said.)<commit_after>import apikeys
import psycopg2
import utils
_conn = psycopg2.connect(apikeys.db_connect_string)
class Track(object):
def __init__(self, id, filename):
self.id = id
self.filename = filename
# Add some stubby metadata (in an attribute that desperately
# wants to be renamed to something mildly useful)
self.obj = {
'id': id,
'artist': 'Picasso',
'title': 'Your Majesty',
}
def get_mp3(some_specifier):
with _conn.cursor():
# TODO: Fetch an MP3 and return its raw data
pass
def get_many_mp3():
with _conn.cursor() as cur:
cur.execute("select id,filename from tracks order by id")
return [Track(int(row[0]),row[1]) for row in cur.fetchall()]
# Etcetera.
|
384d57efa59665f0dd47c07062a8177a2eedde9a
|
run_tests.py
|
run_tests.py
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
if tests.wasSuccessful() == True:
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
#!/usr/bin/python
import optparse
import sys
import warnings
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
if tests.wasSuccessful() == True:
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
warnings.warn('Trying default SDK path.', RuntimeWarning)
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
Replace print statement with `warnings.warn`.
|
Replace print statement with `warnings.warn`.
Also so that it doesn't need to be converted for Python3 compat.
|
Python
|
mit
|
verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,bvisness/the-blue-alliance,1fish2/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,verycumbersome/the-blue-alliance,1fish2/the-bl
ue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
if tests.wasSuccessful() == True:
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)Replace print statement with `warnings.warn`.
Also so that it doesn't need to be converted for Python3 compat.
|
#!/usr/bin/python
import optparse
import sys
import warnings
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
if tests.wasSuccessful() == True:
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
warnings.warn('Trying default SDK path.', RuntimeWarning)
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
<commit_before>#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
if tests.wasSuccessful() == True:
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)<commit_msg>Replace print statement with `warnings.warn`.
Also so that it doesn't need to be converted for Python3 compat.<commit_after>
|
#!/usr/bin/python
import optparse
import sys
import warnings
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
if tests.wasSuccessful() == True:
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
warnings.warn('Trying default SDK path.', RuntimeWarning)
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
if tests.wasSuccessful() == True:
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)Replace print statement with `warnings.warn`.
Also so that it doesn't need to be converted for Python3 compat.#!/usr/bin/python
import optparse
import sys
import warnings
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
if tests.wasSuccessful() == True:
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
warnings.warn('Trying default SDK path.', RuntimeWarning)
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
<commit_before>#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
if tests.wasSuccessful() == True:
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)<commit_msg>Replace print statement with `warnings.warn`.
Also so that it doesn't need to be converted for Python3 compat.<commit_after>#!/usr/bin/python
import optparse
import sys
import warnings
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
if tests.wasSuccessful() == True:
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
warnings.warn('Trying default SDK path.', RuntimeWarning)
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
04d7e76cf372802e99ff3108cccd836d7aada0df
|
games/views/installers.py
|
games/views/installers.py
|
from __future__ import absolute_import
from rest_framework import generics
from reversion.models import Version
from common.permissions import IsAdminOrReadOnly
from games import models, serializers
class InstallerListView(generics.ListAPIView):
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerRevisionListView(generics.ListAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_queryset(self):
print "InstallerRevisionListView"
installer_id = self.request.parser_context['kwargs']['pk']
versions = []
for version in Version.objects.filter(content_type__model='installer',
object_id=installer_id):
versions.append(models.InstallerRevision(version.id))
return versions
class InstallerRevisionDetailView(generics.RetrieveAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_object(self):
revision_id = self.request.parser_context['kwargs']['pk']
version = models.InstallerRevision(revision_id)
return version
|
from __future__ import absolute_import
from rest_framework import generics
from reversion.models import Version
from common.permissions import IsAdminOrReadOnly
from games import models, serializers
class InstallerListView(generics.ListAPIView):
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerRevisionListView(generics.ListAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_queryset(self):
installer_id = self.request.parser_context['kwargs']['pk']
return [
models.InstallerRevision(version.id)
for version
in Version.objects.filter(
content_type__model='installer', object_id=installer_id
)
]
class InstallerRevisionDetailView(generics.RetrieveAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_object(self):
revision_id = self.request.parser_context['kwargs']['pk']
return models.InstallerRevision(revision_id)
|
Simplify Installer revision API views
|
Simplify Installer revision API views
|
Python
|
agpl-3.0
|
lutris/website,lutris/website,Turupawn/website,lutris/website,Turupawn/website,lutris/website,Turupawn/website,Turupawn/website
|
from __future__ import absolute_import
from rest_framework import generics
from reversion.models import Version
from common.permissions import IsAdminOrReadOnly
from games import models, serializers
class InstallerListView(generics.ListAPIView):
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerRevisionListView(generics.ListAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_queryset(self):
print "InstallerRevisionListView"
installer_id = self.request.parser_context['kwargs']['pk']
versions = []
for version in Version.objects.filter(content_type__model='installer',
object_id=installer_id):
versions.append(models.InstallerRevision(version.id))
return versions
class InstallerRevisionDetailView(generics.RetrieveAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_object(self):
revision_id = self.request.parser_context['kwargs']['pk']
version = models.InstallerRevision(revision_id)
return version
Simplify Installer revision API views
|
from __future__ import absolute_import
from rest_framework import generics
from reversion.models import Version
from common.permissions import IsAdminOrReadOnly
from games import models, serializers
class InstallerListView(generics.ListAPIView):
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerRevisionListView(generics.ListAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_queryset(self):
installer_id = self.request.parser_context['kwargs']['pk']
return [
models.InstallerRevision(version.id)
for version
in Version.objects.filter(
content_type__model='installer', object_id=installer_id
)
]
class InstallerRevisionDetailView(generics.RetrieveAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_object(self):
revision_id = self.request.parser_context['kwargs']['pk']
return models.InstallerRevision(revision_id)
|
<commit_before>from __future__ import absolute_import
from rest_framework import generics
from reversion.models import Version
from common.permissions import IsAdminOrReadOnly
from games import models, serializers
class InstallerListView(generics.ListAPIView):
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerRevisionListView(generics.ListAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_queryset(self):
print "InstallerRevisionListView"
installer_id = self.request.parser_context['kwargs']['pk']
versions = []
for version in Version.objects.filter(content_type__model='installer',
object_id=installer_id):
versions.append(models.InstallerRevision(version.id))
return versions
class InstallerRevisionDetailView(generics.RetrieveAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_object(self):
revision_id = self.request.parser_context['kwargs']['pk']
version = models.InstallerRevision(revision_id)
return version
<commit_msg>Simplify Installer revision API views<commit_after>
|
from __future__ import absolute_import
from rest_framework import generics
from reversion.models import Version
from common.permissions import IsAdminOrReadOnly
from games import models, serializers
class InstallerListView(generics.ListAPIView):
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerRevisionListView(generics.ListAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_queryset(self):
installer_id = self.request.parser_context['kwargs']['pk']
return [
models.InstallerRevision(version.id)
for version
in Version.objects.filter(
content_type__model='installer', object_id=installer_id
)
]
class InstallerRevisionDetailView(generics.RetrieveAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_object(self):
revision_id = self.request.parser_context['kwargs']['pk']
return models.InstallerRevision(revision_id)
|
from __future__ import absolute_import
from rest_framework import generics
from reversion.models import Version
from common.permissions import IsAdminOrReadOnly
from games import models, serializers
class InstallerListView(generics.ListAPIView):
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerRevisionListView(generics.ListAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_queryset(self):
print "InstallerRevisionListView"
installer_id = self.request.parser_context['kwargs']['pk']
versions = []
for version in Version.objects.filter(content_type__model='installer',
object_id=installer_id):
versions.append(models.InstallerRevision(version.id))
return versions
class InstallerRevisionDetailView(generics.RetrieveAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_object(self):
revision_id = self.request.parser_context['kwargs']['pk']
version = models.InstallerRevision(revision_id)
return version
Simplify Installer revision API viewsfrom __future__ import absolute_import
from rest_framework import generics
from reversion.models import Version
from common.permissions import IsAdminOrReadOnly
from games import models, serializers
class InstallerListView(generics.ListAPIView):
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerRevisionListView(generics.ListAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_queryset(self):
installer_id = self.request.parser_context['kwargs']['pk']
return [
models.InstallerRevision(version.id)
for version
in Version.objects.filter(
content_type__model='installer', object_id=installer_id
)
]
class InstallerRevisionDetailView(generics.RetrieveAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_object(self):
revision_id = self.request.parser_context['kwargs']['pk']
return models.InstallerRevision(revision_id)
|
<commit_before>from __future__ import absolute_import
from rest_framework import generics
from reversion.models import Version
from common.permissions import IsAdminOrReadOnly
from games import models, serializers
class InstallerListView(generics.ListAPIView):
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerRevisionListView(generics.ListAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_queryset(self):
print "InstallerRevisionListView"
installer_id = self.request.parser_context['kwargs']['pk']
versions = []
for version in Version.objects.filter(content_type__model='installer',
object_id=installer_id):
versions.append(models.InstallerRevision(version.id))
return versions
class InstallerRevisionDetailView(generics.RetrieveAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_object(self):
revision_id = self.request.parser_context['kwargs']['pk']
version = models.InstallerRevision(revision_id)
return version
<commit_msg>Simplify Installer revision API views<commit_after>from __future__ import absolute_import
from rest_framework import generics
from reversion.models import Version
from common.permissions import IsAdminOrReadOnly
from games import models, serializers
class InstallerListView(generics.ListAPIView):
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerSerializer
queryset = models.Installer.objects.all()
class InstallerRevisionListView(generics.ListAPIView):
permission_classes = [IsAdminOrReadOnly]
serializer_class = serializers.InstallerRevisionSerializer
def get_queryset(self):
installer_id = self.request.parser_context['kwargs']['pk']
return [
models.InstallerRevision(version.id)
for version
in Version.objects.filter(
content_type__model='installer', object_id=installer_id
)
]
class InstallerRevisionDetailView(generics.RetrieveAPIView):
    """Retrieve one installer revision, looked up by its version id."""
    permission_classes = [IsAdminOrReadOnly]
    serializer_class = serializers.InstallerRevisionSerializer

    def get_object(self):
        # ``pk`` here is the reversion Version id, not an Installer id.
        revision_id = self.request.parser_context['kwargs']['pk']
        return models.InstallerRevision(revision_id)
|
3f4844c61c4bb8d2e578727ed220de07b0385a74
|
speaker/appstore/review.py
|
speaker/appstore/review.py
|
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
    """Fetch the most recent App Store reviews for app *code* in *region*.

    :param code: iTunes app id.
    :param region: two-letter store region code (key into ``REGIONS``).
    :param buffer_size: accepted for interface compatibility; unused here.
    :return: list of review dicts (id, title, content, name, score,
        version, lang, region); empty when the feed has no entries.
    """
    url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
    body = yield from request(url)
    reviews = list()
    feed = json.loads(body.decode('utf-8')).get('feed')
    if feed is None:
        return reviews
    entries = feed.get('entry')
    if entries is None:
        return reviews
    for entry in entries:
        try:
            if entry.get('author') is None:
                continue
            title = entry['title']['label']
            content = entry['content']['label']
            reviews.append({
                'id': entry['id']['label'],
                'title': title,
                'content': content,
                'name': entry['author']['name']['label'],
                'score': score(entry['im:rating']['label']),
                'version': entry['im:version']['label'],
                'lang': find_out_language(REGIONS[region]['langs'], content, title),
                'region': region
            })
        except (KeyError, IndexError):
            # Dict lookups on a malformed entry raise KeyError -- the
            # original caught only IndexError, so a single incomplete
            # entry aborted the whole parse. Skip just that entry.
            pass
    return reviews
def score(rating):
    """Convert a 1-5 star rating (string or int) to a 0-100 score."""
    stars = int(rating)
    return stars * 20
|
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
    """Fetch the most recent App Store reviews for app *code* in *region*.

    :param code: iTunes app id.
    :param region: two-letter store region code (key into ``REGIONS``).
    :param buffer_size: accepted for interface compatibility; unused here.
    :return: list of review dicts (id, title, content, name, score,
        version, lang, region); empty when the feed has no entries.
    """
    url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
    body = yield from request(url)
    reviews = list()
    feed = json.loads(body.decode('utf-8')).get('feed')
    if feed is None:
        return reviews
    if region == 'sg':
        entries = feed.get('entry')
    else:
        # Non-'sg' regions nest the payload under a second 'feed' key.
        # Guard with ``or {}`` so a missing inner feed falls through to
        # the empty-entries path instead of raising AttributeError.
        entries = (feed.get('feed') or {}).get('entry')
    if entries is None:
        return reviews
    for entry in entries:
        try:
            if entry.get('author') is None:
                continue
            title = entry['title']['label']
            content = entry['content']['label']
            reviews.append({
                'id': entry['id']['label'],
                'title': title,
                'content': content,
                'name': entry['author']['name']['label'],
                'score': score(entry['im:rating']['label']),
                'version': entry['im:version']['label'],
                'lang': find_out_language(REGIONS[region]['langs'], content, title),
                'region': region
            })
        except (KeyError, IndexError):
            # Dict lookups on a malformed entry raise KeyError -- the
            # original caught only IndexError, so a single incomplete
            # entry aborted the whole parse. Skip just that entry.
            pass
    return reviews
def score(rating):
    # Map a 1-5 star rating (string or int) onto a 0-100 scale.
    return int(rating) * 20
|
Fix appstore json parsing process.
|
Fix appstore json parsing process.
|
Python
|
mit
|
oldsup/clerk
|
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
entries = feed.get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
Fix appstore json parsing process.
|
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
if region == 'sg':
entries = feed.get('entry')
else:
entries = feed.get('feed').get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
|
<commit_before>import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
entries = feed.get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
<commit_msg>Fix appstore json parsing process.<commit_after>
|
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
if region == 'sg':
entries = feed.get('entry')
else:
entries = feed.get('feed').get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
|
import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
entries = feed.get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
Fix appstore json parsing process.import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
if region == 'sg':
entries = feed.get('entry')
else:
entries = feed.get('feed').get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
|
<commit_before>import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
entries = feed.get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
<commit_msg>Fix appstore json parsing process.<commit_after>import asyncio
import json
from helper.filter import remove_emoji
from helper.lang import find_out_language
from lxml import etree
from datetime import datetime
from helper.http_client import request
from speaker.appstore import NAMESPACE, REGIONS
@asyncio.coroutine
def latest_reviews(code, region, buffer_size):
url = "https://itunes.apple.com/{}/rss/customerreviews/id={}/sortBy=mostRecent/json".format(region, code)
body = yield from request(url)
reviews = list()
feed = json.loads(body.decode('utf-8')).get('feed')
if feed is None:
return reviews
if region == 'sg':
entries = feed.get('entry')
else:
entries = feed.get('feed').get('entry')
if entries is None:
return reviews
for entry in entries:
try:
if entry.get('author') is None:
continue
title = entry['title']['label']
content = entry['content']['label']
reviews.append({
'id': entry['id']['label'],
'title': title,
'content': content,
'name': entry['author']['name']['label'],
'score': score(entry['im:rating']['label']),
'version': entry['im:version']['label'],
'lang': find_out_language(REGIONS[region]['langs'], content, title),
'region': region
})
except IndexError:
pass
return reviews
def score(rating):
return int(rating) * 20
|
c9580f8d700308df2d3bf5710261314d402fc826
|
democracy_club/settings/testing.py
|
democracy_club/settings/testing.py
|
from .base import * # noqa

# Test-only database settings: a local PostGIS database reached with the
# ``postgres`` role, no password, and default host/port.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'dc_website_test',
        'USER': 'postgres',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}
|
from .base import * # noqa

# Test-only database settings: a local PostGIS database reached with the
# ``postgres`` role, no password, and default host/port.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'dc_website_test',
        'USER': 'postgres',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}

# Placeholder Trello credentials so code that reads these settings can be
# imported during tests without real API access.
BACKLOG_TRELLO_BOARD_ID = "empty"
BACKLOG_TRELLO_DEFAULT_LIST_ID = "empty"
BACKLOG_TRELLO_KEY = "empty"
BACKLOG_TRELLO_TOKEN = "empty"
|
Add placeholder values for trello items in tests
|
Add placeholder values for trello items in tests
|
Python
|
bsd-3-clause
|
DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website
|
from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
Add placeholder values for trello items in tests
|
from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
BACKLOG_TRELLO_BOARD_ID = "empty"
BACKLOG_TRELLO_DEFAULT_LIST_ID = "empty"
BACKLOG_TRELLO_KEY = "empty"
BACKLOG_TRELLO_TOKEN = "empty"
|
<commit_before>from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
<commit_msg>Add placeholder values for trello items in tests<commit_after>
|
from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
BACKLOG_TRELLO_BOARD_ID = "empty"
BACKLOG_TRELLO_DEFAULT_LIST_ID = "empty"
BACKLOG_TRELLO_KEY = "empty"
BACKLOG_TRELLO_TOKEN = "empty"
|
from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
Add placeholder values for trello items in testsfrom .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
BACKLOG_TRELLO_BOARD_ID = "empty"
BACKLOG_TRELLO_DEFAULT_LIST_ID = "empty"
BACKLOG_TRELLO_KEY = "empty"
BACKLOG_TRELLO_TOKEN = "empty"
|
<commit_before>from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
<commit_msg>Add placeholder values for trello items in tests<commit_after>from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
BACKLOG_TRELLO_BOARD_ID = "empty"
BACKLOG_TRELLO_DEFAULT_LIST_ID = "empty"
BACKLOG_TRELLO_KEY = "empty"
BACKLOG_TRELLO_TOKEN = "empty"
|
fea2cbcbc80d76a75f41fb81ea6ded93312bd11b
|
imhotep_rubocop/plugin.py
|
imhotep_rubocop/plugin.py
|
from imhotep.tools import Tool
from collections import defaultdict
import json
import os
class RubyLintLinter(Tool):
    """Imhotep linter tool that shells out to rubocop (JSON formatter)."""

    def invoke(self, dirname, filenames=set()):
        """Run rubocop over *dirname* (or only *filenames*) and return
        ``{repo_relative_path: {line_number: [unique_messages]}}``.

        :param dirname: checkout root; reported paths are made relative
            to it, without a leading slash (e.g. ``db/file.rb``).
        :param filenames: optional iterable of repo-relative paths; when
            empty, every ``*.rb`` file under *dirname* is linted.
        """
        retval = defaultdict(lambda: defaultdict(list))
        if len(filenames) == 0:
            cmd = "find %s -name '*.rb' | xargs rubocop -f j" % dirname
        else:
            # endswith() instead of substring match so files such as
            # 'foo.rbx' or 'app.rb.bak' are not linted by mistake.
            ruby_files = ["%s/%s" % (dirname, filename)
                          for filename in filenames
                          if filename.endswith('.rb')]
            # NOTE(review): paths are interpolated into a shell command;
            # names with spaces or shell metacharacters will break or be
            # mis-parsed. A list-argv, shell=False execution would be
            # safer -- confirm what self.executor supports.
            cmd = "rubocop -f j %s" % (" ".join(ruby_files))
        try:
            output = json.loads(self.executor(cmd))
            for linted_file in output['files']:
                # Report paths relative to the repo, without a leading
                # slash, e.g. db/file.rb
                file_name = os.path.abspath(linted_file['path'])
                file_name = file_name.replace(dirname, "")[1:]
                for offence in linted_file['offences']:
                    line_number = str(offence['location']['line'])
                    retval[str(file_name)][line_number].append(
                        str(offence['message']))
                    # De-duplicate repeated messages for the same line.
                    retval[str(file_name)][line_number] = list(
                        set(retval[str(file_name)][line_number]))
        except Exception:
            # Linting is best-effort: any failure (rubocop missing, bad
            # JSON, unexpected schema) returns what was collected so far.
            # ``except Exception`` replaces the original bare ``except``,
            # which also swallowed KeyboardInterrupt/SystemExit.
            pass
        return retval
|
from imhotep.tools import Tool
from collections import defaultdict
import json
import os
class RubyLintLinter(Tool):
    """Imhotep linter tool that shells out to rubocop (JSON formatter)."""

    def invoke(self, dirname, filenames=set(), linter_configs=set()):
        """Run rubocop over *dirname* (or only *filenames*) and return
        ``{repo_relative_path: {line_number: [unique_messages]}}``.

        :param dirname: checkout root; reported paths are made relative
            to it, without a leading slash (e.g. ``db/file.rb``).
        :param filenames: optional iterable of repo-relative paths; when
            empty, every ``*.rb`` file under *dirname* is linted.
        :param linter_configs: iterable of config paths; the last one
            whose name contains 'rubocop' is passed to rubocop via ``-c``.
        """
        retval = defaultdict(lambda: defaultdict(list))
        config = ''
        for config_file in linter_configs:
            if 'rubocop' in config_file:
                # Trailing space keeps the command well-formed when the
                # flag is interpolated below.
                config = "-c %s " % config_file
        if len(filenames) == 0:
            cmd = "find %s -name '*.rb' | xargs rubocop %s -f j" % (dirname, config)
        else:
            # endswith() instead of substring match so files such as
            # 'foo.rbx' or 'app.rb.bak' are not linted by mistake.
            ruby_files = ["%s/%s" % (dirname, filename)
                          for filename in filenames
                          if filename.endswith('.rb')]
            # NOTE(review): paths are interpolated into a shell command;
            # names with spaces or shell metacharacters will break or be
            # mis-parsed. A list-argv, shell=False execution would be
            # safer -- confirm what self.executor supports.
            cmd = "rubocop %s -f j %s" % (config, " ".join(ruby_files))
        try:
            output = json.loads(self.executor(cmd))
            for linted_file in output['files']:
                # Report paths relative to the repo, without a leading
                # slash, e.g. db/file.rb
                file_name = os.path.abspath(linted_file['path'])
                file_name = file_name.replace(dirname, "")[1:]
                for offence in linted_file['offences']:
                    line_number = str(offence['location']['line'])
                    retval[str(file_name)][line_number].append(
                        str(offence['message']))
                    # De-duplicate repeated messages for the same line.
                    retval[str(file_name)][line_number] = list(
                        set(retval[str(file_name)][line_number]))
        except Exception:
            # Linting is best-effort: any failure (rubocop missing, bad
            # JSON, unexpected schema) returns what was collected so far.
            # ``except Exception`` replaces the original bare ``except``,
            # which also swallowed KeyboardInterrupt/SystemExit.
            pass
        return retval
|
Update to support config files that are passed to it.
|
Update to support config files that are passed to it.
|
Python
|
mit
|
scottjab/imhotep_rubocop
|
from imhotep.tools import Tool
from collections import defaultdict
import json
import os
class RubyLintLinter(Tool):
def invoke(self, dirname, filenames=set()):
retval = defaultdict(lambda: defaultdict(list))
if len(filenames) == 0:
cmd = "find %s -name '*.rb' | xargs rubocop -f j" % dirname
else:
ruby_files = []
for filename in filenames:
if '.rb' in filename:
ruby_files.append("%s/%s" % (dirname, filename))
cmd = "rubocop -f j %s" % (" ".join(ruby_files))
try:
output = json.loads(self.executor(cmd))
for linted_file in output['files']:
# The path should be relative to the repo,
# without a leading slash
# example db/file.rb
file_name = os.path.abspath(linted_file['path'])
file_name = file_name.replace(dirname, "")[1:]
for offence in linted_file['offences']:
line_number = str(offence['location']['line'])
retval[str(file_name)][line_number].append(
str(offence['message']))
retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number]))
except:
pass
return retval
Update to support config files that are passed to it.
|
from imhotep.tools import Tool
from collections import defaultdict
import json
import os
class RubyLintLinter(Tool):
def invoke(self, dirname, filenames=set(), linter_configs=set()):
retval = defaultdict(lambda: defaultdict(list))
config = ''
for config_file in linter_configs:
if 'rubocop' in config_file:
config = "-c %s " % config_file
if len(filenames) == 0:
cmd = "find %s -name '*.rb' | xargs rubocop %s -f j" % (dirname, config)
else:
ruby_files = []
for filename in filenames:
if '.rb' in filename:
ruby_files.append("%s/%s" % (dirname, filename))
cmd = "rubocop %s -f j %s" % (config, " ".join(ruby_files))
try:
output = json.loads(self.executor(cmd))
for linted_file in output['files']:
# The path should be relative to the repo,
# without a leading slash
# example db/file.rb
file_name = os.path.abspath(linted_file['path'])
file_name = file_name.replace(dirname, "")[1:]
for offence in linted_file['offences']:
line_number = str(offence['location']['line'])
retval[str(file_name)][line_number].append(
str(offence['message']))
retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number]))
except:
pass
return retval
|
<commit_before>from imhotep.tools import Tool
from collections import defaultdict
import json
import os
class RubyLintLinter(Tool):
def invoke(self, dirname, filenames=set()):
retval = defaultdict(lambda: defaultdict(list))
if len(filenames) == 0:
cmd = "find %s -name '*.rb' | xargs rubocop -f j" % dirname
else:
ruby_files = []
for filename in filenames:
if '.rb' in filename:
ruby_files.append("%s/%s" % (dirname, filename))
cmd = "rubocop -f j %s" % (" ".join(ruby_files))
try:
output = json.loads(self.executor(cmd))
for linted_file in output['files']:
# The path should be relative to the repo,
# without a leading slash
# example db/file.rb
file_name = os.path.abspath(linted_file['path'])
file_name = file_name.replace(dirname, "")[1:]
for offence in linted_file['offences']:
line_number = str(offence['location']['line'])
retval[str(file_name)][line_number].append(
str(offence['message']))
retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number]))
except:
pass
return retval
<commit_msg>Update to support config files that are passed to it.<commit_after>
|
from imhotep.tools import Tool
from collections import defaultdict
import json
import os
class RubyLintLinter(Tool):
def invoke(self, dirname, filenames=set(), linter_configs=set()):
retval = defaultdict(lambda: defaultdict(list))
config = ''
for config_file in linter_configs:
if 'rubocop' in config_file:
config = "-c %s " % config_file
if len(filenames) == 0:
cmd = "find %s -name '*.rb' | xargs rubocop %s -f j" % (dirname, config)
else:
ruby_files = []
for filename in filenames:
if '.rb' in filename:
ruby_files.append("%s/%s" % (dirname, filename))
cmd = "rubocop %s -f j %s" % (config, " ".join(ruby_files))
try:
output = json.loads(self.executor(cmd))
for linted_file in output['files']:
# The path should be relative to the repo,
# without a leading slash
# example db/file.rb
file_name = os.path.abspath(linted_file['path'])
file_name = file_name.replace(dirname, "")[1:]
for offence in linted_file['offences']:
line_number = str(offence['location']['line'])
retval[str(file_name)][line_number].append(
str(offence['message']))
retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number]))
except:
pass
return retval
|
from imhotep.tools import Tool
from collections import defaultdict
import json
import os
class RubyLintLinter(Tool):
def invoke(self, dirname, filenames=set()):
retval = defaultdict(lambda: defaultdict(list))
if len(filenames) == 0:
cmd = "find %s -name '*.rb' | xargs rubocop -f j" % dirname
else:
ruby_files = []
for filename in filenames:
if '.rb' in filename:
ruby_files.append("%s/%s" % (dirname, filename))
cmd = "rubocop -f j %s" % (" ".join(ruby_files))
try:
output = json.loads(self.executor(cmd))
for linted_file in output['files']:
# The path should be relative to the repo,
# without a leading slash
# example db/file.rb
file_name = os.path.abspath(linted_file['path'])
file_name = file_name.replace(dirname, "")[1:]
for offence in linted_file['offences']:
line_number = str(offence['location']['line'])
retval[str(file_name)][line_number].append(
str(offence['message']))
retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number]))
except:
pass
return retval
Update to support config files that are passed to it.from imhotep.tools import Tool
from collections import defaultdict
import json
import os
class RubyLintLinter(Tool):
def invoke(self, dirname, filenames=set(), linter_configs=set()):
retval = defaultdict(lambda: defaultdict(list))
config = ''
for config_file in linter_configs:
if 'rubocop' in config_file:
config = "-c %s " % config_file
if len(filenames) == 0:
cmd = "find %s -name '*.rb' | xargs rubocop %s -f j" % (dirname, config)
else:
ruby_files = []
for filename in filenames:
if '.rb' in filename:
ruby_files.append("%s/%s" % (dirname, filename))
cmd = "rubocop %s -f j %s" % (config, " ".join(ruby_files))
try:
output = json.loads(self.executor(cmd))
for linted_file in output['files']:
# The path should be relative to the repo,
# without a leading slash
# example db/file.rb
file_name = os.path.abspath(linted_file['path'])
file_name = file_name.replace(dirname, "")[1:]
for offence in linted_file['offences']:
line_number = str(offence['location']['line'])
retval[str(file_name)][line_number].append(
str(offence['message']))
retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number]))
except:
pass
return retval
|
<commit_before>from imhotep.tools import Tool
from collections import defaultdict
import json
import os
class RubyLintLinter(Tool):
def invoke(self, dirname, filenames=set()):
retval = defaultdict(lambda: defaultdict(list))
if len(filenames) == 0:
cmd = "find %s -name '*.rb' | xargs rubocop -f j" % dirname
else:
ruby_files = []
for filename in filenames:
if '.rb' in filename:
ruby_files.append("%s/%s" % (dirname, filename))
cmd = "rubocop -f j %s" % (" ".join(ruby_files))
try:
output = json.loads(self.executor(cmd))
for linted_file in output['files']:
# The path should be relative to the repo,
# without a leading slash
# example db/file.rb
file_name = os.path.abspath(linted_file['path'])
file_name = file_name.replace(dirname, "")[1:]
for offence in linted_file['offences']:
line_number = str(offence['location']['line'])
retval[str(file_name)][line_number].append(
str(offence['message']))
retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number]))
except:
pass
return retval
<commit_msg>Update to support config files that are passed to it.<commit_after>from imhotep.tools import Tool
from collections import defaultdict
import json
import os
class RubyLintLinter(Tool):
def invoke(self, dirname, filenames=set(), linter_configs=set()):
retval = defaultdict(lambda: defaultdict(list))
config = ''
for config_file in linter_configs:
if 'rubocop' in config_file:
config = "-c %s " % config_file
if len(filenames) == 0:
cmd = "find %s -name '*.rb' | xargs rubocop %s -f j" % (dirname, config)
else:
ruby_files = []
for filename in filenames:
if '.rb' in filename:
ruby_files.append("%s/%s" % (dirname, filename))
cmd = "rubocop %s -f j %s" % (config, " ".join(ruby_files))
try:
output = json.loads(self.executor(cmd))
for linted_file in output['files']:
# The path should be relative to the repo,
# without a leading slash
# example db/file.rb
file_name = os.path.abspath(linted_file['path'])
file_name = file_name.replace(dirname, "")[1:]
for offence in linted_file['offences']:
line_number = str(offence['location']['line'])
retval[str(file_name)][line_number].append(
str(offence['message']))
retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number]))
except:
pass
return retval
|
7dc734641c1bc7006c9d382afa00c3a8c0b16c50
|
admin/common_auth/forms.py
|
admin/common_auth/forms.py
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import Group
from admin.common_auth.models import MyUser
class LoginForm(forms.Form):
    """Email/password credentials form for the admin login page."""
    email = forms.CharField(label=u'Email', required=True)
    password = forms.CharField(
        label=u'Password',
        # render_value=False: never echo the password back into the widget.
        widget=forms.PasswordInput(render_value=False),
        required=True
    )
class UserRegistrationForm(UserCreationForm):
    """Admin-side form for creating a new MyUser account.

    Extends Django's UserCreationForm with an optional ``group_perms``
    selection limited to the ``prereg_group`` group.
    """
    group_perms = forms.ModelMultipleChoiceField(
        queryset=Group.objects.filter(name='prereg_group'),
        widget=FilteredSelectMultiple('verbose name', is_stacked=False),
        required=False
    )

    class Meta:
        model = MyUser
        fields = ['password1', 'password2', 'first_name', 'last_name', 'email', 'is_active', 'is_staff',
                  'is_superuser', 'groups', 'user_permissions', 'last_login', 'group_perms', 'osf_id']

    def __init__(self, *args, **kwargs):
        super(UserRegistrationForm, self).__init__(*args, **kwargs)
        # Force these fields to be required on the form regardless of
        # the model field defaults.
        self.fields['first_name'].required = True
        self.fields['last_name'].required = True
        self.fields['osf_id'].required = True
class DeskUserForm(forms.ModelForm):
    """Edit a user's desk token credentials (token + secret)."""
    class Meta:
        model = MyUser
        fields = ['desk_token', 'desk_token_secret']
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import Group
from osf.models.user import OSFUser
from admin.common_auth.models import AdminProfile
class LoginForm(forms.Form):
    """Email/password credentials form for the admin login page."""
    email = forms.CharField(label=u'Email', required=True)
    password = forms.CharField(
        label=u'Password',
        # render_value=False: never echo the password back into the widget.
        widget=forms.PasswordInput(render_value=False),
        required=True
    )
class UserRegistrationForm(UserCreationForm):
    """Admin-side form for creating a new OSFUser account."""
    # Optional membership in the prereg reviewers group.
    group_perms = forms.ModelMultipleChoiceField(
        queryset=Group.objects.filter(name='prereg_group'),
        widget=FilteredSelectMultiple('verbose name', is_stacked=False),
        required=False
    )

    class Meta:
        model = OSFUser
        fields = ['given_name', 'username']

    def __init__(self, *args, **kwargs):
        super(UserRegistrationForm, self).__init__(*args, **kwargs)
        # NOTE(review): Meta.fields no longer declares first_name /
        # last_name / osf_id, so these lookups look likely to raise
        # KeyError at runtime -- confirm which fields the OSFUser-backed
        # form actually exposes.
        self.fields['first_name'].required = True
        self.fields['last_name'].required = True
        self.fields['osf_id'].required = True
class DeskUserForm(forms.ModelForm):
    """Edit an admin profile's desk token credentials (token + secret)."""
    class Meta:
        model = AdminProfile
        fields = ['desk_token', 'desk_token_secret']
|
Update desk form and add update form as well
|
Update desk form and add update form as well
|
Python
|
apache-2.0
|
aaxelb/osf.io,sloria/osf.io,erinspace/osf.io,baylee-d/osf.io,laurenrevere/osf.io,mattclark/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,mluo613/osf.io,aaxelb/osf.io,alexschiller/osf.io,binoculars/osf.io,alexschiller/osf.io,alexschiller/osf.io,icereval/osf.io,baylee-d/osf.io,saradbowman/osf.io,pattisdr/osf.io,acshi/osf.io,baylee-d/osf.io,caneruguz/osf.io,caneruguz/osf.io,crcresearch/osf.io,pattisdr/osf.io,TomBaxter/osf.io,chrisseto/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,leb2dg/osf.io,erinspace/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,acshi/osf.io,caneruguz/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,sloria/osf.io,hmoco/osf.io,sloria/osf.io,acshi/osf.io,Nesiehr/osf.io,chrisseto/osf.io,leb2dg/osf.io,Nesiehr/osf.io,hmoco/osf.io,felliott/osf.io,HalcyonChimera/osf.io,felliott/osf.io,chennan47/osf.io,TomBaxter/osf.io,caseyrollins/osf.io,hmoco/osf.io,alexschiller/osf.io,caneruguz/osf.io,mattclark/osf.io,mluo613/osf.io,cslzchen/osf.io,mattclark/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,icereval/osf.io,laurenrevere/osf.io,chennan47/osf.io,hmoco/osf.io,adlius/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,mluo613/osf.io,cslzchen/osf.io,cwisecarver/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,erinspace/osf.io,felliott/osf.io,acshi/osf.io,leb2dg/osf.io,alexschiller/osf.io,cwisecarver/osf.io,mluo613/osf.io,icereval/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,cslzchen/osf.io,chrisseto/osf.io,binoculars/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,adlius/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,adlius/osf.io,TomBaxter/osf.io,mluo613/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,monikagrabowska/osf.io,felliott/osf.io,caseyrollins
/osf.io,adlius/osf.io,chennan47/osf.io,Nesiehr/osf.io
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import Group
from admin.common_auth.models import MyUser
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(UserCreationForm):
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.filter(name='prereg_group'),
widget=FilteredSelectMultiple('verbose name', is_stacked=False),
required=False
)
class Meta:
model = MyUser
fields = ['password1', 'password2', 'first_name', 'last_name', 'email', 'is_active', 'is_staff',
'is_superuser', 'groups', 'user_permissions', 'last_login', 'group_perms', 'osf_id']
def __init__(self, *args, **kwargs):
super(UserRegistrationForm, self).__init__(*args, **kwargs)
self.fields['first_name'].required = True
self.fields['last_name'].required = True
self.fields['osf_id'].required = True
class DeskUserForm(forms.ModelForm):
class Meta:
model = MyUser
fields = ['desk_token', 'desk_token_secret']
Update desk form and add update form as well
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import Group
from osf.models.user import OSFUser
from admin.common_auth.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(UserCreationForm):
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.filter(name='prereg_group'),
widget=FilteredSelectMultiple('verbose name', is_stacked=False),
required=False
)
class Meta:
model = OSFUser
fields = ['given_name', 'username']
def __init__(self, *args, **kwargs):
super(UserRegistrationForm, self).__init__(*args, **kwargs)
self.fields['first_name'].required = True
self.fields['last_name'].required = True
self.fields['osf_id'].required = True
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
<commit_before>from __future__ import absolute_import
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import Group
from admin.common_auth.models import MyUser
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(UserCreationForm):
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.filter(name='prereg_group'),
widget=FilteredSelectMultiple('verbose name', is_stacked=False),
required=False
)
class Meta:
model = MyUser
fields = ['password1', 'password2', 'first_name', 'last_name', 'email', 'is_active', 'is_staff',
'is_superuser', 'groups', 'user_permissions', 'last_login', 'group_perms', 'osf_id']
def __init__(self, *args, **kwargs):
super(UserRegistrationForm, self).__init__(*args, **kwargs)
self.fields['first_name'].required = True
self.fields['last_name'].required = True
self.fields['osf_id'].required = True
class DeskUserForm(forms.ModelForm):
class Meta:
model = MyUser
fields = ['desk_token', 'desk_token_secret']
<commit_msg>Update desk form and add update form as well<commit_after>
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import Group
from osf.models.user import OSFUser
from admin.common_auth.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(UserCreationForm):
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.filter(name='prereg_group'),
widget=FilteredSelectMultiple('verbose name', is_stacked=False),
required=False
)
class Meta:
model = OSFUser
fields = ['given_name', 'username']
def __init__(self, *args, **kwargs):
super(UserRegistrationForm, self).__init__(*args, **kwargs)
self.fields['first_name'].required = True
self.fields['last_name'].required = True
self.fields['osf_id'].required = True
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import Group
from admin.common_auth.models import MyUser
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(UserCreationForm):
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.filter(name='prereg_group'),
widget=FilteredSelectMultiple('verbose name', is_stacked=False),
required=False
)
class Meta:
model = MyUser
fields = ['password1', 'password2', 'first_name', 'last_name', 'email', 'is_active', 'is_staff',
'is_superuser', 'groups', 'user_permissions', 'last_login', 'group_perms', 'osf_id']
def __init__(self, *args, **kwargs):
super(UserRegistrationForm, self).__init__(*args, **kwargs)
self.fields['first_name'].required = True
self.fields['last_name'].required = True
self.fields['osf_id'].required = True
class DeskUserForm(forms.ModelForm):
class Meta:
model = MyUser
fields = ['desk_token', 'desk_token_secret']
Update desk form and add update form as wellfrom __future__ import absolute_import
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import Group
from osf.models.user import OSFUser
from admin.common_auth.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(UserCreationForm):
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.filter(name='prereg_group'),
widget=FilteredSelectMultiple('verbose name', is_stacked=False),
required=False
)
class Meta:
model = OSFUser
fields = ['given_name', 'username']
def __init__(self, *args, **kwargs):
super(UserRegistrationForm, self).__init__(*args, **kwargs)
self.fields['first_name'].required = True
self.fields['last_name'].required = True
self.fields['osf_id'].required = True
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
<commit_before>from __future__ import absolute_import
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import Group
from admin.common_auth.models import MyUser
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(UserCreationForm):
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.filter(name='prereg_group'),
widget=FilteredSelectMultiple('verbose name', is_stacked=False),
required=False
)
class Meta:
model = MyUser
fields = ['password1', 'password2', 'first_name', 'last_name', 'email', 'is_active', 'is_staff',
'is_superuser', 'groups', 'user_permissions', 'last_login', 'group_perms', 'osf_id']
def __init__(self, *args, **kwargs):
super(UserRegistrationForm, self).__init__(*args, **kwargs)
self.fields['first_name'].required = True
self.fields['last_name'].required = True
self.fields['osf_id'].required = True
class DeskUserForm(forms.ModelForm):
class Meta:
model = MyUser
fields = ['desk_token', 'desk_token_secret']
<commit_msg>Update desk form and add update form as well<commit_after>from __future__ import absolute_import
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.auth.models import Group
from osf.models.user import OSFUser
from admin.common_auth.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(UserCreationForm):
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.filter(name='prereg_group'),
widget=FilteredSelectMultiple('verbose name', is_stacked=False),
required=False
)
class Meta:
model = OSFUser
fields = ['given_name', 'username']
def __init__(self, *args, **kwargs):
super(UserRegistrationForm, self).__init__(*args, **kwargs)
self.fields['first_name'].required = True
self.fields['last_name'].required = True
self.fields['osf_id'].required = True
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
5d8dafb9bd6a6c5c5964f7076b7d398d285aaf8d
|
zeus/artifacts/__init__.py
|
zeus/artifacts/__init__.py
|
from __future__ import absolute_import, print_function
from .manager import Manager
from .checkstyle import CheckstyleHandler
from .coverage import CoverageHandler
from .pycodestyle import PyCodeStyleHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CheckstyleHandler, [
'checkstyle.xml', '*.checkstyle.xml'])
manager.register(CoverageHandler, [
'coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, [
'xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
manager.register(PyCodeStyleHandler, [
'pep8.txt', '*.pep8.txt', 'pycodestyle.txt', '*.pycodestyle.txt'])
|
from __future__ import absolute_import, print_function
from .manager import Manager
from .checkstyle import CheckstyleHandler
from .coverage import CoverageHandler
from .pycodestyle import PyCodeStyleHandler
from .pylint import PyLintHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CheckstyleHandler, [
'checkstyle.xml', '*.checkstyle.xml'])
manager.register(CoverageHandler, [
'coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, [
'xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
manager.register(PyCodeStyleHandler, [
'pep8.txt', '*.pep8.txt', 'pycodestyle.txt', '*.pycodestyle.txt'])
manager.register(PyLintHandler, [
'pylint.txt', '*.pylint.txt'])
|
Add PyLintHandler to artifact manager
|
fix: Add PyLintHandler to artifact manager
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
from __future__ import absolute_import, print_function
from .manager import Manager
from .checkstyle import CheckstyleHandler
from .coverage import CoverageHandler
from .pycodestyle import PyCodeStyleHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CheckstyleHandler, [
'checkstyle.xml', '*.checkstyle.xml'])
manager.register(CoverageHandler, [
'coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, [
'xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
manager.register(PyCodeStyleHandler, [
'pep8.txt', '*.pep8.txt', 'pycodestyle.txt', '*.pycodestyle.txt'])
fix: Add PyLintHandler to artifact manager
|
from __future__ import absolute_import, print_function
from .manager import Manager
from .checkstyle import CheckstyleHandler
from .coverage import CoverageHandler
from .pycodestyle import PyCodeStyleHandler
from .pylint import PyLintHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CheckstyleHandler, [
'checkstyle.xml', '*.checkstyle.xml'])
manager.register(CoverageHandler, [
'coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, [
'xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
manager.register(PyCodeStyleHandler, [
'pep8.txt', '*.pep8.txt', 'pycodestyle.txt', '*.pycodestyle.txt'])
manager.register(PyLintHandler, [
'pylint.txt', '*.pylint.txt'])
|
<commit_before>from __future__ import absolute_import, print_function
from .manager import Manager
from .checkstyle import CheckstyleHandler
from .coverage import CoverageHandler
from .pycodestyle import PyCodeStyleHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CheckstyleHandler, [
'checkstyle.xml', '*.checkstyle.xml'])
manager.register(CoverageHandler, [
'coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, [
'xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
manager.register(PyCodeStyleHandler, [
'pep8.txt', '*.pep8.txt', 'pycodestyle.txt', '*.pycodestyle.txt'])
<commit_msg>fix: Add PyLintHandler to artifact manager<commit_after>
|
from __future__ import absolute_import, print_function
from .manager import Manager
from .checkstyle import CheckstyleHandler
from .coverage import CoverageHandler
from .pycodestyle import PyCodeStyleHandler
from .pylint import PyLintHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CheckstyleHandler, [
'checkstyle.xml', '*.checkstyle.xml'])
manager.register(CoverageHandler, [
'coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, [
'xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
manager.register(PyCodeStyleHandler, [
'pep8.txt', '*.pep8.txt', 'pycodestyle.txt', '*.pycodestyle.txt'])
manager.register(PyLintHandler, [
'pylint.txt', '*.pylint.txt'])
|
from __future__ import absolute_import, print_function
from .manager import Manager
from .checkstyle import CheckstyleHandler
from .coverage import CoverageHandler
from .pycodestyle import PyCodeStyleHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CheckstyleHandler, [
'checkstyle.xml', '*.checkstyle.xml'])
manager.register(CoverageHandler, [
'coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, [
'xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
manager.register(PyCodeStyleHandler, [
'pep8.txt', '*.pep8.txt', 'pycodestyle.txt', '*.pycodestyle.txt'])
fix: Add PyLintHandler to artifact managerfrom __future__ import absolute_import, print_function
from .manager import Manager
from .checkstyle import CheckstyleHandler
from .coverage import CoverageHandler
from .pycodestyle import PyCodeStyleHandler
from .pylint import PyLintHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CheckstyleHandler, [
'checkstyle.xml', '*.checkstyle.xml'])
manager.register(CoverageHandler, [
'coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, [
'xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
manager.register(PyCodeStyleHandler, [
'pep8.txt', '*.pep8.txt', 'pycodestyle.txt', '*.pycodestyle.txt'])
manager.register(PyLintHandler, [
'pylint.txt', '*.pylint.txt'])
|
<commit_before>from __future__ import absolute_import, print_function
from .manager import Manager
from .checkstyle import CheckstyleHandler
from .coverage import CoverageHandler
from .pycodestyle import PyCodeStyleHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CheckstyleHandler, [
'checkstyle.xml', '*.checkstyle.xml'])
manager.register(CoverageHandler, [
'coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, [
'xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
manager.register(PyCodeStyleHandler, [
'pep8.txt', '*.pep8.txt', 'pycodestyle.txt', '*.pycodestyle.txt'])
<commit_msg>fix: Add PyLintHandler to artifact manager<commit_after>from __future__ import absolute_import, print_function
from .manager import Manager
from .checkstyle import CheckstyleHandler
from .coverage import CoverageHandler
from .pycodestyle import PyCodeStyleHandler
from .pylint import PyLintHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CheckstyleHandler, [
'checkstyle.xml', '*.checkstyle.xml'])
manager.register(CoverageHandler, [
'coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, [
'xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
manager.register(PyCodeStyleHandler, [
'pep8.txt', '*.pep8.txt', 'pycodestyle.txt', '*.pycodestyle.txt'])
manager.register(PyLintHandler, [
'pylint.txt', '*.pylint.txt'])
|
511561918ad5f7620211341ebda373d5dd928377
|
Rubik/event.py
|
Rubik/event.py
|
class Event:
source = None
event = None
data = None
def __init__(self, source, event, data=None):
self.source = source
self.event = event
self.data = data
SOURCE_OTHER = 0
SOURCE_GUI = 1
SOURCE_RUBIK = 2
SOURCE_SIMON = 3
SOURCE_GEARS = 4
EVENT_DEFAULT = 0
EVENT_BUTTON1 = 1
EVENT_BUTTON2 = 2
EVENT_BUTTON3 = 3
EVENT_BUTTON4 = 4
EVENT_BUTTON5 = 5
EVENT_CUBE_LIFT = 6
EVENT_CUBE_SET = 7
EVENT_SUCCESS = 8
EVENT_FAILURE = 9
EVENT_BUTTON_RESET = -1
EVENT_STRINGS = {
EVENT_DEFAULT: "DEFAULT",
EVENT_BUTTON1: "BUTTON1",
EVENT_BUTTON2: "BUTTON2",
EVENT_BUTTON3: "BUTTON3",
EVENT_BUTTON4: "BUTTON4",
EVENT_BUTTON5: "BUTTON5",
EVENT_CUBE_LIFT: "CUBE LIFT",
EVENT_CUBE_SET: "CUBE SET",
EVENT_SUCCESS: "SUCCESS",
EVENT_FAILURE: "FAILURE",
EVENT_BUTTON_RESET: "RESET"
}
|
class Event:
def __init__(self, source, event, data=None):
self.source = source
self.event = event
self.data = data
SOURCE_OTHER = 0
SOURCE_GUI = 1
SOURCE_RUBIK = 2
SOURCE_SIMON = 3
SOURCE_GEARS = 4
EVENT_DEFAULT = 0
EVENT_BUTTON1 = 1
EVENT_BUTTON2 = 2
EVENT_BUTTON3 = 3
EVENT_BUTTON4 = 4
EVENT_BUTTON5 = 5
EVENT_CUBE_LIFT = 6
EVENT_CUBE_SET = 7
EVENT_SUCCESS = 8
EVENT_FAILURE = 9
EVENT_BUTTON_RESET = -1
EVENT_STRINGS = {
EVENT_DEFAULT: "DEFAULT",
EVENT_BUTTON1: "BUTTON1",
EVENT_BUTTON2: "BUTTON2",
EVENT_BUTTON3: "BUTTON3",
EVENT_BUTTON4: "BUTTON4",
EVENT_BUTTON5: "BUTTON5",
EVENT_CUBE_LIFT: "CUBE LIFT",
EVENT_CUBE_SET: "CUBE SET",
EVENT_SUCCESS: "SUCCESS",
EVENT_FAILURE: "FAILURE",
EVENT_BUTTON_RESET: "RESET"
}
|
Fix Event class instance variables
|
Fix Event class instance variables
|
Python
|
apache-2.0
|
RoboErik/RUBIK,RoboErik/RUBIK,RoboErik/RUBIK
|
class Event:
source = None
event = None
data = None
def __init__(self, source, event, data=None):
self.source = source
self.event = event
self.data = data
SOURCE_OTHER = 0
SOURCE_GUI = 1
SOURCE_RUBIK = 2
SOURCE_SIMON = 3
SOURCE_GEARS = 4
EVENT_DEFAULT = 0
EVENT_BUTTON1 = 1
EVENT_BUTTON2 = 2
EVENT_BUTTON3 = 3
EVENT_BUTTON4 = 4
EVENT_BUTTON5 = 5
EVENT_CUBE_LIFT = 6
EVENT_CUBE_SET = 7
EVENT_SUCCESS = 8
EVENT_FAILURE = 9
EVENT_BUTTON_RESET = -1
EVENT_STRINGS = {
EVENT_DEFAULT: "DEFAULT",
EVENT_BUTTON1: "BUTTON1",
EVENT_BUTTON2: "BUTTON2",
EVENT_BUTTON3: "BUTTON3",
EVENT_BUTTON4: "BUTTON4",
EVENT_BUTTON5: "BUTTON5",
EVENT_CUBE_LIFT: "CUBE LIFT",
EVENT_CUBE_SET: "CUBE SET",
EVENT_SUCCESS: "SUCCESS",
EVENT_FAILURE: "FAILURE",
EVENT_BUTTON_RESET: "RESET"
}
Fix Event class instance variables
|
class Event:
def __init__(self, source, event, data=None):
self.source = source
self.event = event
self.data = data
SOURCE_OTHER = 0
SOURCE_GUI = 1
SOURCE_RUBIK = 2
SOURCE_SIMON = 3
SOURCE_GEARS = 4
EVENT_DEFAULT = 0
EVENT_BUTTON1 = 1
EVENT_BUTTON2 = 2
EVENT_BUTTON3 = 3
EVENT_BUTTON4 = 4
EVENT_BUTTON5 = 5
EVENT_CUBE_LIFT = 6
EVENT_CUBE_SET = 7
EVENT_SUCCESS = 8
EVENT_FAILURE = 9
EVENT_BUTTON_RESET = -1
EVENT_STRINGS = {
EVENT_DEFAULT: "DEFAULT",
EVENT_BUTTON1: "BUTTON1",
EVENT_BUTTON2: "BUTTON2",
EVENT_BUTTON3: "BUTTON3",
EVENT_BUTTON4: "BUTTON4",
EVENT_BUTTON5: "BUTTON5",
EVENT_CUBE_LIFT: "CUBE LIFT",
EVENT_CUBE_SET: "CUBE SET",
EVENT_SUCCESS: "SUCCESS",
EVENT_FAILURE: "FAILURE",
EVENT_BUTTON_RESET: "RESET"
}
|
<commit_before>class Event:
source = None
event = None
data = None
def __init__(self, source, event, data=None):
self.source = source
self.event = event
self.data = data
SOURCE_OTHER = 0
SOURCE_GUI = 1
SOURCE_RUBIK = 2
SOURCE_SIMON = 3
SOURCE_GEARS = 4
EVENT_DEFAULT = 0
EVENT_BUTTON1 = 1
EVENT_BUTTON2 = 2
EVENT_BUTTON3 = 3
EVENT_BUTTON4 = 4
EVENT_BUTTON5 = 5
EVENT_CUBE_LIFT = 6
EVENT_CUBE_SET = 7
EVENT_SUCCESS = 8
EVENT_FAILURE = 9
EVENT_BUTTON_RESET = -1
EVENT_STRINGS = {
EVENT_DEFAULT: "DEFAULT",
EVENT_BUTTON1: "BUTTON1",
EVENT_BUTTON2: "BUTTON2",
EVENT_BUTTON3: "BUTTON3",
EVENT_BUTTON4: "BUTTON4",
EVENT_BUTTON5: "BUTTON5",
EVENT_CUBE_LIFT: "CUBE LIFT",
EVENT_CUBE_SET: "CUBE SET",
EVENT_SUCCESS: "SUCCESS",
EVENT_FAILURE: "FAILURE",
EVENT_BUTTON_RESET: "RESET"
}
<commit_msg>Fix Event class instance variables<commit_after>
|
class Event:
def __init__(self, source, event, data=None):
self.source = source
self.event = event
self.data = data
SOURCE_OTHER = 0
SOURCE_GUI = 1
SOURCE_RUBIK = 2
SOURCE_SIMON = 3
SOURCE_GEARS = 4
EVENT_DEFAULT = 0
EVENT_BUTTON1 = 1
EVENT_BUTTON2 = 2
EVENT_BUTTON3 = 3
EVENT_BUTTON4 = 4
EVENT_BUTTON5 = 5
EVENT_CUBE_LIFT = 6
EVENT_CUBE_SET = 7
EVENT_SUCCESS = 8
EVENT_FAILURE = 9
EVENT_BUTTON_RESET = -1
EVENT_STRINGS = {
EVENT_DEFAULT: "DEFAULT",
EVENT_BUTTON1: "BUTTON1",
EVENT_BUTTON2: "BUTTON2",
EVENT_BUTTON3: "BUTTON3",
EVENT_BUTTON4: "BUTTON4",
EVENT_BUTTON5: "BUTTON5",
EVENT_CUBE_LIFT: "CUBE LIFT",
EVENT_CUBE_SET: "CUBE SET",
EVENT_SUCCESS: "SUCCESS",
EVENT_FAILURE: "FAILURE",
EVENT_BUTTON_RESET: "RESET"
}
|
class Event:
source = None
event = None
data = None
def __init__(self, source, event, data=None):
self.source = source
self.event = event
self.data = data
SOURCE_OTHER = 0
SOURCE_GUI = 1
SOURCE_RUBIK = 2
SOURCE_SIMON = 3
SOURCE_GEARS = 4
EVENT_DEFAULT = 0
EVENT_BUTTON1 = 1
EVENT_BUTTON2 = 2
EVENT_BUTTON3 = 3
EVENT_BUTTON4 = 4
EVENT_BUTTON5 = 5
EVENT_CUBE_LIFT = 6
EVENT_CUBE_SET = 7
EVENT_SUCCESS = 8
EVENT_FAILURE = 9
EVENT_BUTTON_RESET = -1
EVENT_STRINGS = {
EVENT_DEFAULT: "DEFAULT",
EVENT_BUTTON1: "BUTTON1",
EVENT_BUTTON2: "BUTTON2",
EVENT_BUTTON3: "BUTTON3",
EVENT_BUTTON4: "BUTTON4",
EVENT_BUTTON5: "BUTTON5",
EVENT_CUBE_LIFT: "CUBE LIFT",
EVENT_CUBE_SET: "CUBE SET",
EVENT_SUCCESS: "SUCCESS",
EVENT_FAILURE: "FAILURE",
EVENT_BUTTON_RESET: "RESET"
}
Fix Event class instance variablesclass Event:
def __init__(self, source, event, data=None):
self.source = source
self.event = event
self.data = data
SOURCE_OTHER = 0
SOURCE_GUI = 1
SOURCE_RUBIK = 2
SOURCE_SIMON = 3
SOURCE_GEARS = 4
EVENT_DEFAULT = 0
EVENT_BUTTON1 = 1
EVENT_BUTTON2 = 2
EVENT_BUTTON3 = 3
EVENT_BUTTON4 = 4
EVENT_BUTTON5 = 5
EVENT_CUBE_LIFT = 6
EVENT_CUBE_SET = 7
EVENT_SUCCESS = 8
EVENT_FAILURE = 9
EVENT_BUTTON_RESET = -1
EVENT_STRINGS = {
EVENT_DEFAULT: "DEFAULT",
EVENT_BUTTON1: "BUTTON1",
EVENT_BUTTON2: "BUTTON2",
EVENT_BUTTON3: "BUTTON3",
EVENT_BUTTON4: "BUTTON4",
EVENT_BUTTON5: "BUTTON5",
EVENT_CUBE_LIFT: "CUBE LIFT",
EVENT_CUBE_SET: "CUBE SET",
EVENT_SUCCESS: "SUCCESS",
EVENT_FAILURE: "FAILURE",
EVENT_BUTTON_RESET: "RESET"
}
|
<commit_before>class Event:
source = None
event = None
data = None
def __init__(self, source, event, data=None):
self.source = source
self.event = event
self.data = data
SOURCE_OTHER = 0
SOURCE_GUI = 1
SOURCE_RUBIK = 2
SOURCE_SIMON = 3
SOURCE_GEARS = 4
EVENT_DEFAULT = 0
EVENT_BUTTON1 = 1
EVENT_BUTTON2 = 2
EVENT_BUTTON3 = 3
EVENT_BUTTON4 = 4
EVENT_BUTTON5 = 5
EVENT_CUBE_LIFT = 6
EVENT_CUBE_SET = 7
EVENT_SUCCESS = 8
EVENT_FAILURE = 9
EVENT_BUTTON_RESET = -1
EVENT_STRINGS = {
EVENT_DEFAULT: "DEFAULT",
EVENT_BUTTON1: "BUTTON1",
EVENT_BUTTON2: "BUTTON2",
EVENT_BUTTON3: "BUTTON3",
EVENT_BUTTON4: "BUTTON4",
EVENT_BUTTON5: "BUTTON5",
EVENT_CUBE_LIFT: "CUBE LIFT",
EVENT_CUBE_SET: "CUBE SET",
EVENT_SUCCESS: "SUCCESS",
EVENT_FAILURE: "FAILURE",
EVENT_BUTTON_RESET: "RESET"
}
<commit_msg>Fix Event class instance variables<commit_after>class Event:
def __init__(self, source, event, data=None):
self.source = source
self.event = event
self.data = data
SOURCE_OTHER = 0
SOURCE_GUI = 1
SOURCE_RUBIK = 2
SOURCE_SIMON = 3
SOURCE_GEARS = 4
EVENT_DEFAULT = 0
EVENT_BUTTON1 = 1
EVENT_BUTTON2 = 2
EVENT_BUTTON3 = 3
EVENT_BUTTON4 = 4
EVENT_BUTTON5 = 5
EVENT_CUBE_LIFT = 6
EVENT_CUBE_SET = 7
EVENT_SUCCESS = 8
EVENT_FAILURE = 9
EVENT_BUTTON_RESET = -1
EVENT_STRINGS = {
EVENT_DEFAULT: "DEFAULT",
EVENT_BUTTON1: "BUTTON1",
EVENT_BUTTON2: "BUTTON2",
EVENT_BUTTON3: "BUTTON3",
EVENT_BUTTON4: "BUTTON4",
EVENT_BUTTON5: "BUTTON5",
EVENT_CUBE_LIFT: "CUBE LIFT",
EVENT_CUBE_SET: "CUBE SET",
EVENT_SUCCESS: "SUCCESS",
EVENT_FAILURE: "FAILURE",
EVENT_BUTTON_RESET: "RESET"
}
|
37669b43ba35767d28494848e2f1d10d662ddf47
|
joblib/test/test_logger.py
|
joblib/test/test_logger.py
|
"""
Test the logger module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import nose
from ..logger import PrintTime
################################################################################
# Test fixtures
def setup():
""" Test setup.
"""
global cachedir
cachedir = mkdtemp()
#cachedir = 'foobar'
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def teardown():
""" Test teardown.
"""
#return True
shutil.rmtree(cachedir)
################################################################################
# Tests
def smoke_test_print_time():
""" A simple smoke test for PrintTime.
"""
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
|
"""
Test the logger module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import nose
from ..logger import PrintTime
################################################################################
# Test fixtures
def setup():
""" Test setup.
"""
global cachedir
cachedir = mkdtemp()
#cachedir = 'foobar'
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def teardown():
""" Test teardown.
"""
#return True
shutil.rmtree(cachedir)
################################################################################
# Tests
def smoke_test_print_time():
""" A simple smoke test for PrintTime.
"""
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
# Create a second time, to smoke test log rotation.
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
|
Improve smoke test coverage for the logger.
|
Improve smoke test coverage for the logger.
|
Python
|
bsd-3-clause
|
lesteve/joblib,lesteve/joblib,tomMoral/joblib,aabadie/joblib,karandesai-96/joblib,joblib/joblib,joblib/joblib,karandesai-96/joblib,tomMoral/joblib,aabadie/joblib
|
"""
Test the logger module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import nose
from ..logger import PrintTime
################################################################################
# Test fixtures
def setup():
""" Test setup.
"""
global cachedir
cachedir = mkdtemp()
#cachedir = 'foobar'
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def teardown():
""" Test teardown.
"""
#return True
shutil.rmtree(cachedir)
################################################################################
# Tests
def smoke_test_print_time():
""" A simple smoke test for PrintTime.
"""
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
Improve smoke test coverage for the logger.
|
"""
Test the logger module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import nose
from ..logger import PrintTime
################################################################################
# Test fixtures
def setup():
""" Test setup.
"""
global cachedir
cachedir = mkdtemp()
#cachedir = 'foobar'
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def teardown():
""" Test teardown.
"""
#return True
shutil.rmtree(cachedir)
################################################################################
# Tests
def smoke_test_print_time():
""" A simple smoke test for PrintTime.
"""
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
# Create a second time, to smoke test log rotation.
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
|
<commit_before>"""
Test the logger module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import nose
from ..logger import PrintTime
################################################################################
# Test fixtures
def setup():
""" Test setup.
"""
global cachedir
cachedir = mkdtemp()
#cachedir = 'foobar'
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def teardown():
""" Test teardown.
"""
#return True
shutil.rmtree(cachedir)
################################################################################
# Tests
def smoke_test_print_time():
""" A simple smoke test for PrintTime.
"""
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
<commit_msg>Improve smoke test coverage for the logger.<commit_after>
|
"""
Test the logger module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import nose
from ..logger import PrintTime
################################################################################
# Test fixtures
def setup():
""" Test setup.
"""
global cachedir
cachedir = mkdtemp()
#cachedir = 'foobar'
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def teardown():
""" Test teardown.
"""
#return True
shutil.rmtree(cachedir)
################################################################################
# Tests
def smoke_test_print_time():
""" A simple smoke test for PrintTime.
"""
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
# Create a second time, to smoke test log rotation.
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
|
"""
Test the logger module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import nose
from ..logger import PrintTime
################################################################################
# Test fixtures
def setup():
""" Test setup.
"""
global cachedir
cachedir = mkdtemp()
#cachedir = 'foobar'
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def teardown():
""" Test teardown.
"""
#return True
shutil.rmtree(cachedir)
################################################################################
# Tests
def smoke_test_print_time():
""" A simple smoke test for PrintTime.
"""
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
Improve smoke test coverage for the logger."""
Test the logger module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import nose
from ..logger import PrintTime
################################################################################
# Test fixtures
def setup():
""" Test setup.
"""
global cachedir
cachedir = mkdtemp()
#cachedir = 'foobar'
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def teardown():
""" Test teardown.
"""
#return True
shutil.rmtree(cachedir)
################################################################################
# Tests
def smoke_test_print_time():
""" A simple smoke test for PrintTime.
"""
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
# Create a second time, to smoke test log rotation.
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
|
<commit_before>"""
Test the logger module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import nose
from ..logger import PrintTime
################################################################################
# Test fixtures
def setup():
""" Test setup.
"""
global cachedir
cachedir = mkdtemp()
#cachedir = 'foobar'
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def teardown():
""" Test teardown.
"""
#return True
shutil.rmtree(cachedir)
################################################################################
# Tests
def smoke_test_print_time():
""" A simple smoke test for PrintTime.
"""
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
<commit_msg>Improve smoke test coverage for the logger.<commit_after>"""
Test the logger module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
from tempfile import mkdtemp
import nose
from ..logger import PrintTime
################################################################################
# Test fixtures
def setup():
""" Test setup.
"""
global cachedir
cachedir = mkdtemp()
#cachedir = 'foobar'
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def teardown():
""" Test teardown.
"""
#return True
shutil.rmtree(cachedir)
################################################################################
# Tests
def smoke_test_print_time():
""" A simple smoke test for PrintTime.
"""
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
# Create a second time, to smoke test log rotation.
print_time = PrintTime(logfile=os.path.join(cachedir, 'test.log'))
print_time('Foo')
|
e5d42af3e94869bb40225c808121b40ed8f94a29
|
tools/misc/python/test-data-in-out.py
|
tools/misc/python/test-data-in-out.py
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
import shutil
shutil.copyfile('input', 'output')
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
|
Test that missing optional outputs aren't created
|
Test that missing optional outputs aren't created
|
Python
|
mit
|
chipster/chipster-tools,chipster/chipster-tools,chipster/chipster-tools,chipster/chipster-tools
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
import shutil
shutil.copyfile('input', 'output')
Test that missing optional outputs aren't created
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
|
<commit_before># TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
import shutil
shutil.copyfile('input', 'output')
<commit_msg>Test that missing optional outputs aren't created<commit_after>
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
import shutil
shutil.copyfile('input', 'output')
Test that missing optional outputs aren't created# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
|
<commit_before># TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
import shutil
shutil.copyfile('input', 'output')
<commit_msg>Test that missing optional outputs aren't created<commit_after># TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
|
ca4dc41a350210ad54a9ef89d861fa1a1866cd5d
|
dailydevo/desiringgod_actions.py
|
dailydevo/desiringgod_actions.py
|
# coding=utf8
# Local modules
from common import debug
from common.action import action_classes
from common.telegram import telegram_utils
from dailydevo import desiringgod_utils
from user import user_actions
PROMPT = "Here are today's articles from desiringgod.org!\nTap on any one to get the article!"
class DGDevoAction(action_classes.Action):
def identifier(self):
return "/desiringgod"
def name(self):
return "Desiring God Articles"
def description(self):
return "Articles from DesiringGod.org"
def resolve(self, userObj, msg):
refs = desiringgod_utils.get_desiringgod()
if refs is not None:
done = {"title":user_actions.UserDoneAction.name(), "link":""}
debug.log("Created done")
refs.append(done)
debug.log("Converting to buttons")
options = [telegram_utils.make_button(text=ref["title"], fields={"url":ref["link"]}) for ref in refs]
debug.log("Got the buttons: " + options)
telegram_utils.send_url_keyboard(PROMPT, userObj.get_uid(), options, 1)
return True
def get():
return [DGDevoAction()]
|
# coding=utf8
# Local modules
from common import debug
from common.action import action_classes
from common.telegram import telegram_utils
from dailydevo import desiringgod_utils
from user import user_actions
PROMPT = "Here are today's articles from desiringgod.org!\nTap on any one to get the article!"
class DGDevoAction(action_classes.Action):
def identifier(self):
return "/desiringgod"
def name(self):
return "Desiring God Articles"
def description(self):
return "Articles from DesiringGod.org"
def resolve(self, userObj, msg):
refs = desiringgod_utils.get_desiringgod()
if refs is not None:
refs.append({"title":user_actions.UserDoneAction().name(), "link":""})
options = [telegram_utils.make_button(text=ref["title"], fields={"url":ref["link"]}) for ref in refs]
telegram_utils.send_url_keyboard(PROMPT, userObj.get_uid(), options, 1)
return True
def get():
return [DGDevoAction()]
|
Fix for done action bug
|
Fix for done action bug
|
Python
|
mit
|
julwrites/biblicabot
|
# coding=utf8
# Local modules
from common import debug
from common.action import action_classes
from common.telegram import telegram_utils
from dailydevo import desiringgod_utils
from user import user_actions
PROMPT = "Here are today's articles from desiringgod.org!\nTap on any one to get the article!"
class DGDevoAction(action_classes.Action):
def identifier(self):
return "/desiringgod"
def name(self):
return "Desiring God Articles"
def description(self):
return "Articles from DesiringGod.org"
def resolve(self, userObj, msg):
refs = desiringgod_utils.get_desiringgod()
if refs is not None:
done = {"title":user_actions.UserDoneAction.name(), "link":""}
debug.log("Created done")
refs.append(done)
debug.log("Converting to buttons")
options = [telegram_utils.make_button(text=ref["title"], fields={"url":ref["link"]}) for ref in refs]
debug.log("Got the buttons: " + options)
telegram_utils.send_url_keyboard(PROMPT, userObj.get_uid(), options, 1)
return True
def get():
return [DGDevoAction()]
Fix for done action bug
|
# coding=utf8
# Local modules
from common import debug
from common.action import action_classes
from common.telegram import telegram_utils
from dailydevo import desiringgod_utils
from user import user_actions
PROMPT = "Here are today's articles from desiringgod.org!\nTap on any one to get the article!"
class DGDevoAction(action_classes.Action):
def identifier(self):
return "/desiringgod"
def name(self):
return "Desiring God Articles"
def description(self):
return "Articles from DesiringGod.org"
def resolve(self, userObj, msg):
refs = desiringgod_utils.get_desiringgod()
if refs is not None:
refs.append({"title":user_actions.UserDoneAction().name(), "link":""})
options = [telegram_utils.make_button(text=ref["title"], fields={"url":ref["link"]}) for ref in refs]
telegram_utils.send_url_keyboard(PROMPT, userObj.get_uid(), options, 1)
return True
def get():
return [DGDevoAction()]
|
<commit_before># coding=utf8
# Local modules
from common import debug
from common.action import action_classes
from common.telegram import telegram_utils
from dailydevo import desiringgod_utils
from user import user_actions
PROMPT = "Here are today's articles from desiringgod.org!\nTap on any one to get the article!"
class DGDevoAction(action_classes.Action):
def identifier(self):
return "/desiringgod"
def name(self):
return "Desiring God Articles"
def description(self):
return "Articles from DesiringGod.org"
def resolve(self, userObj, msg):
refs = desiringgod_utils.get_desiringgod()
if refs is not None:
done = {"title":user_actions.UserDoneAction.name(), "link":""}
debug.log("Created done")
refs.append(done)
debug.log("Converting to buttons")
options = [telegram_utils.make_button(text=ref["title"], fields={"url":ref["link"]}) for ref in refs]
debug.log("Got the buttons: " + options)
telegram_utils.send_url_keyboard(PROMPT, userObj.get_uid(), options, 1)
return True
def get():
return [DGDevoAction()]
<commit_msg>Fix for done action bug<commit_after>
|
# coding=utf8
# Local modules
from common import debug
from common.action import action_classes
from common.telegram import telegram_utils
from dailydevo import desiringgod_utils
from user import user_actions
PROMPT = "Here are today's articles from desiringgod.org!\nTap on any one to get the article!"
class DGDevoAction(action_classes.Action):
def identifier(self):
return "/desiringgod"
def name(self):
return "Desiring God Articles"
def description(self):
return "Articles from DesiringGod.org"
def resolve(self, userObj, msg):
refs = desiringgod_utils.get_desiringgod()
if refs is not None:
refs.append({"title":user_actions.UserDoneAction().name(), "link":""})
options = [telegram_utils.make_button(text=ref["title"], fields={"url":ref["link"]}) for ref in refs]
telegram_utils.send_url_keyboard(PROMPT, userObj.get_uid(), options, 1)
return True
def get():
return [DGDevoAction()]
|
# coding=utf8
# Local modules
from common import debug
from common.action import action_classes
from common.telegram import telegram_utils
from dailydevo import desiringgod_utils
from user import user_actions
PROMPT = "Here are today's articles from desiringgod.org!\nTap on any one to get the article!"
class DGDevoAction(action_classes.Action):
def identifier(self):
return "/desiringgod"
def name(self):
return "Desiring God Articles"
def description(self):
return "Articles from DesiringGod.org"
def resolve(self, userObj, msg):
refs = desiringgod_utils.get_desiringgod()
if refs is not None:
done = {"title":user_actions.UserDoneAction.name(), "link":""}
debug.log("Created done")
refs.append(done)
debug.log("Converting to buttons")
options = [telegram_utils.make_button(text=ref["title"], fields={"url":ref["link"]}) for ref in refs]
debug.log("Got the buttons: " + options)
telegram_utils.send_url_keyboard(PROMPT, userObj.get_uid(), options, 1)
return True
def get():
return [DGDevoAction()]
Fix for done action bug# coding=utf8
# Local modules
from common import debug
from common.action import action_classes
from common.telegram import telegram_utils
from dailydevo import desiringgod_utils
from user import user_actions
PROMPT = "Here are today's articles from desiringgod.org!\nTap on any one to get the article!"
class DGDevoAction(action_classes.Action):
def identifier(self):
return "/desiringgod"
def name(self):
return "Desiring God Articles"
def description(self):
return "Articles from DesiringGod.org"
def resolve(self, userObj, msg):
refs = desiringgod_utils.get_desiringgod()
if refs is not None:
refs.append({"title":user_actions.UserDoneAction().name(), "link":""})
options = [telegram_utils.make_button(text=ref["title"], fields={"url":ref["link"]}) for ref in refs]
telegram_utils.send_url_keyboard(PROMPT, userObj.get_uid(), options, 1)
return True
def get():
return [DGDevoAction()]
|
<commit_before># coding=utf8
# Local modules
from common import debug
from common.action import action_classes
from common.telegram import telegram_utils
from dailydevo import desiringgod_utils
from user import user_actions
PROMPT = "Here are today's articles from desiringgod.org!\nTap on any one to get the article!"
class DGDevoAction(action_classes.Action):
def identifier(self):
return "/desiringgod"
def name(self):
return "Desiring God Articles"
def description(self):
return "Articles from DesiringGod.org"
def resolve(self, userObj, msg):
refs = desiringgod_utils.get_desiringgod()
if refs is not None:
done = {"title":user_actions.UserDoneAction.name(), "link":""}
debug.log("Created done")
refs.append(done)
debug.log("Converting to buttons")
options = [telegram_utils.make_button(text=ref["title"], fields={"url":ref["link"]}) for ref in refs]
debug.log("Got the buttons: " + options)
telegram_utils.send_url_keyboard(PROMPT, userObj.get_uid(), options, 1)
return True
def get():
return [DGDevoAction()]
<commit_msg>Fix for done action bug<commit_after># coding=utf8
# Local modules
from common import debug
from common.action import action_classes
from common.telegram import telegram_utils
from dailydevo import desiringgod_utils
from user import user_actions
PROMPT = "Here are today's articles from desiringgod.org!\nTap on any one to get the article!"
class DGDevoAction(action_classes.Action):
def identifier(self):
return "/desiringgod"
def name(self):
return "Desiring God Articles"
def description(self):
return "Articles from DesiringGod.org"
def resolve(self, userObj, msg):
refs = desiringgod_utils.get_desiringgod()
if refs is not None:
refs.append({"title":user_actions.UserDoneAction().name(), "link":""})
options = [telegram_utils.make_button(text=ref["title"], fields={"url":ref["link"]}) for ref in refs]
telegram_utils.send_url_keyboard(PROMPT, userObj.get_uid(), options, 1)
return True
def get():
return [DGDevoAction()]
|
e2cecaa99bae3635fcaa58ea57d67bce7dc83768
|
src/psd2svg/rasterizer/batik_rasterizer.py
|
src/psd2svg/rasterizer/batik_rasterizer.py
|
# -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.
Prerequisite:
sudo apt-get install -y chromedriver chromium
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true", "-jar", self.jar_path,
"{}".format(url), "-d", output_file]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
|
# -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.
Prerequisite:
sudo apt-get install -y chromedriver chromium
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0,255,255,255",
"-d", output_file,
"{}".format(url),
]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
|
Add bg option in batik rasterizer
|
Add bg option in batik rasterizer
|
Python
|
mit
|
kyamagu/psd2svg
|
# -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.
Prerequisite:
sudo apt-get install -y chromedriver chromium
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true", "-jar", self.jar_path,
"{}".format(url), "-d", output_file]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
Add bg option in batik rasterizer
|
# -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.
Prerequisite:
sudo apt-get install -y chromedriver chromium
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0,255,255,255",
"-d", output_file,
"{}".format(url),
]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
|
<commit_before># -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.
Prerequisite:
sudo apt-get install -y chromedriver chromium
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true", "-jar", self.jar_path,
"{}".format(url), "-d", output_file]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
<commit_msg>Add bg option in batik rasterizer<commit_after>
|
# -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.
Prerequisite:
sudo apt-get install -y chromedriver chromium
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0,255,255,255",
"-d", output_file,
"{}".format(url),
]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
|
# -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.
Prerequisite:
sudo apt-get install -y chromedriver chromium
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true", "-jar", self.jar_path,
"{}".format(url), "-d", output_file]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
Add bg option in batik rasterizer# -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.
Prerequisite:
sudo apt-get install -y chromedriver chromium
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0,255,255,255",
"-d", output_file,
"{}".format(url),
]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
|
<commit_before># -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.
Prerequisite:
sudo apt-get install -y chromedriver chromium
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true", "-jar", self.jar_path,
"{}".format(url), "-d", output_file]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
<commit_msg>Add bg option in batik rasterizer<commit_after># -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.
Prerequisite:
sudo apt-get install -y chromedriver chromium
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0,255,255,255",
"-d", output_file,
"{}".format(url),
]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
|
11485f52c8c89fb402d859b3f15068255109a0f5
|
website/user/middleware.py
|
website/user/middleware.py
|
import base64
from .models import Device
class BasicAuthRemote(object):
def __init__(self, get_response):
self.get_response = get_response
def get_user_token(self, email, token):
try:
device = Device.objects.get(token=token).select_related('user')
if device.user.email is not email:
return None
return user
except:
return None
def __call__(self, request):
if not request.user.is_authenticated and 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2 and auth[0].lower() == "basic":
email, token = base64.b64decode(auth[1]).decode('utf-8').split(':')
user = self.get_user_token(email, token)
if user:
request.user = user
return self.get_response(request)
|
import base64
from .models import Device
class BasicAuthRemote(object):
def __init__(self, get_response):
self.get_response = get_response
def get_user_token(self, email, token):
try:
device = Device.objects.get(token=token)
if device.user.email != email:
return None
return user
except:
return None
def __call__(self, request):
if not request.user.is_authenticated and 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2 and auth[0].lower() == "basic":
email, token = base64.b64decode(auth[1]).decode('utf-8').split(':')
user = self.get_user_token(email, token)
if user:
request.user = user
return self.get_response(request)
|
Fix 401 error for requests that require authentication
|
Fix 401 error for requests that require authentication
|
Python
|
mit
|
ava-project/ava-website,ava-project/ava-website,ava-project/ava-website
|
import base64
from .models import Device
class BasicAuthRemote(object):
def __init__(self, get_response):
self.get_response = get_response
def get_user_token(self, email, token):
try:
device = Device.objects.get(token=token).select_related('user')
if device.user.email is not email:
return None
return user
except:
return None
def __call__(self, request):
if not request.user.is_authenticated and 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2 and auth[0].lower() == "basic":
email, token = base64.b64decode(auth[1]).decode('utf-8').split(':')
user = self.get_user_token(email, token)
if user:
request.user = user
return self.get_response(request)
Fix 401 error for requests that require authentication
|
import base64
from .models import Device
class BasicAuthRemote(object):
def __init__(self, get_response):
self.get_response = get_response
def get_user_token(self, email, token):
try:
device = Device.objects.get(token=token)
if device.user.email != email:
return None
return user
except:
return None
def __call__(self, request):
if not request.user.is_authenticated and 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2 and auth[0].lower() == "basic":
email, token = base64.b64decode(auth[1]).decode('utf-8').split(':')
user = self.get_user_token(email, token)
if user:
request.user = user
return self.get_response(request)
|
<commit_before>import base64
from .models import Device
class BasicAuthRemote(object):
def __init__(self, get_response):
self.get_response = get_response
def get_user_token(self, email, token):
try:
device = Device.objects.get(token=token).select_related('user')
if device.user.email is not email:
return None
return user
except:
return None
def __call__(self, request):
if not request.user.is_authenticated and 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2 and auth[0].lower() == "basic":
email, token = base64.b64decode(auth[1]).decode('utf-8').split(':')
user = self.get_user_token(email, token)
if user:
request.user = user
return self.get_response(request)
<commit_msg>Fix 401 error for requests that require authentication<commit_after>
|
import base64
from .models import Device
class BasicAuthRemote(object):
def __init__(self, get_response):
self.get_response = get_response
def get_user_token(self, email, token):
try:
device = Device.objects.get(token=token)
if device.user.email != email:
return None
return user
except:
return None
def __call__(self, request):
if not request.user.is_authenticated and 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2 and auth[0].lower() == "basic":
email, token = base64.b64decode(auth[1]).decode('utf-8').split(':')
user = self.get_user_token(email, token)
if user:
request.user = user
return self.get_response(request)
|
import base64
from .models import Device
class BasicAuthRemote(object):
def __init__(self, get_response):
self.get_response = get_response
def get_user_token(self, email, token):
try:
device = Device.objects.get(token=token).select_related('user')
if device.user.email is not email:
return None
return user
except:
return None
def __call__(self, request):
if not request.user.is_authenticated and 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2 and auth[0].lower() == "basic":
email, token = base64.b64decode(auth[1]).decode('utf-8').split(':')
user = self.get_user_token(email, token)
if user:
request.user = user
return self.get_response(request)
Fix 401 error for requests that require authenticationimport base64
from .models import Device
class BasicAuthRemote(object):
def __init__(self, get_response):
self.get_response = get_response
def get_user_token(self, email, token):
try:
device = Device.objects.get(token=token)
if device.user.email != email:
return None
return user
except:
return None
def __call__(self, request):
if not request.user.is_authenticated and 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2 and auth[0].lower() == "basic":
email, token = base64.b64decode(auth[1]).decode('utf-8').split(':')
user = self.get_user_token(email, token)
if user:
request.user = user
return self.get_response(request)
|
<commit_before>import base64
from .models import Device
class BasicAuthRemote(object):
def __init__(self, get_response):
self.get_response = get_response
def get_user_token(self, email, token):
try:
device = Device.objects.get(token=token).select_related('user')
if device.user.email is not email:
return None
return user
except:
return None
def __call__(self, request):
if not request.user.is_authenticated and 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2 and auth[0].lower() == "basic":
email, token = base64.b64decode(auth[1]).decode('utf-8').split(':')
user = self.get_user_token(email, token)
if user:
request.user = user
return self.get_response(request)
<commit_msg>Fix 401 error for requests that require authentication<commit_after>import base64
from .models import Device
class BasicAuthRemote(object):
def __init__(self, get_response):
self.get_response = get_response
def get_user_token(self, email, token):
try:
device = Device.objects.get(token=token)
if device.user.email != email:
return None
return user
except:
return None
def __call__(self, request):
if not request.user.is_authenticated and 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2 and auth[0].lower() == "basic":
email, token = base64.b64decode(auth[1]).decode('utf-8').split(':')
user = self.get_user_token(email, token)
if user:
request.user = user
return self.get_response(request)
|
1a71fba6224a9757f19e702a3b9a1cebf496a754
|
src/loop+blkback/plugin.py
|
src/loop+blkback/plugin.py
|
#!/usr/bin/env python
import os
import sys
import xapi
import xapi.plugin
from xapi.storage.datapath import log
class Implementation(xapi.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.plugin.Unimplemented(base)
|
#!/usr/bin/env python
import os
import sys
import xapi
import xapi.storage.api.plugin
from xapi.storage import log
class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.storage.api.plugin.Unimplemented(base)
|
Use the new xapi.storage package hierarchy
|
Use the new xapi.storage package hierarchy
Signed-off-by: David Scott <63c9eb0ea83039690fefa11afe17873ba8278a56@eu.citrix.com>
|
Python
|
lgpl-2.1
|
jjd27/xapi-storage-datapath-plugins,robertbreker/xapi-storage-datapath-plugins,djs55/xapi-storage-datapath-plugins,xapi-project/xapi-storage-datapath-plugins,stefanopanella/xapi-storage-plugins,stefanopanella/xapi-storage-plugins,stefanopanella/xapi-storage-plugins
|
#!/usr/bin/env python
import os
import sys
import xapi
import xapi.plugin
from xapi.storage.datapath import log
class Implementation(xapi.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.plugin.Unimplemented(base)
Use the new xapi.storage package hierarchy
Signed-off-by: David Scott <63c9eb0ea83039690fefa11afe17873ba8278a56@eu.citrix.com>
|
#!/usr/bin/env python
import os
import sys
import xapi
import xapi.storage.api.plugin
from xapi.storage import log
class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.storage.api.plugin.Unimplemented(base)
|
<commit_before>#!/usr/bin/env python
import os
import sys
import xapi
import xapi.plugin
from xapi.storage.datapath import log
class Implementation(xapi.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.plugin.Unimplemented(base)
<commit_msg>Use the new xapi.storage package hierarchy
Signed-off-by: David Scott <63c9eb0ea83039690fefa11afe17873ba8278a56@eu.citrix.com><commit_after>
|
#!/usr/bin/env python
import os
import sys
import xapi
import xapi.storage.api.plugin
from xapi.storage import log
class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.storage.api.plugin.Unimplemented(base)
|
#!/usr/bin/env python
import os
import sys
import xapi
import xapi.plugin
from xapi.storage.datapath import log
class Implementation(xapi.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.plugin.Unimplemented(base)
Use the new xapi.storage package hierarchy
Signed-off-by: David Scott <63c9eb0ea83039690fefa11afe17873ba8278a56@eu.citrix.com>#!/usr/bin/env python
import os
import sys
import xapi
import xapi.storage.api.plugin
from xapi.storage import log
class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.storage.api.plugin.Unimplemented(base)
|
<commit_before>#!/usr/bin/env python
import os
import sys
import xapi
import xapi.plugin
from xapi.storage.datapath import log
class Implementation(xapi.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.plugin.Unimplemented(base)
<commit_msg>Use the new xapi.storage package hierarchy
Signed-off-by: David Scott <63c9eb0ea83039690fefa11afe17873ba8278a56@eu.citrix.com><commit_after>#!/usr/bin/env python
import os
import sys
import xapi
import xapi.storage.api.plugin
from xapi.storage import log
class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.storage.api.plugin.Unimplemented(base)
|
cb6c73b59ddfdd01f6a2f75b65e8a9e06339c87d
|
src/setup.py
|
src/setup.py
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.txt')) as f:
long_description = f.read()
setup(
name="pyshark",
version="0.3.7.2",
packages=find_packages(),
package_data={'': ['*.ini', '*.pcapng']},
# Temporarily using trollis 1.0.4 until issue https://github.com/haypo/trollius/issues/4 is resolved.
install_requires=['lxml', 'py', 'trollius==1.0.4', 'logbook'],
tests_require=['mock', 'pytest'],
url="https://github.com/KimiNewt/pyshark",
long_description=long_description,
author="KimiNewt",
description="Python wrapper for tshark, allowing python packet parsing using wireshark dissectors",
keywords="wireshark capture packets parsing packet",
use_2to3=True,
)
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.txt')) as f:
long_description = f.read()
setup(
name="pyshark",
version="0.3.7.2",
packages=find_packages(),
package_data={'': ['*.ini', '*.pcapng']},
# Temporarily using trollis 1.0.4 until issue https://github.com/haypo/trollius/issues/4 is resolved.
install_requires=['lxml', 'py', 'trollius==1.0.4', 'logbook'],
tests_require=['mock', 'pytest'],
url="https://github.com/KimiNewt/pyshark",
long_description=long_description,
author="KimiNewt",
description="Python wrapper for tshark, allowing python packet parsing using wireshark dissectors",
keywords="wireshark capture packets parsing packet",
use_2to3=True,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
Declare MIT license and support for Python 2 and 3
|
Declare MIT license and support for Python 2 and 3
|
Python
|
mit
|
KimiNewt/pyshark
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.txt')) as f:
long_description = f.read()
setup(
name="pyshark",
version="0.3.7.2",
packages=find_packages(),
package_data={'': ['*.ini', '*.pcapng']},
# Temporarily using trollis 1.0.4 until issue https://github.com/haypo/trollius/issues/4 is resolved.
install_requires=['lxml', 'py', 'trollius==1.0.4', 'logbook'],
tests_require=['mock', 'pytest'],
url="https://github.com/KimiNewt/pyshark",
long_description=long_description,
author="KimiNewt",
description="Python wrapper for tshark, allowing python packet parsing using wireshark dissectors",
keywords="wireshark capture packets parsing packet",
use_2to3=True,
)
Declare MIT license and support for Python 2 and 3
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.txt')) as f:
long_description = f.read()
setup(
name="pyshark",
version="0.3.7.2",
packages=find_packages(),
package_data={'': ['*.ini', '*.pcapng']},
# Temporarily using trollis 1.0.4 until issue https://github.com/haypo/trollius/issues/4 is resolved.
install_requires=['lxml', 'py', 'trollius==1.0.4', 'logbook'],
tests_require=['mock', 'pytest'],
url="https://github.com/KimiNewt/pyshark",
long_description=long_description,
author="KimiNewt",
description="Python wrapper for tshark, allowing python packet parsing using wireshark dissectors",
keywords="wireshark capture packets parsing packet",
use_2to3=True,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.txt')) as f:
long_description = f.read()
setup(
name="pyshark",
version="0.3.7.2",
packages=find_packages(),
package_data={'': ['*.ini', '*.pcapng']},
# Temporarily using trollis 1.0.4 until issue https://github.com/haypo/trollius/issues/4 is resolved.
install_requires=['lxml', 'py', 'trollius==1.0.4', 'logbook'],
tests_require=['mock', 'pytest'],
url="https://github.com/KimiNewt/pyshark",
long_description=long_description,
author="KimiNewt",
description="Python wrapper for tshark, allowing python packet parsing using wireshark dissectors",
keywords="wireshark capture packets parsing packet",
use_2to3=True,
)
<commit_msg>Declare MIT license and support for Python 2 and 3<commit_after>
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.txt')) as f:
long_description = f.read()
setup(
name="pyshark",
version="0.3.7.2",
packages=find_packages(),
package_data={'': ['*.ini', '*.pcapng']},
# Temporarily using trollis 1.0.4 until issue https://github.com/haypo/trollius/issues/4 is resolved.
install_requires=['lxml', 'py', 'trollius==1.0.4', 'logbook'],
tests_require=['mock', 'pytest'],
url="https://github.com/KimiNewt/pyshark",
long_description=long_description,
author="KimiNewt",
description="Python wrapper for tshark, allowing python packet parsing using wireshark dissectors",
keywords="wireshark capture packets parsing packet",
use_2to3=True,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.txt')) as f:
long_description = f.read()
setup(
name="pyshark",
version="0.3.7.2",
packages=find_packages(),
package_data={'': ['*.ini', '*.pcapng']},
# Temporarily using trollis 1.0.4 until issue https://github.com/haypo/trollius/issues/4 is resolved.
install_requires=['lxml', 'py', 'trollius==1.0.4', 'logbook'],
tests_require=['mock', 'pytest'],
url="https://github.com/KimiNewt/pyshark",
long_description=long_description,
author="KimiNewt",
description="Python wrapper for tshark, allowing python packet parsing using wireshark dissectors",
keywords="wireshark capture packets parsing packet",
use_2to3=True,
)
Declare MIT license and support for Python 2 and 3import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.txt')) as f:
long_description = f.read()
setup(
name="pyshark",
version="0.3.7.2",
packages=find_packages(),
package_data={'': ['*.ini', '*.pcapng']},
# Temporarily using trollis 1.0.4 until issue https://github.com/haypo/trollius/issues/4 is resolved.
install_requires=['lxml', 'py', 'trollius==1.0.4', 'logbook'],
tests_require=['mock', 'pytest'],
url="https://github.com/KimiNewt/pyshark",
long_description=long_description,
author="KimiNewt",
description="Python wrapper for tshark, allowing python packet parsing using wireshark dissectors",
keywords="wireshark capture packets parsing packet",
use_2to3=True,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.txt')) as f:
long_description = f.read()
setup(
name="pyshark",
version="0.3.7.2",
packages=find_packages(),
package_data={'': ['*.ini', '*.pcapng']},
# Temporarily using trollis 1.0.4 until issue https://github.com/haypo/trollius/issues/4 is resolved.
install_requires=['lxml', 'py', 'trollius==1.0.4', 'logbook'],
tests_require=['mock', 'pytest'],
url="https://github.com/KimiNewt/pyshark",
long_description=long_description,
author="KimiNewt",
description="Python wrapper for tshark, allowing python packet parsing using wireshark dissectors",
keywords="wireshark capture packets parsing packet",
use_2to3=True,
)
<commit_msg>Declare MIT license and support for Python 2 and 3<commit_after>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.txt')) as f:
long_description = f.read()
setup(
name="pyshark",
version="0.3.7.2",
packages=find_packages(),
package_data={'': ['*.ini', '*.pcapng']},
# Temporarily using trollis 1.0.4 until issue https://github.com/haypo/trollius/issues/4 is resolved.
install_requires=['lxml', 'py', 'trollius==1.0.4', 'logbook'],
tests_require=['mock', 'pytest'],
url="https://github.com/KimiNewt/pyshark",
long_description=long_description,
author="KimiNewt",
description="Python wrapper for tshark, allowing python packet parsing using wireshark dissectors",
keywords="wireshark capture packets parsing packet",
use_2to3=True,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
18afda24f10e06bab6780beb9a489a34110dc482
|
aboutdialog.py
|
aboutdialog.py
|
# -*- coding: utf-8 -*-
from PyQt4.QtGui import QDialog, qApp
from ui.aboutdialog import Ui_AboutDialog
from common import dataDirPath
from version import VERSION
class AboutDialog(QDialog, Ui_AboutDialog):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
icon = qApp.windowIcon()
self.appIcon.setPixmap(icon.pixmap(64, 64))
self.tbAbout.setOpenExternalLinks(True)
self.tbLicense.setOpenExternalLinks(True)
self.__initTabs()
def __initTabs(self):
about = "<center><h3>gitc " + VERSION + "</h3></center>"
about += "<center>"
about += self.tr("Git file conflicts and logs viewer")
about += "</center>"
about += "<center><a href=https://github.com/timxx/gitc>"
about += self.tr("Visit project host")
about += "</a></center><br/>"
about += "<center>Copyright © 2017-2018 Weitian Leung</center>"
self.tbAbout.setHtml(about)
licenseFile = dataDirPath() + "/licenses/Apache-2.0.html"
with open(licenseFile) as f:
self.tbLicense.setHtml(f.read())
|
# -*- coding: utf-8 -*-
from PyQt4.QtGui import QDialog, qApp
from ui.aboutdialog import Ui_AboutDialog
from common import dataDirPath
from version import VERSION
class AboutDialog(QDialog, Ui_AboutDialog):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
icon = qApp.windowIcon()
self.appIcon.setPixmap(icon.pixmap(64, 64))
self.tbAbout.setOpenExternalLinks(True)
self.tbLicense.setOpenExternalLinks(True)
self.__initTabs()
def __initTabs(self):
about = "<center><h3>gitc " + VERSION + "</h3></center>"
about += "<center>"
about += self.tr("Git file conflicts and logs viewer")
about += "</center>"
about += "<center><a href=https://github.com/timxx/gitc>"
about += self.tr("Visit project host")
about += "</a></center><br/>"
about += "<center>Copyright © 2016-2018 Weitian Leung</center>"
self.tbAbout.setHtml(about)
licenseFile = dataDirPath() + "/licenses/Apache-2.0.html"
with open(licenseFile) as f:
self.tbLicense.setHtml(f.read())
|
Fix copyright year, it should start from 2016 :(
|
Fix copyright year, it should start from 2016 :(
|
Python
|
apache-2.0
|
timxx/gitc,timxx/gitc
|
# -*- coding: utf-8 -*-
from PyQt4.QtGui import QDialog, qApp
from ui.aboutdialog import Ui_AboutDialog
from common import dataDirPath
from version import VERSION
class AboutDialog(QDialog, Ui_AboutDialog):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
icon = qApp.windowIcon()
self.appIcon.setPixmap(icon.pixmap(64, 64))
self.tbAbout.setOpenExternalLinks(True)
self.tbLicense.setOpenExternalLinks(True)
self.__initTabs()
def __initTabs(self):
about = "<center><h3>gitc " + VERSION + "</h3></center>"
about += "<center>"
about += self.tr("Git file conflicts and logs viewer")
about += "</center>"
about += "<center><a href=https://github.com/timxx/gitc>"
about += self.tr("Visit project host")
about += "</a></center><br/>"
about += "<center>Copyright © 2017-2018 Weitian Leung</center>"
self.tbAbout.setHtml(about)
licenseFile = dataDirPath() + "/licenses/Apache-2.0.html"
with open(licenseFile) as f:
self.tbLicense.setHtml(f.read())
Fix copyright year, it should start from 2016 :(
|
# -*- coding: utf-8 -*-
from PyQt4.QtGui import QDialog, qApp
from ui.aboutdialog import Ui_AboutDialog
from common import dataDirPath
from version import VERSION
class AboutDialog(QDialog, Ui_AboutDialog):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
icon = qApp.windowIcon()
self.appIcon.setPixmap(icon.pixmap(64, 64))
self.tbAbout.setOpenExternalLinks(True)
self.tbLicense.setOpenExternalLinks(True)
self.__initTabs()
def __initTabs(self):
about = "<center><h3>gitc " + VERSION + "</h3></center>"
about += "<center>"
about += self.tr("Git file conflicts and logs viewer")
about += "</center>"
about += "<center><a href=https://github.com/timxx/gitc>"
about += self.tr("Visit project host")
about += "</a></center><br/>"
about += "<center>Copyright © 2016-2018 Weitian Leung</center>"
self.tbAbout.setHtml(about)
licenseFile = dataDirPath() + "/licenses/Apache-2.0.html"
with open(licenseFile) as f:
self.tbLicense.setHtml(f.read())
|
<commit_before># -*- coding: utf-8 -*-
from PyQt4.QtGui import QDialog, qApp
from ui.aboutdialog import Ui_AboutDialog
from common import dataDirPath
from version import VERSION
class AboutDialog(QDialog, Ui_AboutDialog):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
icon = qApp.windowIcon()
self.appIcon.setPixmap(icon.pixmap(64, 64))
self.tbAbout.setOpenExternalLinks(True)
self.tbLicense.setOpenExternalLinks(True)
self.__initTabs()
def __initTabs(self):
about = "<center><h3>gitc " + VERSION + "</h3></center>"
about += "<center>"
about += self.tr("Git file conflicts and logs viewer")
about += "</center>"
about += "<center><a href=https://github.com/timxx/gitc>"
about += self.tr("Visit project host")
about += "</a></center><br/>"
about += "<center>Copyright © 2017-2018 Weitian Leung</center>"
self.tbAbout.setHtml(about)
licenseFile = dataDirPath() + "/licenses/Apache-2.0.html"
with open(licenseFile) as f:
self.tbLicense.setHtml(f.read())
<commit_msg>Fix copyright year, it should start from 2016 :(<commit_after>
|
# -*- coding: utf-8 -*-
from PyQt4.QtGui import QDialog, qApp
from ui.aboutdialog import Ui_AboutDialog
from common import dataDirPath
from version import VERSION
class AboutDialog(QDialog, Ui_AboutDialog):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
icon = qApp.windowIcon()
self.appIcon.setPixmap(icon.pixmap(64, 64))
self.tbAbout.setOpenExternalLinks(True)
self.tbLicense.setOpenExternalLinks(True)
self.__initTabs()
def __initTabs(self):
about = "<center><h3>gitc " + VERSION + "</h3></center>"
about += "<center>"
about += self.tr("Git file conflicts and logs viewer")
about += "</center>"
about += "<center><a href=https://github.com/timxx/gitc>"
about += self.tr("Visit project host")
about += "</a></center><br/>"
about += "<center>Copyright © 2016-2018 Weitian Leung</center>"
self.tbAbout.setHtml(about)
licenseFile = dataDirPath() + "/licenses/Apache-2.0.html"
with open(licenseFile) as f:
self.tbLicense.setHtml(f.read())
|
# -*- coding: utf-8 -*-
from PyQt4.QtGui import QDialog, qApp
from ui.aboutdialog import Ui_AboutDialog
from common import dataDirPath
from version import VERSION
class AboutDialog(QDialog, Ui_AboutDialog):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
icon = qApp.windowIcon()
self.appIcon.setPixmap(icon.pixmap(64, 64))
self.tbAbout.setOpenExternalLinks(True)
self.tbLicense.setOpenExternalLinks(True)
self.__initTabs()
def __initTabs(self):
about = "<center><h3>gitc " + VERSION + "</h3></center>"
about += "<center>"
about += self.tr("Git file conflicts and logs viewer")
about += "</center>"
about += "<center><a href=https://github.com/timxx/gitc>"
about += self.tr("Visit project host")
about += "</a></center><br/>"
about += "<center>Copyright © 2017-2018 Weitian Leung</center>"
self.tbAbout.setHtml(about)
licenseFile = dataDirPath() + "/licenses/Apache-2.0.html"
with open(licenseFile) as f:
self.tbLicense.setHtml(f.read())
Fix copyright year, it should start from 2016 :(# -*- coding: utf-8 -*-
from PyQt4.QtGui import QDialog, qApp
from ui.aboutdialog import Ui_AboutDialog
from common import dataDirPath
from version import VERSION
class AboutDialog(QDialog, Ui_AboutDialog):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
icon = qApp.windowIcon()
self.appIcon.setPixmap(icon.pixmap(64, 64))
self.tbAbout.setOpenExternalLinks(True)
self.tbLicense.setOpenExternalLinks(True)
self.__initTabs()
def __initTabs(self):
about = "<center><h3>gitc " + VERSION + "</h3></center>"
about += "<center>"
about += self.tr("Git file conflicts and logs viewer")
about += "</center>"
about += "<center><a href=https://github.com/timxx/gitc>"
about += self.tr("Visit project host")
about += "</a></center><br/>"
about += "<center>Copyright © 2016-2018 Weitian Leung</center>"
self.tbAbout.setHtml(about)
licenseFile = dataDirPath() + "/licenses/Apache-2.0.html"
with open(licenseFile) as f:
self.tbLicense.setHtml(f.read())
|
<commit_before># -*- coding: utf-8 -*-
from PyQt4.QtGui import QDialog, qApp
from ui.aboutdialog import Ui_AboutDialog
from common import dataDirPath
from version import VERSION
class AboutDialog(QDialog, Ui_AboutDialog):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
icon = qApp.windowIcon()
self.appIcon.setPixmap(icon.pixmap(64, 64))
self.tbAbout.setOpenExternalLinks(True)
self.tbLicense.setOpenExternalLinks(True)
self.__initTabs()
def __initTabs(self):
about = "<center><h3>gitc " + VERSION + "</h3></center>"
about += "<center>"
about += self.tr("Git file conflicts and logs viewer")
about += "</center>"
about += "<center><a href=https://github.com/timxx/gitc>"
about += self.tr("Visit project host")
about += "</a></center><br/>"
about += "<center>Copyright © 2017-2018 Weitian Leung</center>"
self.tbAbout.setHtml(about)
licenseFile = dataDirPath() + "/licenses/Apache-2.0.html"
with open(licenseFile) as f:
self.tbLicense.setHtml(f.read())
<commit_msg>Fix copyright year, it should start from 2016 :(<commit_after># -*- coding: utf-8 -*-
from PyQt4.QtGui import QDialog, qApp
from ui.aboutdialog import Ui_AboutDialog
from common import dataDirPath
from version import VERSION
class AboutDialog(QDialog, Ui_AboutDialog):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
self.setupUi(self)
icon = qApp.windowIcon()
self.appIcon.setPixmap(icon.pixmap(64, 64))
self.tbAbout.setOpenExternalLinks(True)
self.tbLicense.setOpenExternalLinks(True)
self.__initTabs()
def __initTabs(self):
about = "<center><h3>gitc " + VERSION + "</h3></center>"
about += "<center>"
about += self.tr("Git file conflicts and logs viewer")
about += "</center>"
about += "<center><a href=https://github.com/timxx/gitc>"
about += self.tr("Visit project host")
about += "</a></center><br/>"
about += "<center>Copyright © 2016-2018 Weitian Leung</center>"
self.tbAbout.setHtml(about)
licenseFile = dataDirPath() + "/licenses/Apache-2.0.html"
with open(licenseFile) as f:
self.tbLicense.setHtml(f.read())
|
625a0c88283d838093fdfd6601c7482a3cc003c9
|
cptm/experiment_calculate_perspective_jsd.py
|
cptm/experiment_calculate_perspective_jsd.py
|
import logging
import argparse
from utils.experiment import load_config, get_corpus
from utils.controversialissues import perspective_jsd_matrix
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
args = parser.parse_args()
config = load_config(args.json)
corpus = get_corpus(config)
nTopics = config.get('nTopics')
perspectives = [p.name for p in corpus.perspectives]
perspective_jsd = perspective_jsd_matrix(config, nTopics, perspectives)
print perspective_jsd
print perspective_jsd.sum(axis=(2, 1))
|
import logging
import argparse
import numpy as np
from utils.experiment import load_config, get_corpus
from utils.controversialissues import perspective_jsd_matrix
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
args = parser.parse_args()
config = load_config(args.json)
corpus = get_corpus(config)
nTopics = config.get('nTopics')
perspectives = [p.name for p in corpus.perspectives]
perspective_jsd = perspective_jsd_matrix(config, nTopics, perspectives)
print perspective_jsd
print perspective_jsd.sum(axis=(2, 1))
np.save(config.get('outDir').format('perspective_jsd.npy'), perspective_jsd)
|
Save results of perspective jsd calculation to file
|
Save results of perspective jsd calculation to file
|
Python
|
apache-2.0
|
NLeSC/cptm,NLeSC/cptm
|
import logging
import argparse
from utils.experiment import load_config, get_corpus
from utils.controversialissues import perspective_jsd_matrix
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
args = parser.parse_args()
config = load_config(args.json)
corpus = get_corpus(config)
nTopics = config.get('nTopics')
perspectives = [p.name for p in corpus.perspectives]
perspective_jsd = perspective_jsd_matrix(config, nTopics, perspectives)
print perspective_jsd
print perspective_jsd.sum(axis=(2, 1))
Save results of perspective jsd calculation to file
|
import logging
import argparse
import numpy as np
from utils.experiment import load_config, get_corpus
from utils.controversialissues import perspective_jsd_matrix
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
args = parser.parse_args()
config = load_config(args.json)
corpus = get_corpus(config)
nTopics = config.get('nTopics')
perspectives = [p.name for p in corpus.perspectives]
perspective_jsd = perspective_jsd_matrix(config, nTopics, perspectives)
print perspective_jsd
print perspective_jsd.sum(axis=(2, 1))
np.save(config.get('outDir').format('perspective_jsd.npy'), perspective_jsd)
|
<commit_before>import logging
import argparse
from utils.experiment import load_config, get_corpus
from utils.controversialissues import perspective_jsd_matrix
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
args = parser.parse_args()
config = load_config(args.json)
corpus = get_corpus(config)
nTopics = config.get('nTopics')
perspectives = [p.name for p in corpus.perspectives]
perspective_jsd = perspective_jsd_matrix(config, nTopics, perspectives)
print perspective_jsd
print perspective_jsd.sum(axis=(2, 1))
<commit_msg>Save results of perspective jsd calculation to file<commit_after>
|
import logging
import argparse
import numpy as np
from utils.experiment import load_config, get_corpus
from utils.controversialissues import perspective_jsd_matrix
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
args = parser.parse_args()
config = load_config(args.json)
corpus = get_corpus(config)
nTopics = config.get('nTopics')
perspectives = [p.name for p in corpus.perspectives]
perspective_jsd = perspective_jsd_matrix(config, nTopics, perspectives)
print perspective_jsd
print perspective_jsd.sum(axis=(2, 1))
np.save(config.get('outDir').format('perspective_jsd.npy'), perspective_jsd)
|
import logging
import argparse
from utils.experiment import load_config, get_corpus
from utils.controversialissues import perspective_jsd_matrix
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
args = parser.parse_args()
config = load_config(args.json)
corpus = get_corpus(config)
nTopics = config.get('nTopics')
perspectives = [p.name for p in corpus.perspectives]
perspective_jsd = perspective_jsd_matrix(config, nTopics, perspectives)
print perspective_jsd
print perspective_jsd.sum(axis=(2, 1))
Save results of perspective jsd calculation to fileimport logging
import argparse
import numpy as np
from utils.experiment import load_config, get_corpus
from utils.controversialissues import perspective_jsd_matrix
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
args = parser.parse_args()
config = load_config(args.json)
corpus = get_corpus(config)
nTopics = config.get('nTopics')
perspectives = [p.name for p in corpus.perspectives]
perspective_jsd = perspective_jsd_matrix(config, nTopics, perspectives)
print perspective_jsd
print perspective_jsd.sum(axis=(2, 1))
np.save(config.get('outDir').format('perspective_jsd.npy'), perspective_jsd)
|
<commit_before>import logging
import argparse
from utils.experiment import load_config, get_corpus
from utils.controversialissues import perspective_jsd_matrix
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
args = parser.parse_args()
config = load_config(args.json)
corpus = get_corpus(config)
nTopics = config.get('nTopics')
perspectives = [p.name for p in corpus.perspectives]
perspective_jsd = perspective_jsd_matrix(config, nTopics, perspectives)
print perspective_jsd
print perspective_jsd.sum(axis=(2, 1))
<commit_msg>Save results of perspective jsd calculation to file<commit_after>import logging
import argparse
import numpy as np
from utils.experiment import load_config, get_corpus
from utils.controversialissues import perspective_jsd_matrix
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
args = parser.parse_args()
config = load_config(args.json)
corpus = get_corpus(config)
nTopics = config.get('nTopics')
perspectives = [p.name for p in corpus.perspectives]
perspective_jsd = perspective_jsd_matrix(config, nTopics, perspectives)
print perspective_jsd
print perspective_jsd.sum(axis=(2, 1))
np.save(config.get('outDir').format('perspective_jsd.npy'), perspective_jsd)
|
683257082b9e2d0aba27e6124cd419a4cf19d2a9
|
docupload/htmlify.py
|
docupload/htmlify.py
|
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
return pypandoc.convert(tmp_loc, 'html5')
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import os
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
html = pypandoc.convert(tmp_loc, 'html5')
os.remove(tmp_loc)
return html
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
Remove tmp file after conversion
|
Remove tmp file after conversion
|
Python
|
mit
|
vaibhawW/oksp,vaibhawW/oksp
|
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
return pypandoc.convert(tmp_loc, 'html5')
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
Remove tmp file after conversion
|
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import os
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
html = pypandoc.convert(tmp_loc, 'html5')
os.remove(tmp_loc)
return html
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
<commit_before>'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
return pypandoc.convert(tmp_loc, 'html5')
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
<commit_msg>Remove tmp file after conversion<commit_after>
|
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import os
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
html = pypandoc.convert(tmp_loc, 'html5')
os.remove(tmp_loc)
return html
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
return pypandoc.convert(tmp_loc, 'html5')
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
Remove tmp file after conversion'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import os
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
html = pypandoc.convert(tmp_loc, 'html5')
os.remove(tmp_loc)
return html
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
<commit_before>'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
return pypandoc.convert(tmp_loc, 'html5')
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
<commit_msg>Remove tmp file after conversion<commit_after>'''
HTMLify: Convert any fileformat supported by pandoc to HTML5
'''
import os
import pypandoc
def get_html(doc_file):
'''Uses pypandoc to convert uploaded file to HTML5'''
tmp_loc = '/tmp/uploaded_' + str(doc_file)
with open(tmp_loc, 'wb') as tmp_file:
for chunk in doc_file.chunks():
tmp_file.write(chunk)
html = pypandoc.convert(tmp_loc, 'html5')
os.remove(tmp_loc)
return html
class HTMLifier():
'''
HTMLifier: Class which handles conversion of any docx/md/tex file to HTML
'''
def __init__(self, doc_base_path='.'):
self.doc_base_path = doc_base_path
def convert(self, doc_file):
'''Middleware function to interface with different <format>_convert functions'''
file_name = str(doc_file)
ext = file_name.split('.')[-1]
file_name = file_name[:len(file_name) - len(ext) - 1]
doc_dir = self.doc_base_path
html = get_html(doc_file)
with open(doc_dir + file_name + '.html', 'wb') as doc_stored:
doc_stored.write(bytes(html, 'utf-8'))
return file_name + '.html'
|
f66fc484cc11b212fc3db22d8956be5f4fd6c0b7
|
firecares/settings/production.py
|
firecares/settings/production.py
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
AWS_QUERYSTRING_AUTH = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
Disable query string auth for django compressor.
|
Disable query string auth for django compressor.
|
Python
|
mit
|
HunterConnelly/firecares,ROGUE-JCTD/vida,garnertb/firecares,HunterConnelly/firecares,FireCARES/firecares,ROGUE-JCTD/vida,garnertb/firecares,garnertb/firecares,FireCARES/firecares,meilinger/firecares,garnertb/firecares,ROGUE-JCTD/vida,HunterConnelly/firecares,meilinger/firecares,HunterConnelly/firecares,meilinger/firecares,FireCARES/firecares,FireCARES/firecares,meilinger/firecares,FireCARES/firecares,ROGUE-JCTD/vida,ROGUE-JCTD/vida
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
try:
from local_settings import * # noqa
except ImportError:
passDisable query string auth for django compressor.
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
AWS_QUERYSTRING_AUTH = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
<commit_before>from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
try:
from local_settings import * # noqa
except ImportError:
pass<commit_msg>Disable query string auth for django compressor.<commit_after>
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
AWS_QUERYSTRING_AUTH = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
try:
from local_settings import * # noqa
except ImportError:
passDisable query string auth for django compressor.from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
AWS_QUERYSTRING_AUTH = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
<commit_before>from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
try:
from local_settings import * # noqa
except ImportError:
pass<commit_msg>Disable query string auth for django compressor.<commit_after>from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
AWS_QUERYSTRING_AUTH = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
e9cb0bff470dc6bfc926f0b4ac6214ae8a028e61
|
vcr/files.py
|
vcr/files.py
|
import os
import yaml
from .cassette import Cassette
def load_cassette(cassette_path):
try:
pc = yaml.load(open(cassette_path))
cassette = Cassette(pc)
return cassette
except IOError:
return None
def save_cassette(cassette_path, cassette):
dirname, filename = os.path.split(cassette_path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'a') as cassette_file:
cassette_file.write(yaml.dump(cassette.serialize()))
|
import os
import yaml
from .cassette import Cassette
# Use the libYAML versions if possible
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
def load_cassette(cassette_path):
try:
pc = yaml.load(open(cassette_path), Loader=Loader)
cassette = Cassette(pc)
return cassette
except IOError:
return None
def save_cassette(cassette_path, cassette):
dirname, filename = os.path.split(cassette_path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'a') as cassette_file:
cassette_file.write(yaml.dump(cassette.serialize(), Dumper=Dumper))
|
Use the libYAML version of yaml if it's available
|
Use the libYAML version of yaml if it's available
|
Python
|
mit
|
ByteInternet/vcrpy,aclevy/vcrpy,ByteInternet/vcrpy,kevin1024/vcrpy,poussik/vcrpy,bcen/vcrpy,yarikoptic/vcrpy,agriffis/vcrpy,graingert/vcrpy,poussik/vcrpy,gwillem/vcrpy,mgeisler/vcrpy,kevin1024/vcrpy,IvanMalison/vcrpy,graingert/vcrpy
|
import os
import yaml
from .cassette import Cassette
def load_cassette(cassette_path):
try:
pc = yaml.load(open(cassette_path))
cassette = Cassette(pc)
return cassette
except IOError:
return None
def save_cassette(cassette_path, cassette):
dirname, filename = os.path.split(cassette_path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'a') as cassette_file:
cassette_file.write(yaml.dump(cassette.serialize()))
Use the libYAML version of yaml if it's available
|
import os
import yaml
from .cassette import Cassette
# Use the libYAML versions if possible
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
def load_cassette(cassette_path):
try:
pc = yaml.load(open(cassette_path), Loader=Loader)
cassette = Cassette(pc)
return cassette
except IOError:
return None
def save_cassette(cassette_path, cassette):
dirname, filename = os.path.split(cassette_path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'a') as cassette_file:
cassette_file.write(yaml.dump(cassette.serialize(), Dumper=Dumper))
|
<commit_before>import os
import yaml
from .cassette import Cassette
def load_cassette(cassette_path):
try:
pc = yaml.load(open(cassette_path))
cassette = Cassette(pc)
return cassette
except IOError:
return None
def save_cassette(cassette_path, cassette):
dirname, filename = os.path.split(cassette_path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'a') as cassette_file:
cassette_file.write(yaml.dump(cassette.serialize()))
<commit_msg>Use the libYAML version of yaml if it's available<commit_after>
|
import os
import yaml
from .cassette import Cassette
# Use the libYAML versions if possible
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
def load_cassette(cassette_path):
try:
pc = yaml.load(open(cassette_path), Loader=Loader)
cassette = Cassette(pc)
return cassette
except IOError:
return None
def save_cassette(cassette_path, cassette):
dirname, filename = os.path.split(cassette_path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'a') as cassette_file:
cassette_file.write(yaml.dump(cassette.serialize(), Dumper=Dumper))
|
import os
import yaml
from .cassette import Cassette
def load_cassette(cassette_path):
try:
pc = yaml.load(open(cassette_path))
cassette = Cassette(pc)
return cassette
except IOError:
return None
def save_cassette(cassette_path, cassette):
dirname, filename = os.path.split(cassette_path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'a') as cassette_file:
cassette_file.write(yaml.dump(cassette.serialize()))
Use the libYAML version of yaml if it's availableimport os
import yaml
from .cassette import Cassette
# Use the libYAML versions if possible
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
def load_cassette(cassette_path):
try:
pc = yaml.load(open(cassette_path), Loader=Loader)
cassette = Cassette(pc)
return cassette
except IOError:
return None
def save_cassette(cassette_path, cassette):
dirname, filename = os.path.split(cassette_path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'a') as cassette_file:
cassette_file.write(yaml.dump(cassette.serialize(), Dumper=Dumper))
|
<commit_before>import os
import yaml
from .cassette import Cassette
def load_cassette(cassette_path):
try:
pc = yaml.load(open(cassette_path))
cassette = Cassette(pc)
return cassette
except IOError:
return None
def save_cassette(cassette_path, cassette):
dirname, filename = os.path.split(cassette_path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'a') as cassette_file:
cassette_file.write(yaml.dump(cassette.serialize()))
<commit_msg>Use the libYAML version of yaml if it's available<commit_after>import os
import yaml
from .cassette import Cassette
# Use the libYAML versions if possible
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
def load_cassette(cassette_path):
try:
pc = yaml.load(open(cassette_path), Loader=Loader)
cassette = Cassette(pc)
return cassette
except IOError:
return None
def save_cassette(cassette_path, cassette):
dirname, filename = os.path.split(cassette_path)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'a') as cassette_file:
cassette_file.write(yaml.dump(cassette.serialize(), Dumper=Dumper))
|
525a624047fecce2acf4484c39bc244ad16e11c5
|
src/state/objects/Learn.py
|
src/state/objects/Learn.py
|
import sys
import os
sys.path.append(os.path.dirname(__file__) + "/../../")
from state import state
from state.stateEnum import StateEnum
from helpers import configHelper
from helpers import processorHelper
class Learn:
current_word = 0
def __init__(self, level_number):
self.number = level_number
self.words = configHelper.get_level_config('learn', self.number)['words']
self.output = processorHelper.get_output_processor()
print("Nivel " + str(self.number) + " de aprendizaje")
self._print_word()
def process_input(self, input_value):
if input_value == "right":
self.current_word += 1
self._verify_overflow()
self._print_word()
if input_value == "left":
self.current_word -= 1
self._verify_overflow()
self._print_word()
if input_value == "back":
print("Regresando a " + StateEnum.MENU.realName)
state.set_state(StateEnum.MENU.key)
def _verify_overflow(self):
pass
def _print_word(self):
word = self.words[self.current_word]
print(word)
self.output.write(word.upper())
|
import sys
import os
sys.path.append(os.path.dirname(__file__) + "/../../")
from state import state
from state.stateEnum import StateEnum
from helpers import configHelper
from helpers import processorHelper
class Learn:
current_word = 0
def __init__(self, level_number):
self.number = level_number
self.words = configHelper.get_level_config('learn', self.number)['words']
self.output = processorHelper.get_output_processor()
print("Nivel " + str(self.number) + " de aprendizaje")
self._print_word()
def process_input(self, input_value):
if input_value == "right":
self.current_word += 1
if not self._verify_overflow():
self._print_word()
else:
print("Se termino el nivel " + str(self.number) + " de aprendizaje. Saliendo al menu principal ...")
state.set_state(StateEnum.MENU.key)
if input_value == "left":
self.current_word -= 1
if not self._verify_overflow():
self._print_word()
else:
self.current_word = 0
if input_value == "back":
print("Regresando a " + StateEnum.MENU.realName)
state.set_state(StateEnum.MENU.key)
def _verify_overflow(self):
return self.current_word == -1 or self.current_word == len(self.words)
def _print_word(self):
word = self.words[self.current_word]
print(word)
self.output.write(word.upper())
|
Verify overflow in words array into the learn module
|
Verify overflow in words array into the learn module
|
Python
|
mit
|
Blindle/Raspberry
|
import sys
import os
sys.path.append(os.path.dirname(__file__) + "/../../")
from state import state
from state.stateEnum import StateEnum
from helpers import configHelper
from helpers import processorHelper
class Learn:
current_word = 0
def __init__(self, level_number):
self.number = level_number
self.words = configHelper.get_level_config('learn', self.number)['words']
self.output = processorHelper.get_output_processor()
print("Nivel " + str(self.number) + " de aprendizaje")
self._print_word()
def process_input(self, input_value):
if input_value == "right":
self.current_word += 1
self._verify_overflow()
self._print_word()
if input_value == "left":
self.current_word -= 1
self._verify_overflow()
self._print_word()
if input_value == "back":
print("Regresando a " + StateEnum.MENU.realName)
state.set_state(StateEnum.MENU.key)
def _verify_overflow(self):
pass
def _print_word(self):
word = self.words[self.current_word]
print(word)
self.output.write(word.upper())Verify overflow in words array into the learn module
|
import sys
import os
sys.path.append(os.path.dirname(__file__) + "/../../")
from state import state
from state.stateEnum import StateEnum
from helpers import configHelper
from helpers import processorHelper
class Learn:
current_word = 0
def __init__(self, level_number):
self.number = level_number
self.words = configHelper.get_level_config('learn', self.number)['words']
self.output = processorHelper.get_output_processor()
print("Nivel " + str(self.number) + " de aprendizaje")
self._print_word()
def process_input(self, input_value):
if input_value == "right":
self.current_word += 1
if not self._verify_overflow():
self._print_word()
else:
print("Se termino el nivel " + str(self.number) + " de aprendizaje. Saliendo al menu principal ...")
state.set_state(StateEnum.MENU.key)
if input_value == "left":
self.current_word -= 1
if not self._verify_overflow():
self._print_word()
else:
self.current_word = 0
if input_value == "back":
print("Regresando a " + StateEnum.MENU.realName)
state.set_state(StateEnum.MENU.key)
def _verify_overflow(self):
return self.current_word == -1 or self.current_word == len(self.words)
def _print_word(self):
word = self.words[self.current_word]
print(word)
self.output.write(word.upper())
|
<commit_before>import sys
import os
sys.path.append(os.path.dirname(__file__) + "/../../")
from state import state
from state.stateEnum import StateEnum
from helpers import configHelper
from helpers import processorHelper
class Learn:
current_word = 0
def __init__(self, level_number):
self.number = level_number
self.words = configHelper.get_level_config('learn', self.number)['words']
self.output = processorHelper.get_output_processor()
print("Nivel " + str(self.number) + " de aprendizaje")
self._print_word()
def process_input(self, input_value):
if input_value == "right":
self.current_word += 1
self._verify_overflow()
self._print_word()
if input_value == "left":
self.current_word -= 1
self._verify_overflow()
self._print_word()
if input_value == "back":
print("Regresando a " + StateEnum.MENU.realName)
state.set_state(StateEnum.MENU.key)
def _verify_overflow(self):
pass
def _print_word(self):
word = self.words[self.current_word]
print(word)
self.output.write(word.upper())<commit_msg>Verify overflow in words array into the learn module<commit_after>
|
import sys
import os
sys.path.append(os.path.dirname(__file__) + "/../../")
from state import state
from state.stateEnum import StateEnum
from helpers import configHelper
from helpers import processorHelper
class Learn:
current_word = 0
def __init__(self, level_number):
self.number = level_number
self.words = configHelper.get_level_config('learn', self.number)['words']
self.output = processorHelper.get_output_processor()
print("Nivel " + str(self.number) + " de aprendizaje")
self._print_word()
def process_input(self, input_value):
if input_value == "right":
self.current_word += 1
if not self._verify_overflow():
self._print_word()
else:
print("Se termino el nivel " + str(self.number) + " de aprendizaje. Saliendo al menu principal ...")
state.set_state(StateEnum.MENU.key)
if input_value == "left":
self.current_word -= 1
if not self._verify_overflow():
self._print_word()
else:
self.current_word = 0
if input_value == "back":
print("Regresando a " + StateEnum.MENU.realName)
state.set_state(StateEnum.MENU.key)
def _verify_overflow(self):
return self.current_word == -1 or self.current_word == len(self.words)
def _print_word(self):
word = self.words[self.current_word]
print(word)
self.output.write(word.upper())
|
import sys
import os
sys.path.append(os.path.dirname(__file__) + "/../../")
from state import state
from state.stateEnum import StateEnum
from helpers import configHelper
from helpers import processorHelper
class Learn:
current_word = 0
def __init__(self, level_number):
self.number = level_number
self.words = configHelper.get_level_config('learn', self.number)['words']
self.output = processorHelper.get_output_processor()
print("Nivel " + str(self.number) + " de aprendizaje")
self._print_word()
def process_input(self, input_value):
if input_value == "right":
self.current_word += 1
self._verify_overflow()
self._print_word()
if input_value == "left":
self.current_word -= 1
self._verify_overflow()
self._print_word()
if input_value == "back":
print("Regresando a " + StateEnum.MENU.realName)
state.set_state(StateEnum.MENU.key)
def _verify_overflow(self):
pass
def _print_word(self):
word = self.words[self.current_word]
print(word)
self.output.write(word.upper())Verify overflow in words array into the learn moduleimport sys
import os
sys.path.append(os.path.dirname(__file__) + "/../../")
from state import state
from state.stateEnum import StateEnum
from helpers import configHelper
from helpers import processorHelper
class Learn:
current_word = 0
def __init__(self, level_number):
self.number = level_number
self.words = configHelper.get_level_config('learn', self.number)['words']
self.output = processorHelper.get_output_processor()
print("Nivel " + str(self.number) + " de aprendizaje")
self._print_word()
def process_input(self, input_value):
if input_value == "right":
self.current_word += 1
if not self._verify_overflow():
self._print_word()
else:
print("Se termino el nivel " + str(self.number) + " de aprendizaje. Saliendo al menu principal ...")
state.set_state(StateEnum.MENU.key)
if input_value == "left":
self.current_word -= 1
if not self._verify_overflow():
self._print_word()
else:
self.current_word = 0
if input_value == "back":
print("Regresando a " + StateEnum.MENU.realName)
state.set_state(StateEnum.MENU.key)
def _verify_overflow(self):
return self.current_word == -1 or self.current_word == len(self.words)
def _print_word(self):
word = self.words[self.current_word]
print(word)
self.output.write(word.upper())
|
<commit_before>import sys
import os
sys.path.append(os.path.dirname(__file__) + "/../../")
from state import state
from state.stateEnum import StateEnum
from helpers import configHelper
from helpers import processorHelper
class Learn:
current_word = 0
def __init__(self, level_number):
self.number = level_number
self.words = configHelper.get_level_config('learn', self.number)['words']
self.output = processorHelper.get_output_processor()
print("Nivel " + str(self.number) + " de aprendizaje")
self._print_word()
def process_input(self, input_value):
if input_value == "right":
self.current_word += 1
self._verify_overflow()
self._print_word()
if input_value == "left":
self.current_word -= 1
self._verify_overflow()
self._print_word()
if input_value == "back":
print("Regresando a " + StateEnum.MENU.realName)
state.set_state(StateEnum.MENU.key)
def _verify_overflow(self):
pass
def _print_word(self):
word = self.words[self.current_word]
print(word)
self.output.write(word.upper())<commit_msg>Verify overflow in words array into the learn module<commit_after>import sys
import os
sys.path.append(os.path.dirname(__file__) + "/../../")
from state import state
from state.stateEnum import StateEnum
from helpers import configHelper
from helpers import processorHelper
class Learn:
current_word = 0
def __init__(self, level_number):
self.number = level_number
self.words = configHelper.get_level_config('learn', self.number)['words']
self.output = processorHelper.get_output_processor()
print("Nivel " + str(self.number) + " de aprendizaje")
self._print_word()
def process_input(self, input_value):
if input_value == "right":
self.current_word += 1
if not self._verify_overflow():
self._print_word()
else:
print("Se termino el nivel " + str(self.number) + " de aprendizaje. Saliendo al menu principal ...")
state.set_state(StateEnum.MENU.key)
if input_value == "left":
self.current_word -= 1
if not self._verify_overflow():
self._print_word()
else:
self.current_word = 0
if input_value == "back":
print("Regresando a " + StateEnum.MENU.realName)
state.set_state(StateEnum.MENU.key)
def _verify_overflow(self):
return self.current_word == -1 or self.current_word == len(self.words)
def _print_word(self):
word = self.words[self.current_word]
print(word)
self.output.write(word.upper())
|
a3dbd77875ab33e17ecc44efccc9c99dfbc27a7c
|
comics/comics/mortenm.py
|
comics/comics/mortenm.py
|
# encoding: utf-8
from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Morten M (vg.no)'
language = 'no'
url = 'http://www.vg.no/spesial/mortenm/'
start_date = '1978-01-01'
history_capable_days = 120
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Morten M. Kristiansen'
class ComicCrawler(BaseComicCrawler):
def crawl(self):
self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)-%(month)s-%(day).jpg' % {
'year': self.pub_date.strftime("%Y"),
'month': self.pub_date.strftime("%m"),
'day': self.pub_date.strftime("%d"),
}
|
# encoding: utf-8
from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Morten M (vg.no)'
language = 'no'
url = 'http://www.vg.no/spesial/mortenm/'
start_date = '1978-01-01'
history_capable_days = 120
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Morten M. Kristiansen'
class ComicCrawler(BaseComicCrawler):
def crawl(self):
self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)s-%(month)s-%(day)s.jpg' % {
'year': self.pub_date.strftime("%Y"),
'month': self.pub_date.strftime("%m"),
'day': self.pub_date.strftime("%d"),
}
|
Add missing chars in URL for 'Morten M' crawler
|
Add missing chars in URL for 'Morten M' crawler
|
Python
|
agpl-3.0
|
datagutten/comics,datagutten/comics,klette/comics,jodal/comics,klette/comics,jodal/comics,jodal/comics,klette/comics,jodal/comics,datagutten/comics,datagutten/comics
|
# encoding: utf-8
from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Morten M (vg.no)'
language = 'no'
url = 'http://www.vg.no/spesial/mortenm/'
start_date = '1978-01-01'
history_capable_days = 120
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Morten M. Kristiansen'
class ComicCrawler(BaseComicCrawler):
def crawl(self):
self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)-%(month)s-%(day).jpg' % {
'year': self.pub_date.strftime("%Y"),
'month': self.pub_date.strftime("%m"),
'day': self.pub_date.strftime("%d"),
}
Add missing chars in URL for 'Morten M' crawler
|
# encoding: utf-8
from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Morten M (vg.no)'
language = 'no'
url = 'http://www.vg.no/spesial/mortenm/'
start_date = '1978-01-01'
history_capable_days = 120
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Morten M. Kristiansen'
class ComicCrawler(BaseComicCrawler):
def crawl(self):
self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)s-%(month)s-%(day)s.jpg' % {
'year': self.pub_date.strftime("%Y"),
'month': self.pub_date.strftime("%m"),
'day': self.pub_date.strftime("%d"),
}
|
<commit_before># encoding: utf-8
from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Morten M (vg.no)'
language = 'no'
url = 'http://www.vg.no/spesial/mortenm/'
start_date = '1978-01-01'
history_capable_days = 120
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Morten M. Kristiansen'
class ComicCrawler(BaseComicCrawler):
def crawl(self):
self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)-%(month)s-%(day).jpg' % {
'year': self.pub_date.strftime("%Y"),
'month': self.pub_date.strftime("%m"),
'day': self.pub_date.strftime("%d"),
}
<commit_msg>Add missing chars in URL for 'Morten M' crawler<commit_after>
|
# encoding: utf-8
from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Morten M (vg.no)'
language = 'no'
url = 'http://www.vg.no/spesial/mortenm/'
start_date = '1978-01-01'
history_capable_days = 120
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Morten M. Kristiansen'
class ComicCrawler(BaseComicCrawler):
def crawl(self):
self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)s-%(month)s-%(day)s.jpg' % {
'year': self.pub_date.strftime("%Y"),
'month': self.pub_date.strftime("%m"),
'day': self.pub_date.strftime("%d"),
}
|
# encoding: utf-8
from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Morten M (vg.no)'
language = 'no'
url = 'http://www.vg.no/spesial/mortenm/'
start_date = '1978-01-01'
history_capable_days = 120
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Morten M. Kristiansen'
class ComicCrawler(BaseComicCrawler):
def crawl(self):
self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)-%(month)s-%(day).jpg' % {
'year': self.pub_date.strftime("%Y"),
'month': self.pub_date.strftime("%m"),
'day': self.pub_date.strftime("%d"),
}
Add missing chars in URL for 'Morten M' crawler# encoding: utf-8
from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Morten M (vg.no)'
language = 'no'
url = 'http://www.vg.no/spesial/mortenm/'
start_date = '1978-01-01'
history_capable_days = 120
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Morten M. Kristiansen'
class ComicCrawler(BaseComicCrawler):
def crawl(self):
self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)s-%(month)s-%(day)s.jpg' % {
'year': self.pub_date.strftime("%Y"),
'month': self.pub_date.strftime("%m"),
'day': self.pub_date.strftime("%d"),
}
|
<commit_before># encoding: utf-8
from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Morten M (vg.no)'
language = 'no'
url = 'http://www.vg.no/spesial/mortenm/'
start_date = '1978-01-01'
history_capable_days = 120
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Morten M. Kristiansen'
class ComicCrawler(BaseComicCrawler):
def crawl(self):
self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)-%(month)s-%(day).jpg' % {
'year': self.pub_date.strftime("%Y"),
'month': self.pub_date.strftime("%m"),
'day': self.pub_date.strftime("%d"),
}
<commit_msg>Add missing chars in URL for 'Morten M' crawler<commit_after># encoding: utf-8
from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Morten M (vg.no)'
language = 'no'
url = 'http://www.vg.no/spesial/mortenm/'
start_date = '1978-01-01'
history_capable_days = 120
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = 1
rights = 'Morten M. Kristiansen'
class ComicCrawler(BaseComicCrawler):
def crawl(self):
self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)s-%(month)s-%(day)s.jpg' % {
'year': self.pub_date.strftime("%Y"),
'month': self.pub_date.strftime("%m"),
'day': self.pub_date.strftime("%d"),
}
|
8e6670a554694e540c02c9528fc6b22d9f0d6e15
|
django_cron/admin.py
|
django_cron/admin.py
|
from django.contrib import admin
from django_cron.models import CronJobLog
class CronJobLogAdmin(admin.ModelAdmin):
class Meta:
model = CronJobLog
search_fields = ('code', 'message')
ordering = ('-start_time',)
list_display = ('code', 'start_time', 'is_success')
admin.site.register(CronJobLog, CronJobLogAdmin)
|
from django.contrib import admin
from django_cron.models import CronJobLog
class CronJobLogAdmin(admin.ModelAdmin):
class Meta:
model = CronJobLog
search_fields = ('code', 'message')
ordering = ('-start_time',)
list_display = ('code', 'start_time', 'is_success')
def get_readonly_fields(self, request, obj=None):
if not request.user.is_superuser and obj is not None:
names = [f.name for f in CronJobLog._meta.fields if f.name != 'id']
return self.readonly_fields + tuple(names)
return self.readonly_fields
admin.site.register(CronJobLog, CronJobLogAdmin)
|
Make cron job logs readonly for non-superuser
|
Make cron job logs readonly for non-superuser
|
Python
|
mit
|
mozillazg/django-cron,philippeowagner/django-cronium,eriktelepovsky/django-cron,Tivix/django-cron
|
from django.contrib import admin
from django_cron.models import CronJobLog
class CronJobLogAdmin(admin.ModelAdmin):
class Meta:
model = CronJobLog
search_fields = ('code', 'message')
ordering = ('-start_time',)
list_display = ('code', 'start_time', 'is_success')
admin.site.register(CronJobLog, CronJobLogAdmin)
Make cron job logs readonly for non-superuser
|
from django.contrib import admin
from django_cron.models import CronJobLog
class CronJobLogAdmin(admin.ModelAdmin):
class Meta:
model = CronJobLog
search_fields = ('code', 'message')
ordering = ('-start_time',)
list_display = ('code', 'start_time', 'is_success')
def get_readonly_fields(self, request, obj=None):
if not request.user.is_superuser and obj is not None:
names = [f.name for f in CronJobLog._meta.fields if f.name != 'id']
return self.readonly_fields + tuple(names)
return self.readonly_fields
admin.site.register(CronJobLog, CronJobLogAdmin)
|
<commit_before>from django.contrib import admin
from django_cron.models import CronJobLog
class CronJobLogAdmin(admin.ModelAdmin):
class Meta:
model = CronJobLog
search_fields = ('code', 'message')
ordering = ('-start_time',)
list_display = ('code', 'start_time', 'is_success')
admin.site.register(CronJobLog, CronJobLogAdmin)
<commit_msg>Make cron job logs readonly for non-superuser<commit_after>
|
from django.contrib import admin
from django_cron.models import CronJobLog
class CronJobLogAdmin(admin.ModelAdmin):
class Meta:
model = CronJobLog
search_fields = ('code', 'message')
ordering = ('-start_time',)
list_display = ('code', 'start_time', 'is_success')
def get_readonly_fields(self, request, obj=None):
if not request.user.is_superuser and obj is not None:
names = [f.name for f in CronJobLog._meta.fields if f.name != 'id']
return self.readonly_fields + tuple(names)
return self.readonly_fields
admin.site.register(CronJobLog, CronJobLogAdmin)
|
from django.contrib import admin
from django_cron.models import CronJobLog
class CronJobLogAdmin(admin.ModelAdmin):
class Meta:
model = CronJobLog
search_fields = ('code', 'message')
ordering = ('-start_time',)
list_display = ('code', 'start_time', 'is_success')
admin.site.register(CronJobLog, CronJobLogAdmin)
Make cron job logs readonly for non-superuserfrom django.contrib import admin
from django_cron.models import CronJobLog
class CronJobLogAdmin(admin.ModelAdmin):
class Meta:
model = CronJobLog
search_fields = ('code', 'message')
ordering = ('-start_time',)
list_display = ('code', 'start_time', 'is_success')
def get_readonly_fields(self, request, obj=None):
if not request.user.is_superuser and obj is not None:
names = [f.name for f in CronJobLog._meta.fields if f.name != 'id']
return self.readonly_fields + tuple(names)
return self.readonly_fields
admin.site.register(CronJobLog, CronJobLogAdmin)
|
<commit_before>from django.contrib import admin
from django_cron.models import CronJobLog
class CronJobLogAdmin(admin.ModelAdmin):
class Meta:
model = CronJobLog
search_fields = ('code', 'message')
ordering = ('-start_time',)
list_display = ('code', 'start_time', 'is_success')
admin.site.register(CronJobLog, CronJobLogAdmin)
<commit_msg>Make cron job logs readonly for non-superuser<commit_after>from django.contrib import admin
from django_cron.models import CronJobLog
class CronJobLogAdmin(admin.ModelAdmin):
class Meta:
model = CronJobLog
search_fields = ('code', 'message')
ordering = ('-start_time',)
list_display = ('code', 'start_time', 'is_success')
def get_readonly_fields(self, request, obj=None):
if not request.user.is_superuser and obj is not None:
names = [f.name for f in CronJobLog._meta.fields if f.name != 'id']
return self.readonly_fields + tuple(names)
return self.readonly_fields
admin.site.register(CronJobLog, CronJobLogAdmin)
|
d57a1b223b46923bfe5211d4f189b65cfcbffcad
|
msoffcrypto/format/base.py
|
msoffcrypto/format/base.py
|
import abc
# For 2 and 3 compatibility
# https://stackoverflow.com/questions/35673474/
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class BaseOfficeFile(ABC):
def __init__(self):
pass
@abc.abstractmethod
def load_key(self):
pass
@abc.abstractmethod
def decrypt(self):
pass
|
import abc
# For 2 and 3 compatibility
# https://stackoverflow.com/questions/35673474/
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class BaseOfficeFile(ABC):
def __init__(self):
pass
@abc.abstractmethod
def load_key(self):
pass
@abc.abstractmethod
def decrypt(self):
pass
@abc.abstractmethod
def is_encrypted(self):
pass
|
Add is_encrypted() to abstract methods
|
Add is_encrypted() to abstract methods
|
Python
|
mit
|
nolze/ms-offcrypto-tool,nolze/ms-offcrypto-tool,nolze/msoffcrypto-tool,nolze/msoffcrypto-tool
|
import abc
# For 2 and 3 compatibility
# https://stackoverflow.com/questions/35673474/
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class BaseOfficeFile(ABC):
def __init__(self):
pass
@abc.abstractmethod
def load_key(self):
pass
@abc.abstractmethod
def decrypt(self):
pass
Add is_encrypted() to abstract methods
|
import abc
# For 2 and 3 compatibility
# https://stackoverflow.com/questions/35673474/
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class BaseOfficeFile(ABC):
def __init__(self):
pass
@abc.abstractmethod
def load_key(self):
pass
@abc.abstractmethod
def decrypt(self):
pass
@abc.abstractmethod
def is_encrypted(self):
pass
|
<commit_before>import abc
# For 2 and 3 compatibility
# https://stackoverflow.com/questions/35673474/
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class BaseOfficeFile(ABC):
def __init__(self):
pass
@abc.abstractmethod
def load_key(self):
pass
@abc.abstractmethod
def decrypt(self):
pass
<commit_msg>Add is_encrypted() to abstract methods<commit_after>
|
import abc
# For 2 and 3 compatibility
# https://stackoverflow.com/questions/35673474/
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class BaseOfficeFile(ABC):
def __init__(self):
pass
@abc.abstractmethod
def load_key(self):
pass
@abc.abstractmethod
def decrypt(self):
pass
@abc.abstractmethod
def is_encrypted(self):
pass
|
import abc
# For 2 and 3 compatibility
# https://stackoverflow.com/questions/35673474/
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class BaseOfficeFile(ABC):
def __init__(self):
pass
@abc.abstractmethod
def load_key(self):
pass
@abc.abstractmethod
def decrypt(self):
pass
Add is_encrypted() to abstract methodsimport abc
# For 2 and 3 compatibility
# https://stackoverflow.com/questions/35673474/
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class BaseOfficeFile(ABC):
def __init__(self):
pass
@abc.abstractmethod
def load_key(self):
pass
@abc.abstractmethod
def decrypt(self):
pass
@abc.abstractmethod
def is_encrypted(self):
pass
|
<commit_before>import abc
# For 2 and 3 compatibility
# https://stackoverflow.com/questions/35673474/
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class BaseOfficeFile(ABC):
def __init__(self):
pass
@abc.abstractmethod
def load_key(self):
pass
@abc.abstractmethod
def decrypt(self):
pass
<commit_msg>Add is_encrypted() to abstract methods<commit_after>import abc
# For 2 and 3 compatibility
# https://stackoverflow.com/questions/35673474/
ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
class BaseOfficeFile(ABC):
def __init__(self):
pass
@abc.abstractmethod
def load_key(self):
pass
@abc.abstractmethod
def decrypt(self):
pass
@abc.abstractmethod
def is_encrypted(self):
pass
|
f7ff5e6278acaecff7583518cc97bd945fceddc3
|
netmiko/aruba/aruba_ssh.py
|
netmiko/aruba/aruba_ssh.py
|
"""Aruba OS support"""
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = self.base_prompt[:16]
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
|
"""Aruba OS support"""
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
delay_factor = self.select_delay_factor(delay_factor=0)
time.sleep(1 * delay_factor)
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = self.base_prompt[:16]
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
|
Increase aruba delay post login
|
Increase aruba delay post login
|
Python
|
mit
|
fooelisa/netmiko,ktbyers/netmiko,fooelisa/netmiko,isidroamv/netmiko,shamanu4/netmiko,isidroamv/netmiko,ktbyers/netmiko,shamanu4/netmiko
|
"""Aruba OS support"""
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = self.base_prompt[:16]
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
Increase aruba delay post login
|
"""Aruba OS support"""
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
delay_factor = self.select_delay_factor(delay_factor=0)
time.sleep(1 * delay_factor)
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = self.base_prompt[:16]
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
|
<commit_before>"""Aruba OS support"""
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = self.base_prompt[:16]
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
<commit_msg>Increase aruba delay post login<commit_after>
|
"""Aruba OS support"""
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
delay_factor = self.select_delay_factor(delay_factor=0)
time.sleep(1 * delay_factor)
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = self.base_prompt[:16]
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
|
"""Aruba OS support"""
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = self.base_prompt[:16]
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
Increase aruba delay post login"""Aruba OS support"""
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
delay_factor = self.select_delay_factor(delay_factor=0)
time.sleep(1 * delay_factor)
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = self.base_prompt[:16]
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
|
<commit_before>"""Aruba OS support"""
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = self.base_prompt[:16]
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
<commit_msg>Increase aruba delay post login<commit_after>"""Aruba OS support"""
from netmiko.cisco_base_connection import CiscoSSHConnection
class ArubaSSH(CiscoSSHConnection):
"""Aruba OS support"""
def session_preparation(self):
"""Aruba OS requires enable mode to disable paging."""
delay_factor = self.select_delay_factor(delay_factor=0)
time.sleep(1 * delay_factor)
self.set_base_prompt()
self.enable()
self.disable_paging(command="no paging")
def check_config_mode(self, check_string='(config) #', pattern=''):
"""
Checks if the device is in configuration mode or not.
Aruba uses "(<controller name>) (config) #" as config prompt
"""
if not pattern:
pattern = self.base_prompt[:16]
return super(ArubaSSH, self).check_config_mode(check_string=check_string,
pattern=pattern)
|
1dc376e811db2572581b6895536abb8cf0853076
|
drcli/plugins/apps/debug.py
|
drcli/plugins/apps/debug.py
|
import msgpack
import pprint
from drcli.api import App
from drcli.appargs import ISTREAM_AP, OSTREAM_AP
class DumpApp(App):
"""
Debug: unpack the stream and pretty-print it.
"""
arg_parsers = (ISTREAM_AP, OSTREAM_AP)
def dump(self, obj):
pprint.pprint(obj, self.args.out_stream)
def __call__(self):
unpacker = msgpack.Unpacker(self.args.in_stream)
for obj in unpacker:
self.dump(obj)
DumpApp.register_name('dump')
|
import msgpack
import pprint
import json
from schwa import dr
from schwa.dr.constants import FIELD_TYPE_NAME
from drcli.api import App
from drcli.appargs import ArgumentParser, ISTREAM_AP, OSTREAM_AP, DESERIALISE_AP
META_TYPE = 0
class DumpApp(App):
"""
Debug: unpack the stream and pretty-print it.
"""
dump_ap = ArgumentParser()
dump_ap.add_argument('-m', '--human', dest='human_readable', action='store_true', default=False, help='Reinterpret the messages to be more human-readable by integrating headers into content.')
arg_parsers = (dump_ap, ISTREAM_AP, OSTREAM_AP)
def dump(self, obj):
pprint.pprint(obj, self.args.out_stream)
def __call__(self):
unpacker = msgpack.Unpacker(self.args.in_stream)
if self.args.human_readable:
unpacker = self._integrate_names(unpacker)
for obj in unpacker:
self.dump(obj)
def _integrate_names(self, unpacker):
while True:
obj = {}
types = unpacker.unpack()
if types is None:
# No new header
break
store_defs = self._process_store_defs(unpacker.unpack(), types)
nbytes = unpacker.unpack()
obj['__meta__'] = self._process_annot(unpacker.unpack(), types[META_TYPE][1])
for store_name, store in store_defs:
nbytes = unpacker.unpack()
store['items'] = [self._process_annot(item, store['fields']) for item in unpacker.unpack()]
# store.pop('fields')
obj[store_name] = store
yield obj
def _process_store_defs(self, msg, types):
for name, typ, size in msg:
try:
type_name, type_fields = types[typ]
except IndexError:
# for robustness to broken data
type_name, type_fields = '??MissingType={0}'.format(typ), ()
yield name, {'type': type_name, 'fields': type_fields, 'count': size}
def _process_annot(self, msg, fields):
return dict((fields[fnum][FIELD_TYPE_NAME], val) for fnum, val in msg.iteritems())
DumpApp.register_name('dump')
|
Extend dr dump to interpret headers for human-readability
|
Extend dr dump to interpret headers for human-readability
|
Python
|
mit
|
schwa-lab/dr-apps-python
|
import msgpack
import pprint
from drcli.api import App
from drcli.appargs import ISTREAM_AP, OSTREAM_AP
class DumpApp(App):
"""
Debug: unpack the stream and pretty-print it.
"""
arg_parsers = (ISTREAM_AP, OSTREAM_AP)
def dump(self, obj):
pprint.pprint(obj, self.args.out_stream)
def __call__(self):
unpacker = msgpack.Unpacker(self.args.in_stream)
for obj in unpacker:
self.dump(obj)
DumpApp.register_name('dump')
Extend dr dump to interpret headers for human-readability
|
import msgpack
import pprint
import json
from schwa import dr
from schwa.dr.constants import FIELD_TYPE_NAME
from drcli.api import App
from drcli.appargs import ArgumentParser, ISTREAM_AP, OSTREAM_AP, DESERIALISE_AP
META_TYPE = 0
class DumpApp(App):
"""
Debug: unpack the stream and pretty-print it.
"""
dump_ap = ArgumentParser()
dump_ap.add_argument('-m', '--human', dest='human_readable', action='store_true', default=False, help='Reinterpret the messages to be more human-readable by integrating headers into content.')
arg_parsers = (dump_ap, ISTREAM_AP, OSTREAM_AP)
def dump(self, obj):
pprint.pprint(obj, self.args.out_stream)
def __call__(self):
unpacker = msgpack.Unpacker(self.args.in_stream)
if self.args.human_readable:
unpacker = self._integrate_names(unpacker)
for obj in unpacker:
self.dump(obj)
def _integrate_names(self, unpacker):
while True:
obj = {}
types = unpacker.unpack()
if types is None:
# No new header
break
store_defs = self._process_store_defs(unpacker.unpack(), types)
nbytes = unpacker.unpack()
obj['__meta__'] = self._process_annot(unpacker.unpack(), types[META_TYPE][1])
for store_name, store in store_defs:
nbytes = unpacker.unpack()
store['items'] = [self._process_annot(item, store['fields']) for item in unpacker.unpack()]
# store.pop('fields')
obj[store_name] = store
yield obj
def _process_store_defs(self, msg, types):
for name, typ, size in msg:
try:
type_name, type_fields = types[typ]
except IndexError:
# for robustness to broken data
type_name, type_fields = '??MissingType={0}'.format(typ), ()
yield name, {'type': type_name, 'fields': type_fields, 'count': size}
def _process_annot(self, msg, fields):
return dict((fields[fnum][FIELD_TYPE_NAME], val) for fnum, val in msg.iteritems())
DumpApp.register_name('dump')
|
<commit_before>
import msgpack
import pprint
from drcli.api import App
from drcli.appargs import ISTREAM_AP, OSTREAM_AP
class DumpApp(App):
"""
Debug: unpack the stream and pretty-print it.
"""
arg_parsers = (ISTREAM_AP, OSTREAM_AP)
def dump(self, obj):
pprint.pprint(obj, self.args.out_stream)
def __call__(self):
unpacker = msgpack.Unpacker(self.args.in_stream)
for obj in unpacker:
self.dump(obj)
DumpApp.register_name('dump')
<commit_msg>Extend dr dump to interpret headers for human-readability<commit_after>
|
import msgpack
import pprint
import json
from schwa import dr
from schwa.dr.constants import FIELD_TYPE_NAME
from drcli.api import App
from drcli.appargs import ArgumentParser, ISTREAM_AP, OSTREAM_AP, DESERIALISE_AP

# Index of the meta (document-level) type inside the header's type table.
META_TYPE = 0


class DumpApp(App):
    """
    Debug: unpack the stream and pretty-print it.
    """
    dump_ap = ArgumentParser()
    dump_ap.add_argument('-m', '--human', dest='human_readable', action='store_true', default=False, help='Reinterpret the messages to be more human-readable by integrating headers into content.')
    arg_parsers = (dump_ap, ISTREAM_AP, OSTREAM_AP)

    def dump(self, obj):
        # Pretty-print one unpacked object to the configured output stream.
        pprint.pprint(obj, self.args.out_stream)

    def __call__(self):
        unpacker = msgpack.Unpacker(self.args.in_stream)
        if self.args.human_readable:
            # Wrap the raw unpacker so type/field headers are folded into
            # each yielded document.
            unpacker = self._integrate_names(unpacker)
        for obj in unpacker:
            self.dump(obj)

    def _integrate_names(self, unpacker):
        # Generator: yields one dict per document, with numeric field ids
        # replaced by the names declared in the document's header messages.
        # NOTE(review): relies on the exact message order of the stream
        # format (types, store defs, then per-store byte count + items).
        while True:
            obj = {}
            types = unpacker.unpack()
            if types is None:
                # No new header
                break
            store_defs = self._process_store_defs(unpacker.unpack(), types)
            nbytes = unpacker.unpack()  # byte length of the meta message (unused)
            obj['__meta__'] = self._process_annot(unpacker.unpack(), types[META_TYPE][1])
            for store_name, store in store_defs:
                nbytes = unpacker.unpack()  # byte length of this store's items (unused)
                store['items'] = [self._process_annot(item, store['fields']) for item in unpacker.unpack()]
                # store.pop('fields')
                obj[store_name] = store
            yield obj

    def _process_store_defs(self, msg, types):
        # Resolve each store's numeric type index into its declared name
        # and field list; tolerate dangling indices in broken streams.
        for name, typ, size in msg:
            try:
                type_name, type_fields = types[typ]
            except IndexError:
                # for robustness to broken data
                type_name, type_fields = '??MissingType={0}'.format(typ), ()
            yield name, {'type': type_name, 'fields': type_fields, 'count': size}

    def _process_annot(self, msg, fields):
        # Map numeric field ids to their declared names (Python 2: iteritems).
        return dict((fields[fnum][FIELD_TYPE_NAME], val) for fnum, val in msg.iteritems())


DumpApp.register_name('dump')
|
import msgpack
import pprint
from drcli.api import App
from drcli.appargs import ISTREAM_AP, OSTREAM_AP
class DumpApp(App):
"""
Debug: unpack the stream and pretty-print it.
"""
arg_parsers = (ISTREAM_AP, OSTREAM_AP)
def dump(self, obj):
pprint.pprint(obj, self.args.out_stream)
def __call__(self):
unpacker = msgpack.Unpacker(self.args.in_stream)
for obj in unpacker:
self.dump(obj)
DumpApp.register_name('dump')
Extend dr dump to interpret headers for human-readability
import msgpack
import pprint
import json
from schwa import dr
from schwa.dr.constants import FIELD_TYPE_NAME
from drcli.api import App
from drcli.appargs import ArgumentParser, ISTREAM_AP, OSTREAM_AP, DESERIALISE_AP
META_TYPE = 0
class DumpApp(App):
"""
Debug: unpack the stream and pretty-print it.
"""
dump_ap = ArgumentParser()
dump_ap.add_argument('-m', '--human', dest='human_readable', action='store_true', default=False, help='Reinterpret the messages to be more human-readable by integrating headers into content.')
arg_parsers = (dump_ap, ISTREAM_AP, OSTREAM_AP)
def dump(self, obj):
pprint.pprint(obj, self.args.out_stream)
def __call__(self):
unpacker = msgpack.Unpacker(self.args.in_stream)
if self.args.human_readable:
unpacker = self._integrate_names(unpacker)
for obj in unpacker:
self.dump(obj)
def _integrate_names(self, unpacker):
while True:
obj = {}
types = unpacker.unpack()
if types is None:
# No new header
break
store_defs = self._process_store_defs(unpacker.unpack(), types)
nbytes = unpacker.unpack()
obj['__meta__'] = self._process_annot(unpacker.unpack(), types[META_TYPE][1])
for store_name, store in store_defs:
nbytes = unpacker.unpack()
store['items'] = [self._process_annot(item, store['fields']) for item in unpacker.unpack()]
# store.pop('fields')
obj[store_name] = store
yield obj
def _process_store_defs(self, msg, types):
for name, typ, size in msg:
try:
type_name, type_fields = types[typ]
except IndexError:
# for robustness to broken data
type_name, type_fields = '??MissingType={0}'.format(typ), ()
yield name, {'type': type_name, 'fields': type_fields, 'count': size}
def _process_annot(self, msg, fields):
return dict((fields[fnum][FIELD_TYPE_NAME], val) for fnum, val in msg.iteritems())
DumpApp.register_name('dump')
|
<commit_before>
import msgpack
import pprint
from drcli.api import App
from drcli.appargs import ISTREAM_AP, OSTREAM_AP
class DumpApp(App):
"""
Debug: unpack the stream and pretty-print it.
"""
arg_parsers = (ISTREAM_AP, OSTREAM_AP)
def dump(self, obj):
pprint.pprint(obj, self.args.out_stream)
def __call__(self):
unpacker = msgpack.Unpacker(self.args.in_stream)
for obj in unpacker:
self.dump(obj)
DumpApp.register_name('dump')
<commit_msg>Extend dr dump to interpret headers for human-readability<commit_after>
import msgpack
import pprint
import json
from schwa import dr
from schwa.dr.constants import FIELD_TYPE_NAME
from drcli.api import App
from drcli.appargs import ArgumentParser, ISTREAM_AP, OSTREAM_AP, DESERIALISE_AP
META_TYPE = 0
class DumpApp(App):
"""
Debug: unpack the stream and pretty-print it.
"""
dump_ap = ArgumentParser()
dump_ap.add_argument('-m', '--human', dest='human_readable', action='store_true', default=False, help='Reinterpret the messages to be more human-readable by integrating headers into content.')
arg_parsers = (dump_ap, ISTREAM_AP, OSTREAM_AP)
def dump(self, obj):
pprint.pprint(obj, self.args.out_stream)
def __call__(self):
unpacker = msgpack.Unpacker(self.args.in_stream)
if self.args.human_readable:
unpacker = self._integrate_names(unpacker)
for obj in unpacker:
self.dump(obj)
def _integrate_names(self, unpacker):
while True:
obj = {}
types = unpacker.unpack()
if types is None:
# No new header
break
store_defs = self._process_store_defs(unpacker.unpack(), types)
nbytes = unpacker.unpack()
obj['__meta__'] = self._process_annot(unpacker.unpack(), types[META_TYPE][1])
for store_name, store in store_defs:
nbytes = unpacker.unpack()
store['items'] = [self._process_annot(item, store['fields']) for item in unpacker.unpack()]
# store.pop('fields')
obj[store_name] = store
yield obj
def _process_store_defs(self, msg, types):
for name, typ, size in msg:
try:
type_name, type_fields = types[typ]
except IndexError:
# for robustness to broken data
type_name, type_fields = '??MissingType={0}'.format(typ), ()
yield name, {'type': type_name, 'fields': type_fields, 'count': size}
def _process_annot(self, msg, fields):
return dict((fields[fnum][FIELD_TYPE_NAME], val) for fnum, val in msg.iteritems())
DumpApp.register_name('dump')
|
1ed41f3673ccef3955ac8d7feae23563f7454530
|
examples/dirwatch.py
|
examples/dirwatch.py
|
#!/usr/bin/env python
"""Directory Watch Example
This example demonstrates the inotify I/O Component ``Notify`` which can
be used for real-time monitoring of file system events. The example simply
takes a path to watch as the first Command Line Argument and prints to
stdour every file system event it sees.
"""
import sys
from circuits import Debugger
from circuits.io import Notify
# Configure the system
app = (Notify() + Debugger()) # app gets assigned the Notify instance
# Add the path to watch
app.add_path(sys.argv[1])
# Run the system
app.run()
|
#!/usr/bin/env python
"""Directory Watch Example
This example demonstrates the inotify I/O Component ``Notify`` which can
be used for real-time monitoring of file system events. The example simply
takes a path to watch as the first Command Line Argument and prints to
stdour every file system event it sees.
"""
import sys
from circuits import Debugger
from circuits.io import Notify
# Configure the system
app = Notify()
Debugger().register(app)
# Add the path to watch
app.add_path(sys.argv[1])
# Run the system
app.run()
|
Use explicit registration in examples
|
Use explicit registration in examples
|
Python
|
mit
|
treemo/circuits,treemo/circuits,nizox/circuits,eriol/circuits,eriol/circuits,treemo/circuits,eriol/circuits
|
#!/usr/bin/env python
"""Directory Watch Example
This example demonstrates the inotify I/O Component ``Notify`` which can
be used for real-time monitoring of file system events. The example simply
takes a path to watch as the first Command Line Argument and prints to
stdour every file system event it sees.
"""
import sys
from circuits import Debugger
from circuits.io import Notify
# Configure the system
app = (Notify() + Debugger()) # app gets assigned the Notify instance
# Add the path to watch
app.add_path(sys.argv[1])
# Run the system
app.run()
Use explicit registration in examples
|
#!/usr/bin/env python
"""Directory Watch Example
This example demonstrates the inotify I/O Component ``Notify`` which can
be used for real-time monitoring of file system events. The example simply
takes a path to watch as the first Command Line Argument and prints to
stdour every file system event it sees.
"""
import sys
from circuits import Debugger
from circuits.io import Notify
# Configure the system
app = Notify()
Debugger().register(app)
# Add the path to watch
app.add_path(sys.argv[1])
# Run the system
app.run()
|
<commit_before>#!/usr/bin/env python
"""Directory Watch Example
This example demonstrates the inotify I/O Component ``Notify`` which can
be used for real-time monitoring of file system events. The example simply
takes a path to watch as the first Command Line Argument and prints to
stdour every file system event it sees.
"""
import sys
from circuits import Debugger
from circuits.io import Notify
# Configure the system
app = (Notify() + Debugger()) # app gets assigned the Notify instance
# Add the path to watch
app.add_path(sys.argv[1])
# Run the system
app.run()
<commit_msg>Use explicit registration in examples<commit_after>
|
#!/usr/bin/env python
"""Directory Watch Example

This example demonstrates the inotify I/O Component ``Notify`` which can
be used for real-time monitoring of file system events. The example simply
takes a path to watch as the first Command Line Argument and prints to
stdout every file system event it sees.
"""

import sys

from circuits import Debugger
from circuits.io import Notify

# Configure the system
app = Notify()
# Explicit registration: attach the Debugger component so every event
# flowing through the system is logged.
Debugger().register(app)

# Add the path to watch (first command-line argument)
app.add_path(sys.argv[1])

# Run the system
app.run()
|
#!/usr/bin/env python
"""Directory Watch Example
This example demonstrates the inotify I/O Component ``Notify`` which can
be used for real-time monitoring of file system events. The example simply
takes a path to watch as the first Command Line Argument and prints to
stdour every file system event it sees.
"""
import sys
from circuits import Debugger
from circuits.io import Notify
# Configure the system
app = (Notify() + Debugger()) # app gets assigned the Notify instance
# Add the path to watch
app.add_path(sys.argv[1])
# Run the system
app.run()
Use explicit registration in examples#!/usr/bin/env python
"""Directory Watch Example
This example demonstrates the inotify I/O Component ``Notify`` which can
be used for real-time monitoring of file system events. The example simply
takes a path to watch as the first Command Line Argument and prints to
stdour every file system event it sees.
"""
import sys
from circuits import Debugger
from circuits.io import Notify
# Configure the system
app = Notify()
Debugger().register(app)
# Add the path to watch
app.add_path(sys.argv[1])
# Run the system
app.run()
|
<commit_before>#!/usr/bin/env python
"""Directory Watch Example
This example demonstrates the inotify I/O Component ``Notify`` which can
be used for real-time monitoring of file system events. The example simply
takes a path to watch as the first Command Line Argument and prints to
stdour every file system event it sees.
"""
import sys
from circuits import Debugger
from circuits.io import Notify
# Configure the system
app = (Notify() + Debugger()) # app gets assigned the Notify instance
# Add the path to watch
app.add_path(sys.argv[1])
# Run the system
app.run()
<commit_msg>Use explicit registration in examples<commit_after>#!/usr/bin/env python
"""Directory Watch Example
This example demonstrates the inotify I/O Component ``Notify`` which can
be used for real-time monitoring of file system events. The example simply
takes a path to watch as the first Command Line Argument and prints to
stdour every file system event it sees.
"""
import sys
from circuits import Debugger
from circuits.io import Notify
# Configure the system
app = Notify()
Debugger().register(app)
# Add the path to watch
app.add_path(sys.argv[1])
# Run the system
app.run()
|
839cb6f1d1a04f420d818406652eb9ce51d290dd
|
epitran/bin/migraterules.py
|
epitran/bin/migraterules.py
|
#!/usr/bin/env Python
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import)
import glob
import re
import io
import unicodecsv
def build_rule(fields):
try:
a, b, X, Y = fields
b = 0 if not b else b
a = 0 if not a else a
return '{} -> {} / {} _ {}'.format(a, b, X, Y)
except ValueError:
print('Malformed rule: {}'.format(','.join(fields)))
def main():
for csv in glob.glob('*.csv'):
txt = re.match('[A-Za-z-]+', csv).group(0) + '.txt'
with open(csv, 'rb') as f, io.open(txt, 'w', encoding='utf-8') as g:
reader = unicodecsv.reader(f, encoding='utf-8')
next(reader)
for fields in reader:
if re.match('\s*%', fields[0]):
print(','.join([x for x in fields if x]), file=g)
else:
rule = build_rule(fields)
rule = re.sub('[ ]+', ' ', rule)
rule = re.sub('[ ]$', '', rule)
print(rule, file=g)
if __name__ == '__main__':
main()
|
#!/usr/bin/env Python
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import)
import glob
import re
import io
import unicodecsv
def build_rule(fields):
try:
a, b, X, Y = fields
b = "0" if not b else b
a = "0" if not a else a
return '{} -> {} / {} _ {}'.format(a, b, X, Y)
except ValueError:
print('Malformed rule: {}'.format(','.join(fields)))
def main():
for csv in glob.glob('*.csv'):
txt = re.match('[A-Za-z-]+', csv).group(0) + '.txt'
with open(csv, 'rb') as f, io.open(txt, 'w', encoding='utf-8') as g:
reader = unicodecsv.reader(f, encoding='utf-8')
next(reader)
for fields in reader:
if re.match('\s*%', fields[0]):
print(','.join([x for x in fields if x]), file=g)
else:
rule = build_rule(fields)
rule = re.sub('[ ]+', ' ', rule)
rule = re.sub('[ ]$', '', rule)
print(rule, file=g)
if __name__ == '__main__':
main()
|
Use strings instead of numerals for "0" in rules
|
Use strings instead of numerals for "0" in rules
|
Python
|
mit
|
dmort27/epitran,dmort27/epitran
|
#!/usr/bin/env Python
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import)
import glob
import re
import io
import unicodecsv
def build_rule(fields):
try:
a, b, X, Y = fields
b = 0 if not b else b
a = 0 if not a else a
return '{} -> {} / {} _ {}'.format(a, b, X, Y)
except ValueError:
print('Malformed rule: {}'.format(','.join(fields)))
def main():
for csv in glob.glob('*.csv'):
txt = re.match('[A-Za-z-]+', csv).group(0) + '.txt'
with open(csv, 'rb') as f, io.open(txt, 'w', encoding='utf-8') as g:
reader = unicodecsv.reader(f, encoding='utf-8')
next(reader)
for fields in reader:
if re.match('\s*%', fields[0]):
print(','.join([x for x in fields if x]), file=g)
else:
rule = build_rule(fields)
rule = re.sub('[ ]+', ' ', rule)
rule = re.sub('[ ]$', '', rule)
print(rule, file=g)
if __name__ == '__main__':
main()
Use strings instead of numerals for "0" in rules
|
#!/usr/bin/env Python
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import)
import glob
import re
import io
import unicodecsv
def build_rule(fields):
try:
a, b, X, Y = fields
b = "0" if not b else b
a = "0" if not a else a
return '{} -> {} / {} _ {}'.format(a, b, X, Y)
except ValueError:
print('Malformed rule: {}'.format(','.join(fields)))
def main():
for csv in glob.glob('*.csv'):
txt = re.match('[A-Za-z-]+', csv).group(0) + '.txt'
with open(csv, 'rb') as f, io.open(txt, 'w', encoding='utf-8') as g:
reader = unicodecsv.reader(f, encoding='utf-8')
next(reader)
for fields in reader:
if re.match('\s*%', fields[0]):
print(','.join([x for x in fields if x]), file=g)
else:
rule = build_rule(fields)
rule = re.sub('[ ]+', ' ', rule)
rule = re.sub('[ ]$', '', rule)
print(rule, file=g)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env Python
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import)
import glob
import re
import io
import unicodecsv
def build_rule(fields):
try:
a, b, X, Y = fields
b = 0 if not b else b
a = 0 if not a else a
return '{} -> {} / {} _ {}'.format(a, b, X, Y)
except ValueError:
print('Malformed rule: {}'.format(','.join(fields)))
def main():
for csv in glob.glob('*.csv'):
txt = re.match('[A-Za-z-]+', csv).group(0) + '.txt'
with open(csv, 'rb') as f, io.open(txt, 'w', encoding='utf-8') as g:
reader = unicodecsv.reader(f, encoding='utf-8')
next(reader)
for fields in reader:
if re.match('\s*%', fields[0]):
print(','.join([x for x in fields if x]), file=g)
else:
rule = build_rule(fields)
rule = re.sub('[ ]+', ' ', rule)
rule = re.sub('[ ]$', '', rule)
print(rule, file=g)
if __name__ == '__main__':
main()
<commit_msg>Use strings instead of numerals for "0" in rules<commit_after>
|
#!/usr/bin/env Python
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import)
import glob
import re
import io
import unicodecsv
def build_rule(fields):
try:
a, b, X, Y = fields
b = "0" if not b else b
a = "0" if not a else a
return '{} -> {} / {} _ {}'.format(a, b, X, Y)
except ValueError:
print('Malformed rule: {}'.format(','.join(fields)))
def main():
for csv in glob.glob('*.csv'):
txt = re.match('[A-Za-z-]+', csv).group(0) + '.txt'
with open(csv, 'rb') as f, io.open(txt, 'w', encoding='utf-8') as g:
reader = unicodecsv.reader(f, encoding='utf-8')
next(reader)
for fields in reader:
if re.match('\s*%', fields[0]):
print(','.join([x for x in fields if x]), file=g)
else:
rule = build_rule(fields)
rule = re.sub('[ ]+', ' ', rule)
rule = re.sub('[ ]$', '', rule)
print(rule, file=g)
if __name__ == '__main__':
main()
|
#!/usr/bin/env Python
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import)
import glob
import re
import io
import unicodecsv
def build_rule(fields):
try:
a, b, X, Y = fields
b = 0 if not b else b
a = 0 if not a else a
return '{} -> {} / {} _ {}'.format(a, b, X, Y)
except ValueError:
print('Malformed rule: {}'.format(','.join(fields)))
def main():
for csv in glob.glob('*.csv'):
txt = re.match('[A-Za-z-]+', csv).group(0) + '.txt'
with open(csv, 'rb') as f, io.open(txt, 'w', encoding='utf-8') as g:
reader = unicodecsv.reader(f, encoding='utf-8')
next(reader)
for fields in reader:
if re.match('\s*%', fields[0]):
print(','.join([x for x in fields if x]), file=g)
else:
rule = build_rule(fields)
rule = re.sub('[ ]+', ' ', rule)
rule = re.sub('[ ]$', '', rule)
print(rule, file=g)
if __name__ == '__main__':
main()
Use strings instead of numerals for "0" in rules#!/usr/bin/env Python
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import)
import glob
import re
import io
import unicodecsv
def build_rule(fields):
try:
a, b, X, Y = fields
b = "0" if not b else b
a = "0" if not a else a
return '{} -> {} / {} _ {}'.format(a, b, X, Y)
except ValueError:
print('Malformed rule: {}'.format(','.join(fields)))
def main():
for csv in glob.glob('*.csv'):
txt = re.match('[A-Za-z-]+', csv).group(0) + '.txt'
with open(csv, 'rb') as f, io.open(txt, 'w', encoding='utf-8') as g:
reader = unicodecsv.reader(f, encoding='utf-8')
next(reader)
for fields in reader:
if re.match('\s*%', fields[0]):
print(','.join([x for x in fields if x]), file=g)
else:
rule = build_rule(fields)
rule = re.sub('[ ]+', ' ', rule)
rule = re.sub('[ ]$', '', rule)
print(rule, file=g)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env Python
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import)
import glob
import re
import io
import unicodecsv
def build_rule(fields):
try:
a, b, X, Y = fields
b = 0 if not b else b
a = 0 if not a else a
return '{} -> {} / {} _ {}'.format(a, b, X, Y)
except ValueError:
print('Malformed rule: {}'.format(','.join(fields)))
def main():
for csv in glob.glob('*.csv'):
txt = re.match('[A-Za-z-]+', csv).group(0) + '.txt'
with open(csv, 'rb') as f, io.open(txt, 'w', encoding='utf-8') as g:
reader = unicodecsv.reader(f, encoding='utf-8')
next(reader)
for fields in reader:
if re.match('\s*%', fields[0]):
print(','.join([x for x in fields if x]), file=g)
else:
rule = build_rule(fields)
rule = re.sub('[ ]+', ' ', rule)
rule = re.sub('[ ]$', '', rule)
print(rule, file=g)
if __name__ == '__main__':
main()
<commit_msg>Use strings instead of numerals for "0" in rules<commit_after>#!/usr/bin/env Python
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import)
import glob
import re
import io
import unicodecsv
def build_rule(fields):
try:
a, b, X, Y = fields
b = "0" if not b else b
a = "0" if not a else a
return '{} -> {} / {} _ {}'.format(a, b, X, Y)
except ValueError:
print('Malformed rule: {}'.format(','.join(fields)))
def main():
for csv in glob.glob('*.csv'):
txt = re.match('[A-Za-z-]+', csv).group(0) + '.txt'
with open(csv, 'rb') as f, io.open(txt, 'w', encoding='utf-8') as g:
reader = unicodecsv.reader(f, encoding='utf-8')
next(reader)
for fields in reader:
if re.match('\s*%', fields[0]):
print(','.join([x for x in fields if x]), file=g)
else:
rule = build_rule(fields)
rule = re.sub('[ ]+', ' ', rule)
rule = re.sub('[ ]$', '', rule)
print(rule, file=g)
if __name__ == '__main__':
main()
|
68faeb845e50b4038157fc9fc5155bdeb6f3742b
|
common/apps.py
|
common/apps.py
|
from django.apps import AppConfig
from django.conf import settings
from common.helpers.db import db_is_initialized
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.display_missing_environment_variables()
from common.helpers.tags import import_tags_from_csv
if db_is_initialized():
import_tags_from_csv()
def display_missing_environment_variables(self):
missing_required_variables = []
for key, value in settings.ENVIRONMENT_VARIABLE_WARNINGS.items():
if not (hasattr(settings, key)):
if value['error']:
missing_required_variables.append(key)
print(key + ' not set: ' + value['message'])
if len(missing_required_variables) > 0:
raise EnvironmentError('Required environment variables missing: ' + ','.join(missing_required_variables))
|
import sys
from django.apps import AppConfig
from django.conf import settings
from common.helpers.db import db_is_initialized
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.display_missing_environment_variables()
from common.helpers.tags import import_tags_from_csv
if 'loaddata' in sys.argv:
self.loaddata_clean()
elif db_is_initialized():
import_tags_from_csv()
def display_missing_environment_variables(self):
missing_required_variables = []
for key, value in settings.ENVIRONMENT_VARIABLE_WARNINGS.items():
if not (hasattr(settings, key)):
if value['error']:
missing_required_variables.append(key)
print(key + ' not set: ' + value['message'])
if len(missing_required_variables) > 0:
raise EnvironmentError('Required environment variables missing: ' + ','.join(missing_required_variables))
def loaddata_clean(self):
from django.contrib.contenttypes.models import ContentType
ContentType.objects.all().delete()
|
Clean content types table and don't load tags when running loaddata
|
Clean content types table and don't load tags when running loaddata
|
Python
|
mit
|
DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange
|
from django.apps import AppConfig
from django.conf import settings
from common.helpers.db import db_is_initialized
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.display_missing_environment_variables()
from common.helpers.tags import import_tags_from_csv
if db_is_initialized():
import_tags_from_csv()
def display_missing_environment_variables(self):
missing_required_variables = []
for key, value in settings.ENVIRONMENT_VARIABLE_WARNINGS.items():
if not (hasattr(settings, key)):
if value['error']:
missing_required_variables.append(key)
print(key + ' not set: ' + value['message'])
if len(missing_required_variables) > 0:
raise EnvironmentError('Required environment variables missing: ' + ','.join(missing_required_variables))
Clean content types table and don't load tags when running loaddata
|
import sys
from django.apps import AppConfig
from django.conf import settings
from common.helpers.db import db_is_initialized
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.display_missing_environment_variables()
from common.helpers.tags import import_tags_from_csv
if 'loaddata' in sys.argv:
self.loaddata_clean()
elif db_is_initialized():
import_tags_from_csv()
def display_missing_environment_variables(self):
missing_required_variables = []
for key, value in settings.ENVIRONMENT_VARIABLE_WARNINGS.items():
if not (hasattr(settings, key)):
if value['error']:
missing_required_variables.append(key)
print(key + ' not set: ' + value['message'])
if len(missing_required_variables) > 0:
raise EnvironmentError('Required environment variables missing: ' + ','.join(missing_required_variables))
def loaddata_clean(self):
from django.contrib.contenttypes.models import ContentType
ContentType.objects.all().delete()
|
<commit_before>from django.apps import AppConfig
from django.conf import settings
from common.helpers.db import db_is_initialized
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.display_missing_environment_variables()
from common.helpers.tags import import_tags_from_csv
if db_is_initialized():
import_tags_from_csv()
def display_missing_environment_variables(self):
missing_required_variables = []
for key, value in settings.ENVIRONMENT_VARIABLE_WARNINGS.items():
if not (hasattr(settings, key)):
if value['error']:
missing_required_variables.append(key)
print(key + ' not set: ' + value['message'])
if len(missing_required_variables) > 0:
raise EnvironmentError('Required environment variables missing: ' + ','.join(missing_required_variables))
<commit_msg>Clean content types table and don't load tags when running loaddata<commit_after>
|
import sys
from django.apps import AppConfig
from django.conf import settings
from common.helpers.db import db_is_initialized
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.display_missing_environment_variables()
from common.helpers.tags import import_tags_from_csv
if 'loaddata' in sys.argv:
self.loaddata_clean()
elif db_is_initialized():
import_tags_from_csv()
def display_missing_environment_variables(self):
missing_required_variables = []
for key, value in settings.ENVIRONMENT_VARIABLE_WARNINGS.items():
if not (hasattr(settings, key)):
if value['error']:
missing_required_variables.append(key)
print(key + ' not set: ' + value['message'])
if len(missing_required_variables) > 0:
raise EnvironmentError('Required environment variables missing: ' + ','.join(missing_required_variables))
def loaddata_clean(self):
from django.contrib.contenttypes.models import ContentType
ContentType.objects.all().delete()
|
from django.apps import AppConfig
from django.conf import settings
from common.helpers.db import db_is_initialized
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.display_missing_environment_variables()
from common.helpers.tags import import_tags_from_csv
if db_is_initialized():
import_tags_from_csv()
def display_missing_environment_variables(self):
missing_required_variables = []
for key, value in settings.ENVIRONMENT_VARIABLE_WARNINGS.items():
if not (hasattr(settings, key)):
if value['error']:
missing_required_variables.append(key)
print(key + ' not set: ' + value['message'])
if len(missing_required_variables) > 0:
raise EnvironmentError('Required environment variables missing: ' + ','.join(missing_required_variables))
Clean content types table and don't load tags when running loaddataimport sys
from django.apps import AppConfig
from django.conf import settings
from common.helpers.db import db_is_initialized
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.display_missing_environment_variables()
from common.helpers.tags import import_tags_from_csv
if 'loaddata' in sys.argv:
self.loaddata_clean()
elif db_is_initialized():
import_tags_from_csv()
def display_missing_environment_variables(self):
missing_required_variables = []
for key, value in settings.ENVIRONMENT_VARIABLE_WARNINGS.items():
if not (hasattr(settings, key)):
if value['error']:
missing_required_variables.append(key)
print(key + ' not set: ' + value['message'])
if len(missing_required_variables) > 0:
raise EnvironmentError('Required environment variables missing: ' + ','.join(missing_required_variables))
def loaddata_clean(self):
from django.contrib.contenttypes.models import ContentType
ContentType.objects.all().delete()
|
<commit_before>from django.apps import AppConfig
from django.conf import settings
from common.helpers.db import db_is_initialized
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.display_missing_environment_variables()
from common.helpers.tags import import_tags_from_csv
if db_is_initialized():
import_tags_from_csv()
def display_missing_environment_variables(self):
missing_required_variables = []
for key, value in settings.ENVIRONMENT_VARIABLE_WARNINGS.items():
if not (hasattr(settings, key)):
if value['error']:
missing_required_variables.append(key)
print(key + ' not set: ' + value['message'])
if len(missing_required_variables) > 0:
raise EnvironmentError('Required environment variables missing: ' + ','.join(missing_required_variables))
<commit_msg>Clean content types table and don't load tags when running loaddata<commit_after>import sys
from django.apps import AppConfig
from django.conf import settings
from common.helpers.db import db_is_initialized
class CommonConfig(AppConfig):
name = 'common'
def ready(self):
self.display_missing_environment_variables()
from common.helpers.tags import import_tags_from_csv
if 'loaddata' in sys.argv:
self.loaddata_clean()
elif db_is_initialized():
import_tags_from_csv()
def display_missing_environment_variables(self):
missing_required_variables = []
for key, value in settings.ENVIRONMENT_VARIABLE_WARNINGS.items():
if not (hasattr(settings, key)):
if value['error']:
missing_required_variables.append(key)
print(key + ' not set: ' + value['message'])
if len(missing_required_variables) > 0:
raise EnvironmentError('Required environment variables missing: ' + ','.join(missing_required_variables))
def loaddata_clean(self):
from django.contrib.contenttypes.models import ContentType
ContentType.objects.all().delete()
|
100260936d433cf468c0437b9cb135bc871d27d1
|
sphinx-plugin/pydispatch_sphinx/__init__.py
|
sphinx-plugin/pydispatch_sphinx/__init__.py
|
import typing as tp
import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch-sphinx')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from sphinx.application import Sphinx
from . import directives
from . import documenters
def setup(app: Sphinx) -> tp.Dict[str, tp.Any]:
app.setup_extension(directives.__name__)
app.setup_extension(documenters.__name__)
return {
'version':__version__,
'parallel_read_safe':True,
'parallel_write_safe':True,
}
|
import typing as tp
import importlib.metadata
__version__ = importlib.metadata.version('python-dispatch-sphinx')
from sphinx.application import Sphinx
from . import directives
from . import documenters
def setup(app: Sphinx) -> tp.Dict[str, tp.Any]:
app.setup_extension(directives.__name__)
app.setup_extension(documenters.__name__)
return {
'version':__version__,
'parallel_read_safe':True,
'parallel_write_safe':True,
}
|
Use importlib.metadata for version retrieval
|
Use importlib.metadata for version retrieval
|
Python
|
mit
|
nocarryr/python-dispatch
|
import typing as tp
import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch-sphinx')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from sphinx.application import Sphinx
from . import directives
from . import documenters
def setup(app: Sphinx) -> tp.Dict[str, tp.Any]:
app.setup_extension(directives.__name__)
app.setup_extension(documenters.__name__)
return {
'version':__version__,
'parallel_read_safe':True,
'parallel_write_safe':True,
}
Use importlib.metadata for version retrieval
|
import typing as tp
import importlib.metadata
__version__ = importlib.metadata.version('python-dispatch-sphinx')
from sphinx.application import Sphinx
from . import directives
from . import documenters
def setup(app: Sphinx) -> tp.Dict[str, tp.Any]:
app.setup_extension(directives.__name__)
app.setup_extension(documenters.__name__)
return {
'version':__version__,
'parallel_read_safe':True,
'parallel_write_safe':True,
}
|
<commit_before>import typing as tp
import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch-sphinx')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from sphinx.application import Sphinx
from . import directives
from . import documenters
def setup(app: Sphinx) -> tp.Dict[str, tp.Any]:
app.setup_extension(directives.__name__)
app.setup_extension(documenters.__name__)
return {
'version':__version__,
'parallel_read_safe':True,
'parallel_write_safe':True,
}
<commit_msg>Use importlib.metadata for version retrieval<commit_after>
|
import typing as tp
import importlib.metadata
__version__ = importlib.metadata.version('python-dispatch-sphinx')
from sphinx.application import Sphinx
from . import directives
from . import documenters
def setup(app: Sphinx) -> tp.Dict[str, tp.Any]:
app.setup_extension(directives.__name__)
app.setup_extension(documenters.__name__)
return {
'version':__version__,
'parallel_read_safe':True,
'parallel_write_safe':True,
}
|
import typing as tp
import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch-sphinx')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from sphinx.application import Sphinx
from . import directives
from . import documenters
def setup(app: Sphinx) -> tp.Dict[str, tp.Any]:
app.setup_extension(directives.__name__)
app.setup_extension(documenters.__name__)
return {
'version':__version__,
'parallel_read_safe':True,
'parallel_write_safe':True,
}
Use importlib.metadata for version retrievalimport typing as tp
import importlib.metadata
__version__ = importlib.metadata.version('python-dispatch-sphinx')
from sphinx.application import Sphinx
from . import directives
from . import documenters
def setup(app: Sphinx) -> tp.Dict[str, tp.Any]:
app.setup_extension(directives.__name__)
app.setup_extension(documenters.__name__)
return {
'version':__version__,
'parallel_read_safe':True,
'parallel_write_safe':True,
}
|
<commit_before>import typing as tp
import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch-sphinx')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from sphinx.application import Sphinx
from . import directives
from . import documenters
def setup(app: Sphinx) -> tp.Dict[str, tp.Any]:
app.setup_extension(directives.__name__)
app.setup_extension(documenters.__name__)
return {
'version':__version__,
'parallel_read_safe':True,
'parallel_write_safe':True,
}
<commit_msg>Use importlib.metadata for version retrieval<commit_after>import typing as tp
import importlib.metadata
__version__ = importlib.metadata.version('python-dispatch-sphinx')
from sphinx.application import Sphinx
from . import directives
from . import documenters
def setup(app: Sphinx) -> tp.Dict[str, tp.Any]:
app.setup_extension(directives.__name__)
app.setup_extension(documenters.__name__)
return {
'version':__version__,
'parallel_read_safe':True,
'parallel_write_safe':True,
}
|
39b45111efdece4e68615ca123e0062b0d1edaae
|
organizations/__init__.py
|
organizations/__init__.py
|
"""
edx-organizations app initialization module
"""
__version__ = '2.0.1' # pragma: no cover
|
"""
edx-organizations app initialization module
"""
__version__ = '2.0.2' # pragma: no cover
|
Update the version so we can do another release once this is all done.
|
Update the version so we can do another release once this is all done.
|
Python
|
agpl-3.0
|
edx/edx-organizations
|
"""
edx-organizations app initialization module
"""
__version__ = '2.0.1' # pragma: no cover
Update the version so we can do another release once this is all done.
|
"""
edx-organizations app initialization module
"""
__version__ = '2.0.2' # pragma: no cover
|
<commit_before>"""
edx-organizations app initialization module
"""
__version__ = '2.0.1' # pragma: no cover
<commit_msg>Update the version so we can do another release once this is all done.<commit_after>
|
"""
edx-organizations app initialization module
"""
__version__ = '2.0.2' # pragma: no cover
|
"""
edx-organizations app initialization module
"""
__version__ = '2.0.1' # pragma: no cover
Update the version so we can do another release once this is all done."""
edx-organizations app initialization module
"""
__version__ = '2.0.2' # pragma: no cover
|
<commit_before>"""
edx-organizations app initialization module
"""
__version__ = '2.0.1' # pragma: no cover
<commit_msg>Update the version so we can do another release once this is all done.<commit_after>"""
edx-organizations app initialization module
"""
__version__ = '2.0.2' # pragma: no cover
|
453ef9f96a2441f2835bfc514862ae7000e1fdc1
|
opalescence/__init__.py
|
opalescence/__init__.py
|
# -*- coding: utf-8 -*-
"""
Package containing the main opalescence application logic.
"""
__author__ = """Brian Houston Morrow"""
__email__ = "bhm@brianmorrow.net"
__version__ = "0.5.0"
__year__ = "2021"
import dataclasses
@dataclasses.dataclass
class AppConfig:
use_cli: bool
_AppConfig = AppConfig(False)
def get_app_config():
return _AppConfig
|
# -*- coding: utf-8 -*-
"""
Package containing the main opalescence application logic.
"""
__author__ = """Brian Houston Morrow"""
__email__ = "bhm@brianmorrow.net"
__version__ = "0.5.0"
__year__ = "2021"
import dataclasses
# TODO: Remove ASAP
@dataclasses.dataclass
class AppConfig:
use_cli: bool = False
update_sec: int = 2
max_peers: int = 2
_AppConfig = AppConfig()
def get_app_config():
return _AppConfig
|
Add a couple more pieces of info to app config.
|
Add a couple more pieces of info to app config.
|
Python
|
mit
|
killerbat00/opalescence
|
# -*- coding: utf-8 -*-
"""
Package containing the main opalescence application logic.
"""
__author__ = """Brian Houston Morrow"""
__email__ = "bhm@brianmorrow.net"
__version__ = "0.5.0"
__year__ = "2021"
import dataclasses
@dataclasses.dataclass
class AppConfig:
use_cli: bool
_AppConfig = AppConfig(False)
def get_app_config():
return _AppConfig
Add a couple more pieces of info to app config.
|
# -*- coding: utf-8 -*-
"""
Package containing the main opalescence application logic.
"""
__author__ = """Brian Houston Morrow"""
__email__ = "bhm@brianmorrow.net"
__version__ = "0.5.0"
__year__ = "2021"
import dataclasses
# TODO: Remove ASAP
@dataclasses.dataclass
class AppConfig:
use_cli: bool = False
update_sec: int = 2
max_peers: int = 2
_AppConfig = AppConfig()
def get_app_config():
return _AppConfig
|
<commit_before># -*- coding: utf-8 -*-
"""
Package containing the main opalescence application logic.
"""
__author__ = """Brian Houston Morrow"""
__email__ = "bhm@brianmorrow.net"
__version__ = "0.5.0"
__year__ = "2021"
import dataclasses
@dataclasses.dataclass
class AppConfig:
use_cli: bool
_AppConfig = AppConfig(False)
def get_app_config():
return _AppConfig
<commit_msg>Add a couple more pieces of info to app config.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Package containing the main opalescence application logic.
"""
__author__ = """Brian Houston Morrow"""
__email__ = "bhm@brianmorrow.net"
__version__ = "0.5.0"
__year__ = "2021"
import dataclasses
# TODO: Remove ASAP
@dataclasses.dataclass
class AppConfig:
use_cli: bool = False
update_sec: int = 2
max_peers: int = 2
_AppConfig = AppConfig()
def get_app_config():
return _AppConfig
|
# -*- coding: utf-8 -*-
"""
Package containing the main opalescence application logic.
"""
__author__ = """Brian Houston Morrow"""
__email__ = "bhm@brianmorrow.net"
__version__ = "0.5.0"
__year__ = "2021"
import dataclasses
@dataclasses.dataclass
class AppConfig:
use_cli: bool
_AppConfig = AppConfig(False)
def get_app_config():
return _AppConfig
Add a couple more pieces of info to app config.# -*- coding: utf-8 -*-
"""
Package containing the main opalescence application logic.
"""
__author__ = """Brian Houston Morrow"""
__email__ = "bhm@brianmorrow.net"
__version__ = "0.5.0"
__year__ = "2021"
import dataclasses
# TODO: Remove ASAP
@dataclasses.dataclass
class AppConfig:
use_cli: bool = False
update_sec: int = 2
max_peers: int = 2
_AppConfig = AppConfig()
def get_app_config():
return _AppConfig
|
<commit_before># -*- coding: utf-8 -*-
"""
Package containing the main opalescence application logic.
"""
__author__ = """Brian Houston Morrow"""
__email__ = "bhm@brianmorrow.net"
__version__ = "0.5.0"
__year__ = "2021"
import dataclasses
@dataclasses.dataclass
class AppConfig:
use_cli: bool
_AppConfig = AppConfig(False)
def get_app_config():
return _AppConfig
<commit_msg>Add a couple more pieces of info to app config.<commit_after># -*- coding: utf-8 -*-
"""
Package containing the main opalescence application logic.
"""
__author__ = """Brian Houston Morrow"""
__email__ = "bhm@brianmorrow.net"
__version__ = "0.5.0"
__year__ = "2021"
import dataclasses
# TODO: Remove ASAP
@dataclasses.dataclass
class AppConfig:
use_cli: bool = False
update_sec: int = 2
max_peers: int = 2
_AppConfig = AppConfig()
def get_app_config():
return _AppConfig
|
b088d21b91dbfda0f18b1e4886f6aa01f2c72cbe
|
os_vif/objects/route.py
|
os_vif/objects/route.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from os_vif.objects import base as osv_base
@base.VersionedObjectRegistry.register
class Route(osv_base.VersionedObject):
"""Represents a route."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cidr': fields.IPNetworkField(),
'gateway': fields.IPAddressField(),
'interface': fields.StringField(),
}
@base.VersionedObjectRegistry.register
class RouteList(osv_base.VersionedObject, base.ObjectListBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Route'),
}
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from os_vif.objects import base as osv_base
@base.VersionedObjectRegistry.register
class Route(osv_base.VersionedObject):
"""Represents a route."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cidr': fields.IPNetworkField(),
'gateway': fields.IPAddressField(),
# TODO(mriedem): This field is never set by Nova, remove it in v2.0
# of this object.
'interface': fields.StringField(),
}
@base.VersionedObjectRegistry.register
class RouteList(osv_base.VersionedObject, base.ObjectListBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Route'),
}
|
Add a reminder to remove Route.interface field
|
Add a reminder to remove Route.interface field
Nova never sets the Route.interface value to anything but
None which fails with an error:
"ValueError: Fieldinterface' cannot be None"
This looks like a carry-over from the nova.network.model.Route
class which has an interface field which is set to None by default
but that field is never set to anything else in Nova, neither
for nova-network or Neutron.
Furthermore, it looks like 'interface' is not something that's
in the Route data model in Neutron either.
We don't hit this in the gate because the subnets we're testing
with don't have host_routes set.
The ValueError was fixed in Nova by not setting the attribute:
1d57c1fd53e930b02c3ce0e9914f95ef68dd1f87
This change adds a TODO to remove it in version 2.0 of the Route object.
Change-Id: Ib25a79514fe4335f4df222c02fefc9da62fe04ce
Closes-Bug: #1612812
|
Python
|
apache-2.0
|
openstack/os-vif
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from os_vif.objects import base as osv_base
@base.VersionedObjectRegistry.register
class Route(osv_base.VersionedObject):
"""Represents a route."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cidr': fields.IPNetworkField(),
'gateway': fields.IPAddressField(),
'interface': fields.StringField(),
}
@base.VersionedObjectRegistry.register
class RouteList(osv_base.VersionedObject, base.ObjectListBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Route'),
}
Add a reminder to remove Route.interface field
Nova never sets the Route.interface value to anything but
None which fails with an error:
"ValueError: Fieldinterface' cannot be None"
This looks like a carry-over from the nova.network.model.Route
class which has an interface field which is set to None by default
but that field is never set to anything else in Nova, neither
for nova-network or Neutron.
Furthermore, it looks like 'interface' is not something that's
in the Route data model in Neutron either.
We don't hit this in the gate because the subnets we're testing
with don't have host_routes set.
The ValueError was fixed in Nova by not setting the attribute:
1d57c1fd53e930b02c3ce0e9914f95ef68dd1f87
This change adds a TODO to remove it in version 2.0 of the Route object.
Change-Id: Ib25a79514fe4335f4df222c02fefc9da62fe04ce
Closes-Bug: #1612812
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from os_vif.objects import base as osv_base
@base.VersionedObjectRegistry.register
class Route(osv_base.VersionedObject):
"""Represents a route."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cidr': fields.IPNetworkField(),
'gateway': fields.IPAddressField(),
# TODO(mriedem): This field is never set by Nova, remove it in v2.0
# of this object.
'interface': fields.StringField(),
}
@base.VersionedObjectRegistry.register
class RouteList(osv_base.VersionedObject, base.ObjectListBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Route'),
}
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from os_vif.objects import base as osv_base
@base.VersionedObjectRegistry.register
class Route(osv_base.VersionedObject):
"""Represents a route."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cidr': fields.IPNetworkField(),
'gateway': fields.IPAddressField(),
'interface': fields.StringField(),
}
@base.VersionedObjectRegistry.register
class RouteList(osv_base.VersionedObject, base.ObjectListBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Route'),
}
<commit_msg>Add a reminder to remove Route.interface field
Nova never sets the Route.interface value to anything but
None which fails with an error:
"ValueError: Fieldinterface' cannot be None"
This looks like a carry-over from the nova.network.model.Route
class which has an interface field which is set to None by default
but that field is never set to anything else in Nova, neither
for nova-network or Neutron.
Furthermore, it looks like 'interface' is not something that's
in the Route data model in Neutron either.
We don't hit this in the gate because the subnets we're testing
with don't have host_routes set.
The ValueError was fixed in Nova by not setting the attribute:
1d57c1fd53e930b02c3ce0e9914f95ef68dd1f87
This change adds a TODO to remove it in version 2.0 of the Route object.
Change-Id: Ib25a79514fe4335f4df222c02fefc9da62fe04ce
Closes-Bug: #1612812<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from os_vif.objects import base as osv_base
@base.VersionedObjectRegistry.register
class Route(osv_base.VersionedObject):
"""Represents a route."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cidr': fields.IPNetworkField(),
'gateway': fields.IPAddressField(),
# TODO(mriedem): This field is never set by Nova, remove it in v2.0
# of this object.
'interface': fields.StringField(),
}
@base.VersionedObjectRegistry.register
class RouteList(osv_base.VersionedObject, base.ObjectListBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Route'),
}
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from os_vif.objects import base as osv_base
@base.VersionedObjectRegistry.register
class Route(osv_base.VersionedObject):
"""Represents a route."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cidr': fields.IPNetworkField(),
'gateway': fields.IPAddressField(),
'interface': fields.StringField(),
}
@base.VersionedObjectRegistry.register
class RouteList(osv_base.VersionedObject, base.ObjectListBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Route'),
}
Add a reminder to remove Route.interface field
Nova never sets the Route.interface value to anything but
None which fails with an error:
"ValueError: Fieldinterface' cannot be None"
This looks like a carry-over from the nova.network.model.Route
class which has an interface field which is set to None by default
but that field is never set to anything else in Nova, neither
for nova-network or Neutron.
Furthermore, it looks like 'interface' is not something that's
in the Route data model in Neutron either.
We don't hit this in the gate because the subnets we're testing
with don't have host_routes set.
The ValueError was fixed in Nova by not setting the attribute:
1d57c1fd53e930b02c3ce0e9914f95ef68dd1f87
This change adds a TODO to remove it in version 2.0 of the Route object.
Change-Id: Ib25a79514fe4335f4df222c02fefc9da62fe04ce
Closes-Bug: #1612812# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from os_vif.objects import base as osv_base
@base.VersionedObjectRegistry.register
class Route(osv_base.VersionedObject):
"""Represents a route."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cidr': fields.IPNetworkField(),
'gateway': fields.IPAddressField(),
# TODO(mriedem): This field is never set by Nova, remove it in v2.0
# of this object.
'interface': fields.StringField(),
}
@base.VersionedObjectRegistry.register
class RouteList(osv_base.VersionedObject, base.ObjectListBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Route'),
}
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from os_vif.objects import base as osv_base
@base.VersionedObjectRegistry.register
class Route(osv_base.VersionedObject):
"""Represents a route."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cidr': fields.IPNetworkField(),
'gateway': fields.IPAddressField(),
'interface': fields.StringField(),
}
@base.VersionedObjectRegistry.register
class RouteList(osv_base.VersionedObject, base.ObjectListBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Route'),
}
<commit_msg>Add a reminder to remove Route.interface field
Nova never sets the Route.interface value to anything but
None which fails with an error:
"ValueError: Fieldinterface' cannot be None"
This looks like a carry-over from the nova.network.model.Route
class which has an interface field which is set to None by default
but that field is never set to anything else in Nova, neither
for nova-network or Neutron.
Furthermore, it looks like 'interface' is not something that's
in the Route data model in Neutron either.
We don't hit this in the gate because the subnets we're testing
with don't have host_routes set.
The ValueError was fixed in Nova by not setting the attribute:
1d57c1fd53e930b02c3ce0e9914f95ef68dd1f87
This change adds a TODO to remove it in version 2.0 of the Route object.
Change-Id: Ib25a79514fe4335f4df222c02fefc9da62fe04ce
Closes-Bug: #1612812<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base
from oslo_versionedobjects import fields
from os_vif.objects import base as osv_base
@base.VersionedObjectRegistry.register
class Route(osv_base.VersionedObject):
"""Represents a route."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cidr': fields.IPNetworkField(),
'gateway': fields.IPAddressField(),
# TODO(mriedem): This field is never set by Nova, remove it in v2.0
# of this object.
'interface': fields.StringField(),
}
@base.VersionedObjectRegistry.register
class RouteList(osv_base.VersionedObject, base.ObjectListBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Route'),
}
|
8fbd37c1858d84dbee9695999c719f60e6014ed5
|
kitchen/backends/plugins/virt_memory_usage.py
|
kitchen/backends/plugins/virt_memory_usage.py
|
"""Plugin that adds guest memory usage in GB"""
MIN_HOST_MEM = 1000000 # kB
def inject(node):
"""Adds guest RAM usage data to the host"""
node.setdefault('kitchen', {})
node['kitchen'].setdefault('data', {})
node['kitchen']['data']['memory_usage'] = MIN_HOST_MEM
for guest in node.get('virtualization', {}).get('guests', []):
memory = int(guest.get('memory').get('total').rstrip('kB'))
node['kitchen']['data']['memory_usage'] += memory
# transform into GB
node['kitchen']['data']['memory_usage'] /= 1048576
|
"""Plugin that adds guest memory usage in GB"""
MIN_HOST_MEM = 1000000 # kB
def inject(node):
"""Adds guest RAM usage data to the host"""
node.setdefault('kitchen', {})
node['kitchen'].setdefault('data', {})
node['kitchen']['data']['memory_usage'] = MIN_HOST_MEM
for guest in node.get('virtualization', {}).get('guests', []):
memory = int(guest.get('memory', {}).get('total', '').rstrip('kB'))
node['kitchen']['data']['memory_usage'] += memory
# transform into GB
node['kitchen']['data']['memory_usage'] /= 1048576
|
Handle cases where memory key is not present
|
Handle cases where memory key is not present
|
Python
|
apache-2.0
|
edelight/kitchen,edelight/kitchen,edelight/kitchen,edelight/kitchen
|
"""Plugin that adds guest memory usage in GB"""
MIN_HOST_MEM = 1000000 # kB
def inject(node):
"""Adds guest RAM usage data to the host"""
node.setdefault('kitchen', {})
node['kitchen'].setdefault('data', {})
node['kitchen']['data']['memory_usage'] = MIN_HOST_MEM
for guest in node.get('virtualization', {}).get('guests', []):
memory = int(guest.get('memory').get('total').rstrip('kB'))
node['kitchen']['data']['memory_usage'] += memory
# transform into GB
node['kitchen']['data']['memory_usage'] /= 1048576
Handle cases where memory key is not present
|
"""Plugin that adds guest memory usage in GB"""
MIN_HOST_MEM = 1000000 # kB
def inject(node):
"""Adds guest RAM usage data to the host"""
node.setdefault('kitchen', {})
node['kitchen'].setdefault('data', {})
node['kitchen']['data']['memory_usage'] = MIN_HOST_MEM
for guest in node.get('virtualization', {}).get('guests', []):
memory = int(guest.get('memory', {}).get('total', '').rstrip('kB'))
node['kitchen']['data']['memory_usage'] += memory
# transform into GB
node['kitchen']['data']['memory_usage'] /= 1048576
|
<commit_before>"""Plugin that adds guest memory usage in GB"""
MIN_HOST_MEM = 1000000 # kB
def inject(node):
"""Adds guest RAM usage data to the host"""
node.setdefault('kitchen', {})
node['kitchen'].setdefault('data', {})
node['kitchen']['data']['memory_usage'] = MIN_HOST_MEM
for guest in node.get('virtualization', {}).get('guests', []):
memory = int(guest.get('memory').get('total').rstrip('kB'))
node['kitchen']['data']['memory_usage'] += memory
# transform into GB
node['kitchen']['data']['memory_usage'] /= 1048576
<commit_msg>Handle cases where memory key is not present<commit_after>
|
"""Plugin that adds guest memory usage in GB"""
MIN_HOST_MEM = 1000000 # kB
def inject(node):
"""Adds guest RAM usage data to the host"""
node.setdefault('kitchen', {})
node['kitchen'].setdefault('data', {})
node['kitchen']['data']['memory_usage'] = MIN_HOST_MEM
for guest in node.get('virtualization', {}).get('guests', []):
memory = int(guest.get('memory', {}).get('total', '').rstrip('kB'))
node['kitchen']['data']['memory_usage'] += memory
# transform into GB
node['kitchen']['data']['memory_usage'] /= 1048576
|
"""Plugin that adds guest memory usage in GB"""
MIN_HOST_MEM = 1000000 # kB
def inject(node):
"""Adds guest RAM usage data to the host"""
node.setdefault('kitchen', {})
node['kitchen'].setdefault('data', {})
node['kitchen']['data']['memory_usage'] = MIN_HOST_MEM
for guest in node.get('virtualization', {}).get('guests', []):
memory = int(guest.get('memory').get('total').rstrip('kB'))
node['kitchen']['data']['memory_usage'] += memory
# transform into GB
node['kitchen']['data']['memory_usage'] /= 1048576
Handle cases where memory key is not present"""Plugin that adds guest memory usage in GB"""
MIN_HOST_MEM = 1000000 # kB
def inject(node):
"""Adds guest RAM usage data to the host"""
node.setdefault('kitchen', {})
node['kitchen'].setdefault('data', {})
node['kitchen']['data']['memory_usage'] = MIN_HOST_MEM
for guest in node.get('virtualization', {}).get('guests', []):
memory = int(guest.get('memory', {}).get('total', '').rstrip('kB'))
node['kitchen']['data']['memory_usage'] += memory
# transform into GB
node['kitchen']['data']['memory_usage'] /= 1048576
|
<commit_before>"""Plugin that adds guest memory usage in GB"""
MIN_HOST_MEM = 1000000 # kB
def inject(node):
"""Adds guest RAM usage data to the host"""
node.setdefault('kitchen', {})
node['kitchen'].setdefault('data', {})
node['kitchen']['data']['memory_usage'] = MIN_HOST_MEM
for guest in node.get('virtualization', {}).get('guests', []):
memory = int(guest.get('memory').get('total').rstrip('kB'))
node['kitchen']['data']['memory_usage'] += memory
# transform into GB
node['kitchen']['data']['memory_usage'] /= 1048576
<commit_msg>Handle cases where memory key is not present<commit_after>"""Plugin that adds guest memory usage in GB"""
MIN_HOST_MEM = 1000000 # kB
def inject(node):
"""Adds guest RAM usage data to the host"""
node.setdefault('kitchen', {})
node['kitchen'].setdefault('data', {})
node['kitchen']['data']['memory_usage'] = MIN_HOST_MEM
for guest in node.get('virtualization', {}).get('guests', []):
memory = int(guest.get('memory', {}).get('total', '').rstrip('kB'))
node['kitchen']['data']['memory_usage'] += memory
# transform into GB
node['kitchen']['data']['memory_usage'] /= 1048576
|
027e4be84588e2ea62eea7e8f60ec2db1969e92c
|
testStats.py
|
testStats.py
|
import time
import HTU21DF
def median(x):
m,r= divmod(len(x),2)
if r:
return sorted(x)[m]
return sum(sorted(x)[m-1:m+1])/2
def average(x):
return sum(x)/len(x)
tempList = []
for x in range(100):
HTU21DF.htu_reset
tempList.append(HTU21DF.read_temperature())
print 'median is {0}'.format(median(tempList))
print 'average is {0}'.format(average(tempList))
print 'minimum value is {0}, maximum value is {1}'.format(min(tempList), max(tempList))
|
import time
import HTU21DF
def median(x):
m,r= divmod(len(x),2)
if r:
return sorted(x)[m]
return sum(sorted(x)[m-1:m+1])/2
def average(x):
return sum(x)/len(x)
tempList = []
for x in range(1000):
HTU21DF.htu_reset
tempList.append(HTU21DF.read_temperature())
print 'median is {0}'.format(median(tempList))
print 'average is {0}'.format(average(tempList))
print 'minimum value is {0}, maximum value is {1}'.format(min(tempList), max(tempList))
print 'difference is {0}'.format(max(tempList) - min(tempList))
|
Add difference and up range
|
Add difference and up range
|
Python
|
mit
|
khuisman/project-cool-attic
|
import time
import HTU21DF
def median(x):
m,r= divmod(len(x),2)
if r:
return sorted(x)[m]
return sum(sorted(x)[m-1:m+1])/2
def average(x):
return sum(x)/len(x)
tempList = []
for x in range(100):
HTU21DF.htu_reset
tempList.append(HTU21DF.read_temperature())
print 'median is {0}'.format(median(tempList))
print 'average is {0}'.format(average(tempList))
print 'minimum value is {0}, maximum value is {1}'.format(min(tempList), max(tempList))Add difference and up range
|
import time
import HTU21DF
def median(x):
m,r= divmod(len(x),2)
if r:
return sorted(x)[m]
return sum(sorted(x)[m-1:m+1])/2
def average(x):
return sum(x)/len(x)
tempList = []
for x in range(1000):
HTU21DF.htu_reset
tempList.append(HTU21DF.read_temperature())
print 'median is {0}'.format(median(tempList))
print 'average is {0}'.format(average(tempList))
print 'minimum value is {0}, maximum value is {1}'.format(min(tempList), max(tempList))
print 'difference is {0}'.format(max(tempList) - min(tempList))
|
<commit_before>import time
import HTU21DF
def median(x):
m,r= divmod(len(x),2)
if r:
return sorted(x)[m]
return sum(sorted(x)[m-1:m+1])/2
def average(x):
return sum(x)/len(x)
tempList = []
for x in range(100):
HTU21DF.htu_reset
tempList.append(HTU21DF.read_temperature())
print 'median is {0}'.format(median(tempList))
print 'average is {0}'.format(average(tempList))
print 'minimum value is {0}, maximum value is {1}'.format(min(tempList), max(tempList))<commit_msg>Add difference and up range<commit_after>
|
import time
import HTU21DF
def median(x):
m,r= divmod(len(x),2)
if r:
return sorted(x)[m]
return sum(sorted(x)[m-1:m+1])/2
def average(x):
return sum(x)/len(x)
tempList = []
for x in range(1000):
HTU21DF.htu_reset
tempList.append(HTU21DF.read_temperature())
print 'median is {0}'.format(median(tempList))
print 'average is {0}'.format(average(tempList))
print 'minimum value is {0}, maximum value is {1}'.format(min(tempList), max(tempList))
print 'difference is {0}'.format(max(tempList) - min(tempList))
|
import time
import HTU21DF
def median(x):
m,r= divmod(len(x),2)
if r:
return sorted(x)[m]
return sum(sorted(x)[m-1:m+1])/2
def average(x):
return sum(x)/len(x)
tempList = []
for x in range(100):
HTU21DF.htu_reset
tempList.append(HTU21DF.read_temperature())
print 'median is {0}'.format(median(tempList))
print 'average is {0}'.format(average(tempList))
print 'minimum value is {0}, maximum value is {1}'.format(min(tempList), max(tempList))Add difference and up rangeimport time
import HTU21DF
def median(x):
m,r= divmod(len(x),2)
if r:
return sorted(x)[m]
return sum(sorted(x)[m-1:m+1])/2
def average(x):
return sum(x)/len(x)
tempList = []
for x in range(1000):
HTU21DF.htu_reset
tempList.append(HTU21DF.read_temperature())
print 'median is {0}'.format(median(tempList))
print 'average is {0}'.format(average(tempList))
print 'minimum value is {0}, maximum value is {1}'.format(min(tempList), max(tempList))
print 'difference is {0}'.format(max(tempList) - min(tempList))
|
<commit_before>import time
import HTU21DF
def median(x):
m,r= divmod(len(x),2)
if r:
return sorted(x)[m]
return sum(sorted(x)[m-1:m+1])/2
def average(x):
return sum(x)/len(x)
tempList = []
for x in range(100):
HTU21DF.htu_reset
tempList.append(HTU21DF.read_temperature())
print 'median is {0}'.format(median(tempList))
print 'average is {0}'.format(average(tempList))
print 'minimum value is {0}, maximum value is {1}'.format(min(tempList), max(tempList))<commit_msg>Add difference and up range<commit_after>import time
import HTU21DF
def median(x):
m,r= divmod(len(x),2)
if r:
return sorted(x)[m]
return sum(sorted(x)[m-1:m+1])/2
def average(x):
return sum(x)/len(x)
tempList = []
for x in range(1000):
HTU21DF.htu_reset
tempList.append(HTU21DF.read_temperature())
print 'median is {0}'.format(median(tempList))
print 'average is {0}'.format(average(tempList))
print 'minimum value is {0}, maximum value is {1}'.format(min(tempList), max(tempList))
print 'difference is {0}'.format(max(tempList) - min(tempList))
|
187026ce695dee79c4897c0e8e014bb208de5a83
|
gaia_tools/load/__init__.py
|
gaia_tools/load/__init__.py
|
import os, os.path
import astropy.io.ascii
from gaia_tools.load import path, download
def galah(dr=1):
filePath, ReadMePath= path.galahPath(dr=dr)
if not os.path.exists(filePath):
download.galah(dr=dr)
data= astropy.io.ascii.read(filePath,readme=ReadMePath)
return data
|
import os, os.path
import numpy
import astropy.io.ascii
from gaia_tools.load import path, download
def galah(dr=1):
filePath, ReadMePath= path.galahPath(dr=dr)
if not os.path.exists(filePath):
download.galah(dr=dr)
data= astropy.io.ascii.read(filePath,readme=ReadMePath)
data['RA']._fill_value= numpy.array([-9999.99])
data['dec']._fill_value= numpy.array([-9999.99])
return data
|
Set fill value of GALAH RA and Dec explicitly
|
Set fill value of GALAH RA and Dec explicitly
|
Python
|
mit
|
jobovy/gaia_tools
|
import os, os.path
import astropy.io.ascii
from gaia_tools.load import path, download
def galah(dr=1):
filePath, ReadMePath= path.galahPath(dr=dr)
if not os.path.exists(filePath):
download.galah(dr=dr)
data= astropy.io.ascii.read(filePath,readme=ReadMePath)
return data
Set fill value of GALAH RA and Dec explicitly
|
import os, os.path
import numpy
import astropy.io.ascii
from gaia_tools.load import path, download
def galah(dr=1):
filePath, ReadMePath= path.galahPath(dr=dr)
if not os.path.exists(filePath):
download.galah(dr=dr)
data= astropy.io.ascii.read(filePath,readme=ReadMePath)
data['RA']._fill_value= numpy.array([-9999.99])
data['dec']._fill_value= numpy.array([-9999.99])
return data
|
<commit_before>import os, os.path
import astropy.io.ascii
from gaia_tools.load import path, download
def galah(dr=1):
filePath, ReadMePath= path.galahPath(dr=dr)
if not os.path.exists(filePath):
download.galah(dr=dr)
data= astropy.io.ascii.read(filePath,readme=ReadMePath)
return data
<commit_msg>Set fill value of GALAH RA and Dec explicitly<commit_after>
|
import os, os.path
import numpy
import astropy.io.ascii
from gaia_tools.load import path, download
def galah(dr=1):
filePath, ReadMePath= path.galahPath(dr=dr)
if not os.path.exists(filePath):
download.galah(dr=dr)
data= astropy.io.ascii.read(filePath,readme=ReadMePath)
data['RA']._fill_value= numpy.array([-9999.99])
data['dec']._fill_value= numpy.array([-9999.99])
return data
|
import os, os.path
import astropy.io.ascii
from gaia_tools.load import path, download
def galah(dr=1):
filePath, ReadMePath= path.galahPath(dr=dr)
if not os.path.exists(filePath):
download.galah(dr=dr)
data= astropy.io.ascii.read(filePath,readme=ReadMePath)
return data
Set fill value of GALAH RA and Dec explicitlyimport os, os.path
import numpy
import astropy.io.ascii
from gaia_tools.load import path, download
def galah(dr=1):
filePath, ReadMePath= path.galahPath(dr=dr)
if not os.path.exists(filePath):
download.galah(dr=dr)
data= astropy.io.ascii.read(filePath,readme=ReadMePath)
data['RA']._fill_value= numpy.array([-9999.99])
data['dec']._fill_value= numpy.array([-9999.99])
return data
|
<commit_before>import os, os.path
import astropy.io.ascii
from gaia_tools.load import path, download
def galah(dr=1):
filePath, ReadMePath= path.galahPath(dr=dr)
if not os.path.exists(filePath):
download.galah(dr=dr)
data= astropy.io.ascii.read(filePath,readme=ReadMePath)
return data
<commit_msg>Set fill value of GALAH RA and Dec explicitly<commit_after>import os, os.path
import numpy
import astropy.io.ascii
from gaia_tools.load import path, download
def galah(dr=1):
filePath, ReadMePath= path.galahPath(dr=dr)
if not os.path.exists(filePath):
download.galah(dr=dr)
data= astropy.io.ascii.read(filePath,readme=ReadMePath)
data['RA']._fill_value= numpy.array([-9999.99])
data['dec']._fill_value= numpy.array([-9999.99])
return data
|
f4eea63ee7658a16733cce23a42aac8f5b7fe49a
|
handoverservice/handover_api/serializers.py
|
handoverservice/handover_api/serializers.py
|
from handover_api.models import Handover, Draft, User
from rest_framework import serializers
class HandoverSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Handover
fields = ('project_id','from_user_id','to_user_id','state')
class DraftSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Draft
fields = ('project_id','from_user_id','to_user_id','state')
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('dds_id', 'api_key')
|
from handover_api.models import Handover, Draft, User
from rest_framework import serializers
class HandoverSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Handover
fields = ('id','url','project_id','from_user_id','to_user_id','state')
class DraftSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Draft
fields = ('id','url','project_id','from_user_id','to_user_id','state')
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id','url','dds_id', 'api_key')
|
Include id and url in models
|
Include id and url in models
|
Python
|
mit
|
Duke-GCB/DukeDSHandoverService,Duke-GCB/DukeDSHandoverService,Duke-GCB/DukeDSHandoverService
|
from handover_api.models import Handover, Draft, User
from rest_framework import serializers
class HandoverSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Handover
fields = ('project_id','from_user_id','to_user_id','state')
class DraftSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Draft
fields = ('project_id','from_user_id','to_user_id','state')
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('dds_id', 'api_key')Include id and url in models
|
from handover_api.models import Handover, Draft, User
from rest_framework import serializers
class HandoverSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Handover
fields = ('id','url','project_id','from_user_id','to_user_id','state')
class DraftSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Draft
fields = ('id','url','project_id','from_user_id','to_user_id','state')
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id','url','dds_id', 'api_key')
|
<commit_before>from handover_api.models import Handover, Draft, User
from rest_framework import serializers
class HandoverSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Handover
fields = ('project_id','from_user_id','to_user_id','state')
class DraftSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Draft
fields = ('project_id','from_user_id','to_user_id','state')
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('dds_id', 'api_key')<commit_msg>Include id and url in models<commit_after>
|
from handover_api.models import Handover, Draft, User
from rest_framework import serializers
class HandoverSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Handover
fields = ('id','url','project_id','from_user_id','to_user_id','state')
class DraftSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Draft
fields = ('id','url','project_id','from_user_id','to_user_id','state')
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id','url','dds_id', 'api_key')
|
from handover_api.models import Handover, Draft, User
from rest_framework import serializers
class HandoverSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Handover
fields = ('project_id','from_user_id','to_user_id','state')
class DraftSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Draft
fields = ('project_id','from_user_id','to_user_id','state')
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('dds_id', 'api_key')Include id and url in modelsfrom handover_api.models import Handover, Draft, User
from rest_framework import serializers
class HandoverSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Handover
fields = ('id','url','project_id','from_user_id','to_user_id','state')
class DraftSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Draft
fields = ('id','url','project_id','from_user_id','to_user_id','state')
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id','url','dds_id', 'api_key')
|
<commit_before>from handover_api.models import Handover, Draft, User
from rest_framework import serializers
class HandoverSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Handover
fields = ('project_id','from_user_id','to_user_id','state')
class DraftSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Draft
fields = ('project_id','from_user_id','to_user_id','state')
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('dds_id', 'api_key')<commit_msg>Include id and url in models<commit_after>from handover_api.models import Handover, Draft, User
from rest_framework import serializers
class HandoverSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Handover
fields = ('id','url','project_id','from_user_id','to_user_id','state')
class DraftSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Draft
fields = ('id','url','project_id','from_user_id','to_user_id','state')
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id','url','dds_id', 'api_key')
|
d491aea2da5d52245001f4da24331f33e4a3a299
|
importlib_metadata/_meta.py
|
importlib_metadata/_meta.py
|
from ._compat import Protocol
from typing import Any, Dict, Iterator, List, TypeVar, Union
_T = TypeVar("_T")
class PackageMetadata(Protocol):
def __len__(self) -> int:
... # pragma: no cover
def __contains__(self, item: str) -> bool:
... # pragma: no cover
def __getitem__(self, key: str) -> str:
... # pragma: no cover
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
def json(self) -> Dict[str, Union[str, List[str]]]:
"""
A JSON-compatible form of the metadata.
"""
class SimplePath(Protocol):
"""
A minimal subset of pathlib.Path required by PathDistribution.
"""
def joinpath(self) -> 'SimplePath':
... # pragma: no cover
def __div__(self) -> 'SimplePath':
... # pragma: no cover
def parent(self) -> 'SimplePath':
... # pragma: no cover
def read_text(self) -> str:
... # pragma: no cover
|
from ._compat import Protocol
from typing import Any, Dict, Iterator, List, TypeVar, Union
_T = TypeVar("_T")
class PackageMetadata(Protocol):
def __len__(self) -> int:
... # pragma: no cover
def __contains__(self, item: str) -> bool:
... # pragma: no cover
def __getitem__(self, key: str) -> str:
... # pragma: no cover
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
def json(self) -> Dict[str, Union[str, List[str]]]:
"""
A JSON-compatible form of the metadata.
"""
class SimplePath(Protocol):
"""
A minimal subset of pathlib.Path required by PathDistribution.
>>> import pathlib
>>> import typing
>>> _: SimplePath = typing.cast(pathlib.Path, None)
"""
def joinpath(self) -> 'SimplePath':
... # pragma: no cover
def __div__(self) -> 'SimplePath':
... # pragma: no cover
def parent(self) -> 'SimplePath':
... # pragma: no cover
def read_text(self) -> str:
... # pragma: no cover
|
Add test purported to capture the failure, but it still passes.
|
Add test purported to capture the failure, but it still passes.
|
Python
|
apache-2.0
|
python/importlib_metadata
|
from ._compat import Protocol
from typing import Any, Dict, Iterator, List, TypeVar, Union
_T = TypeVar("_T")
class PackageMetadata(Protocol):
def __len__(self) -> int:
... # pragma: no cover
def __contains__(self, item: str) -> bool:
... # pragma: no cover
def __getitem__(self, key: str) -> str:
... # pragma: no cover
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
def json(self) -> Dict[str, Union[str, List[str]]]:
"""
A JSON-compatible form of the metadata.
"""
class SimplePath(Protocol):
"""
A minimal subset of pathlib.Path required by PathDistribution.
"""
def joinpath(self) -> 'SimplePath':
... # pragma: no cover
def __div__(self) -> 'SimplePath':
... # pragma: no cover
def parent(self) -> 'SimplePath':
... # pragma: no cover
def read_text(self) -> str:
... # pragma: no cover
Add test purported to capture the failure, but it still passes.
|
from ._compat import Protocol
from typing import Any, Dict, Iterator, List, TypeVar, Union
_T = TypeVar("_T")
class PackageMetadata(Protocol):
def __len__(self) -> int:
... # pragma: no cover
def __contains__(self, item: str) -> bool:
... # pragma: no cover
def __getitem__(self, key: str) -> str:
... # pragma: no cover
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
def json(self) -> Dict[str, Union[str, List[str]]]:
"""
A JSON-compatible form of the metadata.
"""
class SimplePath(Protocol):
"""
A minimal subset of pathlib.Path required by PathDistribution.
>>> import pathlib
>>> import typing
>>> _: SimplePath = typing.cast(pathlib.Path, None)
"""
def joinpath(self) -> 'SimplePath':
... # pragma: no cover
def __div__(self) -> 'SimplePath':
... # pragma: no cover
def parent(self) -> 'SimplePath':
... # pragma: no cover
def read_text(self) -> str:
... # pragma: no cover
|
<commit_before>from ._compat import Protocol
from typing import Any, Dict, Iterator, List, TypeVar, Union
_T = TypeVar("_T")
class PackageMetadata(Protocol):
def __len__(self) -> int:
... # pragma: no cover
def __contains__(self, item: str) -> bool:
... # pragma: no cover
def __getitem__(self, key: str) -> str:
... # pragma: no cover
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
def json(self) -> Dict[str, Union[str, List[str]]]:
"""
A JSON-compatible form of the metadata.
"""
class SimplePath(Protocol):
"""
A minimal subset of pathlib.Path required by PathDistribution.
"""
def joinpath(self) -> 'SimplePath':
... # pragma: no cover
def __div__(self) -> 'SimplePath':
... # pragma: no cover
def parent(self) -> 'SimplePath':
... # pragma: no cover
def read_text(self) -> str:
... # pragma: no cover
<commit_msg>Add test purported to capture the failure, but it still passes.<commit_after>
|
from ._compat import Protocol
from typing import Any, Dict, Iterator, List, TypeVar, Union
_T = TypeVar("_T")
class PackageMetadata(Protocol):
def __len__(self) -> int:
... # pragma: no cover
def __contains__(self, item: str) -> bool:
... # pragma: no cover
def __getitem__(self, key: str) -> str:
... # pragma: no cover
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
def json(self) -> Dict[str, Union[str, List[str]]]:
"""
A JSON-compatible form of the metadata.
"""
class SimplePath(Protocol):
"""
A minimal subset of pathlib.Path required by PathDistribution.
>>> import pathlib
>>> import typing
>>> _: SimplePath = typing.cast(pathlib.Path, None)
"""
def joinpath(self) -> 'SimplePath':
... # pragma: no cover
def __div__(self) -> 'SimplePath':
... # pragma: no cover
def parent(self) -> 'SimplePath':
... # pragma: no cover
def read_text(self) -> str:
... # pragma: no cover
|
from ._compat import Protocol
from typing import Any, Dict, Iterator, List, TypeVar, Union
_T = TypeVar("_T")
class PackageMetadata(Protocol):
def __len__(self) -> int:
... # pragma: no cover
def __contains__(self, item: str) -> bool:
... # pragma: no cover
def __getitem__(self, key: str) -> str:
... # pragma: no cover
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
def json(self) -> Dict[str, Union[str, List[str]]]:
"""
A JSON-compatible form of the metadata.
"""
class SimplePath(Protocol):
"""
A minimal subset of pathlib.Path required by PathDistribution.
"""
def joinpath(self) -> 'SimplePath':
... # pragma: no cover
def __div__(self) -> 'SimplePath':
... # pragma: no cover
def parent(self) -> 'SimplePath':
... # pragma: no cover
def read_text(self) -> str:
... # pragma: no cover
Add test purported to capture the failure, but it still passes.from ._compat import Protocol
from typing import Any, Dict, Iterator, List, TypeVar, Union
_T = TypeVar("_T")
class PackageMetadata(Protocol):
def __len__(self) -> int:
... # pragma: no cover
def __contains__(self, item: str) -> bool:
... # pragma: no cover
def __getitem__(self, key: str) -> str:
... # pragma: no cover
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
def json(self) -> Dict[str, Union[str, List[str]]]:
"""
A JSON-compatible form of the metadata.
"""
class SimplePath(Protocol):
"""
A minimal subset of pathlib.Path required by PathDistribution.
>>> import pathlib
>>> import typing
>>> _: SimplePath = typing.cast(pathlib.Path, None)
"""
def joinpath(self) -> 'SimplePath':
... # pragma: no cover
def __div__(self) -> 'SimplePath':
... # pragma: no cover
def parent(self) -> 'SimplePath':
... # pragma: no cover
def read_text(self) -> str:
... # pragma: no cover
|
<commit_before>from ._compat import Protocol
from typing import Any, Dict, Iterator, List, TypeVar, Union
_T = TypeVar("_T")
class PackageMetadata(Protocol):
def __len__(self) -> int:
... # pragma: no cover
def __contains__(self, item: str) -> bool:
... # pragma: no cover
def __getitem__(self, key: str) -> str:
... # pragma: no cover
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
def json(self) -> Dict[str, Union[str, List[str]]]:
"""
A JSON-compatible form of the metadata.
"""
class SimplePath(Protocol):
"""
A minimal subset of pathlib.Path required by PathDistribution.
"""
def joinpath(self) -> 'SimplePath':
... # pragma: no cover
def __div__(self) -> 'SimplePath':
... # pragma: no cover
def parent(self) -> 'SimplePath':
... # pragma: no cover
def read_text(self) -> str:
... # pragma: no cover
<commit_msg>Add test purported to capture the failure, but it still passes.<commit_after>from ._compat import Protocol
from typing import Any, Dict, Iterator, List, TypeVar, Union
_T = TypeVar("_T")
class PackageMetadata(Protocol):
def __len__(self) -> int:
... # pragma: no cover
def __contains__(self, item: str) -> bool:
... # pragma: no cover
def __getitem__(self, key: str) -> str:
... # pragma: no cover
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@property
def json(self) -> Dict[str, Union[str, List[str]]]:
"""
A JSON-compatible form of the metadata.
"""
class SimplePath(Protocol):
"""
A minimal subset of pathlib.Path required by PathDistribution.
>>> import pathlib
>>> import typing
>>> _: SimplePath = typing.cast(pathlib.Path, None)
"""
def joinpath(self) -> 'SimplePath':
... # pragma: no cover
def __div__(self) -> 'SimplePath':
... # pragma: no cover
def parent(self) -> 'SimplePath':
... # pragma: no cover
def read_text(self) -> str:
... # pragma: no cover
|
8cd319b59cb28e4ae2fe277205f586983dd4ed63
|
tst/utils.py
|
tst/utils.py
|
from __future__ import print_function
import sys
import string
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
print(color + msg + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
|
from __future__ import print_function
import sys
import string
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
data = msg.__str__() if hasattr(msg, '__str__') else msg
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
|
Improve cprint to use __str__ method if available
|
Improve cprint to use __str__ method if available
|
Python
|
agpl-3.0
|
daltonserey/tst,daltonserey/tst
|
from __future__ import print_function
import sys
import string
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
print(color + msg + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
Improve cprint to use __str__ method if available
|
from __future__ import print_function
import sys
import string
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
data = msg.__str__() if hasattr(msg, '__str__') else msg
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
|
<commit_before>from __future__ import print_function
import sys
import string
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
print(color + msg + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
<commit_msg>Improve cprint to use __str__ method if available<commit_after>
|
from __future__ import print_function
import sys
import string
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
data = msg.__str__() if hasattr(msg, '__str__') else msg
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
|
from __future__ import print_function
import sys
import string
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
print(color + msg + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
Improve cprint to use __str__ method if availablefrom __future__ import print_function
import sys
import string
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
data = msg.__str__() if hasattr(msg, '__str__') else msg
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
|
<commit_before>from __future__ import print_function
import sys
import string
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
print(color + msg + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
<commit_msg>Improve cprint to use __str__ method if available<commit_after>from __future__ import print_function
import sys
import string
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
data = msg.__str__() if hasattr(msg, '__str__') else msg
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
|
e8eb21a81587bb2f6c6b783f8345e6f167e15691
|
flycam.py
|
flycam.py
|
import capture
from picamera import PiCamera
def image_cap_loop(camera):
"""Set image parameters, capture image, set wait time, repeat"""
images = 18
status = None
resolution = (854, 480)
latest = capture.cap(camera, resolution, status)
status = latest[0]
size = capture.image_size(latest[1])
day = 1000
if size > day:
wait = 60
else:
wait = 600
status = capture.shutdown(camera)
print('Next capture begins in {} seconds.'.format(wait))
time.sleep(wait)
status = shutdown(camera)
# image_cap_loop(camera)
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
|
import capture
from picamera import PiCamera
import time
def image_cap_loop(camera):
"""Set image parameters, capture image, set wait time, repeat"""
images = 18
status = None
resolution = (854, 480)
latest = capture.cap(camera, resolution, status)
status = latest[0]
size = capture.image_size(latest[1])
day = 1000
if size > day:
wait = 60
else:
wait = 600
status = capture.shutdown(camera)
print('Next capture begins in {} seconds.'.format(wait))
time.sleep(wait)
status = shutdown(camera)
# image_cap_loop(camera)
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
|
Add time to imported modules.
|
Add time to imported modules.
|
Python
|
mit
|
gnfrazier/YardCam
|
import capture
from picamera import PiCamera
def image_cap_loop(camera):
"""Set image parameters, capture image, set wait time, repeat"""
images = 18
status = None
resolution = (854, 480)
latest = capture.cap(camera, resolution, status)
status = latest[0]
size = capture.image_size(latest[1])
day = 1000
if size > day:
wait = 60
else:
wait = 600
status = capture.shutdown(camera)
print('Next capture begins in {} seconds.'.format(wait))
time.sleep(wait)
status = shutdown(camera)
# image_cap_loop(camera)
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
Add time to imported modules.
|
import capture
from picamera import PiCamera
import time
def image_cap_loop(camera):
"""Set image parameters, capture image, set wait time, repeat"""
images = 18
status = None
resolution = (854, 480)
latest = capture.cap(camera, resolution, status)
status = latest[0]
size = capture.image_size(latest[1])
day = 1000
if size > day:
wait = 60
else:
wait = 600
status = capture.shutdown(camera)
print('Next capture begins in {} seconds.'.format(wait))
time.sleep(wait)
status = shutdown(camera)
# image_cap_loop(camera)
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
|
<commit_before>import capture
from picamera import PiCamera
def image_cap_loop(camera):
"""Set image parameters, capture image, set wait time, repeat"""
images = 18
status = None
resolution = (854, 480)
latest = capture.cap(camera, resolution, status)
status = latest[0]
size = capture.image_size(latest[1])
day = 1000
if size > day:
wait = 60
else:
wait = 600
status = capture.shutdown(camera)
print('Next capture begins in {} seconds.'.format(wait))
time.sleep(wait)
status = shutdown(camera)
# image_cap_loop(camera)
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
<commit_msg>Add time to imported modules.<commit_after>
|
import capture
from picamera import PiCamera
import time
def image_cap_loop(camera):
"""Set image parameters, capture image, set wait time, repeat"""
images = 18
status = None
resolution = (854, 480)
latest = capture.cap(camera, resolution, status)
status = latest[0]
size = capture.image_size(latest[1])
day = 1000
if size > day:
wait = 60
else:
wait = 600
status = capture.shutdown(camera)
print('Next capture begins in {} seconds.'.format(wait))
time.sleep(wait)
status = shutdown(camera)
# image_cap_loop(camera)
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
|
import capture
from picamera import PiCamera
def image_cap_loop(camera):
"""Set image parameters, capture image, set wait time, repeat"""
images = 18
status = None
resolution = (854, 480)
latest = capture.cap(camera, resolution, status)
status = latest[0]
size = capture.image_size(latest[1])
day = 1000
if size > day:
wait = 60
else:
wait = 600
status = capture.shutdown(camera)
print('Next capture begins in {} seconds.'.format(wait))
time.sleep(wait)
status = shutdown(camera)
# image_cap_loop(camera)
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
Add time to imported modules.import capture
from picamera import PiCamera
import time
def image_cap_loop(camera):
"""Set image parameters, capture image, set wait time, repeat"""
images = 18
status = None
resolution = (854, 480)
latest = capture.cap(camera, resolution, status)
status = latest[0]
size = capture.image_size(latest[1])
day = 1000
if size > day:
wait = 60
else:
wait = 600
status = capture.shutdown(camera)
print('Next capture begins in {} seconds.'.format(wait))
time.sleep(wait)
status = shutdown(camera)
# image_cap_loop(camera)
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
|
<commit_before>import capture
from picamera import PiCamera
def image_cap_loop(camera):
"""Set image parameters, capture image, set wait time, repeat"""
images = 18
status = None
resolution = (854, 480)
latest = capture.cap(camera, resolution, status)
status = latest[0]
size = capture.image_size(latest[1])
day = 1000
if size > day:
wait = 60
else:
wait = 600
status = capture.shutdown(camera)
print('Next capture begins in {} seconds.'.format(wait))
time.sleep(wait)
status = shutdown(camera)
# image_cap_loop(camera)
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
<commit_msg>Add time to imported modules.<commit_after>import capture
from picamera import PiCamera
import time
def image_cap_loop(camera):
"""Set image parameters, capture image, set wait time, repeat"""
images = 18
status = None
resolution = (854, 480)
latest = capture.cap(camera, resolution, status)
status = latest[0]
size = capture.image_size(latest[1])
day = 1000
if size > day:
wait = 60
else:
wait = 600
status = capture.shutdown(camera)
print('Next capture begins in {} seconds.'.format(wait))
time.sleep(wait)
status = shutdown(camera)
# image_cap_loop(camera)
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
|
a810493a9ccf26d25b467ab5f7d2b0a9718c1442
|
login/management/commands/demo_data_login.py
|
login/management/commands/demo_data_login.py
|
from django.core.management.base import BaseCommand
from login.tests.model_maker import make_superuser
from login.tests.model_maker import make_user
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
make_superuser('admin', 'admin')
make_user('staff', 'staff', is_staff=True)
make_user('web', 'web')
print("Created 'login' demo data...")
|
from django.core.management.base import BaseCommand
from login.tests.scenario import (
user_contractor,
user_default,
)
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
user_contractor()
user_default()
print("Created 'login' demo data...")
|
Use the standard scenario for demo data
|
Use the standard scenario for demo data
|
Python
|
apache-2.0
|
pkimber/login,pkimber/login,pkimber/login
|
from django.core.management.base import BaseCommand
from login.tests.model_maker import make_superuser
from login.tests.model_maker import make_user
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
make_superuser('admin', 'admin')
make_user('staff', 'staff', is_staff=True)
make_user('web', 'web')
print("Created 'login' demo data...")
Use the standard scenario for demo data
|
from django.core.management.base import BaseCommand
from login.tests.scenario import (
user_contractor,
user_default,
)
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
user_contractor()
user_default()
print("Created 'login' demo data...")
|
<commit_before>from django.core.management.base import BaseCommand
from login.tests.model_maker import make_superuser
from login.tests.model_maker import make_user
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
make_superuser('admin', 'admin')
make_user('staff', 'staff', is_staff=True)
make_user('web', 'web')
print("Created 'login' demo data...")
<commit_msg>Use the standard scenario for demo data<commit_after>
|
from django.core.management.base import BaseCommand
from login.tests.scenario import (
user_contractor,
user_default,
)
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
user_contractor()
user_default()
print("Created 'login' demo data...")
|
from django.core.management.base import BaseCommand
from login.tests.model_maker import make_superuser
from login.tests.model_maker import make_user
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
make_superuser('admin', 'admin')
make_user('staff', 'staff', is_staff=True)
make_user('web', 'web')
print("Created 'login' demo data...")
Use the standard scenario for demo datafrom django.core.management.base import BaseCommand
from login.tests.scenario import (
user_contractor,
user_default,
)
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
user_contractor()
user_default()
print("Created 'login' demo data...")
|
<commit_before>from django.core.management.base import BaseCommand
from login.tests.model_maker import make_superuser
from login.tests.model_maker import make_user
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
make_superuser('admin', 'admin')
make_user('staff', 'staff', is_staff=True)
make_user('web', 'web')
print("Created 'login' demo data...")
<commit_msg>Use the standard scenario for demo data<commit_after>from django.core.management.base import BaseCommand
from login.tests.scenario import (
user_contractor,
user_default,
)
class Command(BaseCommand):
help = "Create demo data for 'login'"
def handle(self, *args, **options):
user_contractor()
user_default()
print("Created 'login' demo data...")
|
8762ae185d3febe06f6ef5acfa082b26063358a2
|
example_migration.py
|
example_migration.py
|
from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS
source_config = {'host': 'localhost',
'port': 27017,
'database': 'metadatastore_production_v1',
'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory'}
source = MDSRO(source_config)
dest = MDS(dest_config)
for run_start in source.find_run_starts():
dest.insert_run_start(**run_start)
for desc in source.find_descriptors(run_start=run_start):
events = source.get_events_generator(descriptor=desc)
dest.insert_descriptor(**desc)
dest.bulk_insert_events(desc, events)
dest.insert_run_stop(**source.stop_by_start(run_start))
|
from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS
source_config = {'host': 'localhost',
'port': 27017,
'database': 'metadatastore_production_v1',
'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory',
'timezone': 'US/Eastern'}
source = MDSRO(source_config)
dest = MDS(dest_config)
for run_start in source.find_run_starts():
dest.insert_run_start(**run_start)
for desc in source.find_descriptors(run_start=run_start):
events = source.get_events_generator(descriptor=desc)
dest.insert_descriptor(**desc)
dest.bulk_insert_events(desc, events)
dest.insert_run_stop(**source.stop_by_start(run_start))
|
Add timezone to example to keep Broker happy.
|
Add timezone to example to keep Broker happy.
|
Python
|
bsd-3-clause
|
ericdill/databroker,ericdill/databroker
|
from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS
source_config = {'host': 'localhost',
'port': 27017,
'database': 'metadatastore_production_v1',
'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory'}
source = MDSRO(source_config)
dest = MDS(dest_config)
for run_start in source.find_run_starts():
dest.insert_run_start(**run_start)
for desc in source.find_descriptors(run_start=run_start):
events = source.get_events_generator(descriptor=desc)
dest.insert_descriptor(**desc)
dest.bulk_insert_events(desc, events)
dest.insert_run_stop(**source.stop_by_start(run_start))
Add timezone to example to keep Broker happy.
|
from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS
source_config = {'host': 'localhost',
'port': 27017,
'database': 'metadatastore_production_v1',
'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory',
'timezone': 'US/Eastern'}
source = MDSRO(source_config)
dest = MDS(dest_config)
for run_start in source.find_run_starts():
dest.insert_run_start(**run_start)
for desc in source.find_descriptors(run_start=run_start):
events = source.get_events_generator(descriptor=desc)
dest.insert_descriptor(**desc)
dest.bulk_insert_events(desc, events)
dest.insert_run_stop(**source.stop_by_start(run_start))
|
<commit_before>from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS
source_config = {'host': 'localhost',
'port': 27017,
'database': 'metadatastore_production_v1',
'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory'}
source = MDSRO(source_config)
dest = MDS(dest_config)
for run_start in source.find_run_starts():
dest.insert_run_start(**run_start)
for desc in source.find_descriptors(run_start=run_start):
events = source.get_events_generator(descriptor=desc)
dest.insert_descriptor(**desc)
dest.bulk_insert_events(desc, events)
dest.insert_run_stop(**source.stop_by_start(run_start))
<commit_msg>Add timezone to example to keep Broker happy.<commit_after>
|
from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS
source_config = {'host': 'localhost',
'port': 27017,
'database': 'metadatastore_production_v1',
'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory',
'timezone': 'US/Eastern'}
source = MDSRO(source_config)
dest = MDS(dest_config)
for run_start in source.find_run_starts():
dest.insert_run_start(**run_start)
for desc in source.find_descriptors(run_start=run_start):
events = source.get_events_generator(descriptor=desc)
dest.insert_descriptor(**desc)
dest.bulk_insert_events(desc, events)
dest.insert_run_stop(**source.stop_by_start(run_start))
|
from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS
source_config = {'host': 'localhost',
'port': 27017,
'database': 'metadatastore_production_v1',
'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory'}
source = MDSRO(source_config)
dest = MDS(dest_config)
for run_start in source.find_run_starts():
dest.insert_run_start(**run_start)
for desc in source.find_descriptors(run_start=run_start):
events = source.get_events_generator(descriptor=desc)
dest.insert_descriptor(**desc)
dest.bulk_insert_events(desc, events)
dest.insert_run_stop(**source.stop_by_start(run_start))
Add timezone to example to keep Broker happy.from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS
source_config = {'host': 'localhost',
'port': 27017,
'database': 'metadatastore_production_v1',
'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory',
'timezone': 'US/Eastern'}
source = MDSRO(source_config)
dest = MDS(dest_config)
for run_start in source.find_run_starts():
dest.insert_run_start(**run_start)
for desc in source.find_descriptors(run_start=run_start):
events = source.get_events_generator(descriptor=desc)
dest.insert_descriptor(**desc)
dest.bulk_insert_events(desc, events)
dest.insert_run_stop(**source.stop_by_start(run_start))
|
<commit_before>from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS
source_config = {'host': 'localhost',
'port': 27017,
'database': 'metadatastore_production_v1',
'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory'}
source = MDSRO(source_config)
dest = MDS(dest_config)
for run_start in source.find_run_starts():
dest.insert_run_start(**run_start)
for desc in source.find_descriptors(run_start=run_start):
events = source.get_events_generator(descriptor=desc)
dest.insert_descriptor(**desc)
dest.bulk_insert_events(desc, events)
dest.insert_run_stop(**source.stop_by_start(run_start))
<commit_msg>Add timezone to example to keep Broker happy.<commit_after>from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS
source_config = {'host': 'localhost',
'port': 27017,
'database': 'metadatastore_production_v1',
'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory',
'timezone': 'US/Eastern'}
source = MDSRO(source_config)
dest = MDS(dest_config)
for run_start in source.find_run_starts():
dest.insert_run_start(**run_start)
for desc in source.find_descriptors(run_start=run_start):
events = source.get_events_generator(descriptor=desc)
dest.insert_descriptor(**desc)
dest.bulk_insert_events(desc, events)
dest.insert_run_stop(**source.stop_by_start(run_start))
|
ef7f732b9db4f0c835746d535f10e7e91e0484d7
|
l10n_br_zip/__openerp__.py
|
l10n_br_zip/__openerp__.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '8.0.1.0.1',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': ['test/zip_demo.yml'],
'category': 'Localization',
'installable': False,
}
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '9.0.1.0.0',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': [
'test/zip_demo.yml'
],
'category': 'Localization',
'installable': True,
}
|
Change the version of module.
|
[MIG] Change the version of module.
|
Python
|
agpl-3.0
|
odoo-brazil/l10n-brazil-wip,thinkopensolutions/l10n-brazil,odoo-brazil/l10n-brazil-wip,thinkopensolutions/l10n-brazil
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '8.0.1.0.1',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': ['test/zip_demo.yml'],
'category': 'Localization',
'installable': False,
}
[MIG] Change the version of module.
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '9.0.1.0.0',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': [
'test/zip_demo.yml'
],
'category': 'Localization',
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '8.0.1.0.1',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': ['test/zip_demo.yml'],
'category': 'Localization',
'installable': False,
}
<commit_msg>[MIG] Change the version of module.<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '9.0.1.0.0',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': [
'test/zip_demo.yml'
],
'category': 'Localization',
'installable': True,
}
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '8.0.1.0.1',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': ['test/zip_demo.yml'],
'category': 'Localization',
'installable': False,
}
[MIG] Change the version of module.# -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '9.0.1.0.0',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': [
'test/zip_demo.yml'
],
'category': 'Localization',
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '8.0.1.0.1',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': ['test/zip_demo.yml'],
'category': 'Localization',
'installable': False,
}
<commit_msg>[MIG] Change the version of module.<commit_after># -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '9.0.1.0.0',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': [
'test/zip_demo.yml'
],
'category': 'Localization',
'installable': True,
}
|
d6c2b891e63655fd1106d20f83b1eda54fb87541
|
abilian/testing/__init__.py
|
abilian/testing/__init__.py
|
"""Base stuff for testing.
"""
import subprocess
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
    """Minimal Flask configuration used by :class:`BaseTestCase`."""

    # In-memory SQLite database: every test run starts from a clean slate.
    SQLALCHEMY_DATABASE_URI = "sqlite://"
    # Do not echo SQL statements while tests run.
    SQLALCHEMY_ECHO = False
    # Enable Flask testing mode.
    TESTING = True
    # Needed for session signing; the actual value is irrelevant in tests.
    SECRET_KEY = "SECRET"
class BaseTestCase(TestCase):
    """Base test case wiring a flask-testing ``TestCase`` to the Abilian app.

    Subclasses may override ``config_class`` / ``application_class`` to
    customize the application under test.
    """

    # Factories used by create_app().
    config_class = TestConfig
    application_class = Application

    def create_app(self):
        """Build and return the Flask app (required by flask-testing)."""
        config = self.config_class()
        self.app = self.application_class(config)
        return self.app

    def setUp(self):
        # Create the schema and expose the (module-global) SQLAlchemy
        # session to tests as self.session.
        self.app.create_db()
        self.session = db.session

    def tearDown(self):
        # Drop everything and release the engine so each test is isolated.
        db.session.remove()
        db.drop_all()
        db.engine.dispose()

    # Useful for debugging
    def dump_routes(self):
        # Print every URL rule, sorted by path (Python 2 print statement).
        rules = list(self.app.url_map.iter_rules())
        rules.sort(key=lambda x: x.rule)
        for rule in rules:
            print rule, rule.methods, rule.endpoint
|
"""Base stuff for testing.
"""
import subprocess
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
class BaseTestCase(TestCase):
    """Base test case wiring a flask-testing ``TestCase`` to the Abilian app.

    Subclasses may override ``config_class`` / ``application_class`` to
    customize the application under test.
    """

    # Factories used by create_app().
    config_class = TestConfig
    application_class = Application

    def create_app(self):
        """Build and return the Flask app (required by flask-testing)."""
        config = self.config_class()
        self.app = self.application_class(config)
        return self.app

    def setUp(self):
        # Create the schema and expose the app's SQLAlchemy session to
        # tests as self.session.
        self.app.create_db()
        self.session = self.db.session

    def tearDown(self):
        # Drop everything and release the engine so each test is isolated.
        self.db.session.remove()
        self.db.drop_all()
        self.db.engine.dispose()

    @property
    def db(self):
        # Convenience accessor for the Flask-SQLAlchemy ``db`` object
        # registered on the application, avoiding a module-level import.
        return self.app.extensions['sqlalchemy'].db

    # Useful for debugging
    def dump_routes(self):
        # Print every URL rule, sorted by path (Python 2 print statement).
        rules = list(self.app.url_map.iter_rules())
        rules.sort(key=lambda x: x.rule)
        for rule in rules:
            print rule, rule.methods, rule.endpoint
|
Add convenience method on test case.
|
Add convenience method on test case.
|
Python
|
lgpl-2.1
|
abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core
|
"""Base stuff for testing.
"""
import subprocess
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
# Useful for debugging
def dump_routes(self):
rules = list(self.app.url_map.iter_rules())
rules.sort(key=lambda x: x.rule)
for rule in rules:
print rule, rule.methods, rule.endpoint
Add convenience method on test case.
|
"""Base stuff for testing.
"""
import subprocess
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = self.db.session
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
self.db.engine.dispose()
@property
def db(self):
return self.app.extensions['sqlalchemy'].db
# Useful for debugging
def dump_routes(self):
rules = list(self.app.url_map.iter_rules())
rules.sort(key=lambda x: x.rule)
for rule in rules:
print rule, rule.methods, rule.endpoint
|
<commit_before>"""Base stuff for testing.
"""
import subprocess
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
# Useful for debugging
def dump_routes(self):
rules = list(self.app.url_map.iter_rules())
rules.sort(key=lambda x: x.rule)
for rule in rules:
print rule, rule.methods, rule.endpoint
<commit_msg>Add convenience method on test case.<commit_after>
|
"""Base stuff for testing.
"""
import subprocess
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = self.db.session
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
self.db.engine.dispose()
@property
def db(self):
return self.app.extensions['sqlalchemy'].db
# Useful for debugging
def dump_routes(self):
rules = list(self.app.url_map.iter_rules())
rules.sort(key=lambda x: x.rule)
for rule in rules:
print rule, rule.methods, rule.endpoint
|
"""Base stuff for testing.
"""
import subprocess
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
# Useful for debugging
def dump_routes(self):
rules = list(self.app.url_map.iter_rules())
rules.sort(key=lambda x: x.rule)
for rule in rules:
print rule, rule.methods, rule.endpoint
Add convenience method on test case."""Base stuff for testing.
"""
import subprocess
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = self.db.session
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
self.db.engine.dispose()
@property
def db(self):
return self.app.extensions['sqlalchemy'].db
# Useful for debugging
def dump_routes(self):
rules = list(self.app.url_map.iter_rules())
rules.sort(key=lambda x: x.rule)
for rule in rules:
print rule, rule.methods, rule.endpoint
|
<commit_before>"""Base stuff for testing.
"""
import subprocess
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
# Useful for debugging
def dump_routes(self):
rules = list(self.app.url_map.iter_rules())
rules.sort(key=lambda x: x.rule)
for rule in rules:
print rule, rule.methods, rule.endpoint
<commit_msg>Add convenience method on test case.<commit_after>"""Base stuff for testing.
"""
import subprocess
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = self.db.session
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
self.db.engine.dispose()
@property
def db(self):
return self.app.extensions['sqlalchemy'].db
# Useful for debugging
def dump_routes(self):
rules = list(self.app.url_map.iter_rules())
rules.sort(key=lambda x: x.rule)
for rule in rules:
print rule, rule.methods, rule.endpoint
|
8087a56b959ddb9371125dd2732550405df14e0f
|
src/webapp/cfg/config_example.py
|
src/webapp/cfg/config_example.py
|
# The secret key is used for signing the session and creating the csrf hmacs
SECRET_KEY = "gocu5eYoosh8oocoozeeG9queeghae7ushahp9ufaighoo5gex1vulaexohtepha"
# this is the dbapi connection string for sqlalchemy
DB_CONNECTION = None
# Turn this off in production!
DEBUG = True
SERVER_NAME = 'localhost:5000'
APPLICATION_ROOT = "/"
# Mail configuration
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = DEBUG
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_DEFAULT_SENDER = "meet-and-eat@exmatrikulationsamt.de"
CONFIRM_SUBJECT = "Meet & Eat Aktivierung"
ERROR_ADDRESS = ['meetandeat@exmatrikulationsamt.de']
ERROR_SENDER = 'server-error@exmatrikulationsamt.de'
ERROR_FORMAT = '''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''
ERROR_SUBJECT = "Fehler in der Meet&Eat Registrierung"
ADMIN_USER = "admin"
ADMIN_PASSWORD = "test"
|
# The secret key is used for signing the session and creating the csrf hmacs
SECRET_KEY = "gocu5eYoosh8oocoozeeG9queeghae7ushahp9ufaighoo5gex1vulaexohtepha"
# this is the dbapi connection string for sqlalchemy
DB_CONNECTION = None
# Turn this off in production!
DEBUG = True
SERVER_NAME = 'localhost:5000'
APPLICATION_ROOT = "/"
# Mail configuration
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = DEBUG
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_DEFAULT_SENDER = "meet-and-eat@exmatrikulationsamt.de"
CONFIRM_SUBJECT = "Meet & Eat Aktivierung"
ERROR_ADDRESS = ['meetandeat@exmatrikulationsamt.de']
ERROR_SENDER = 'server-error@exmatrikulationsamt.de'
ERROR_FORMAT = '''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''
ERROR_SUBJECT = "Fehler in der Meet&Eat Registrierung"
ADMIN_USER = "admin"
ADMIN_PASSWORD = "test"
# end date for registing
# Format: "yyyy-mm-dd HH:MM"
REGISTER_END = "2020-05-01 22:30"
|
Add end date to the config.
|
Add end date to the config.
|
Python
|
bsd-3-clause
|
janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system
|
# The secret key is used for signing the session and creating the csrf hmacs
SECRET_KEY = "gocu5eYoosh8oocoozeeG9queeghae7ushahp9ufaighoo5gex1vulaexohtepha"
# this is the dbapi connection string for sqlalchemy
DB_CONNECTION = None
# Turn this off in production!
DEBUG = True
SERVER_NAME = 'localhost:5000'
APPLICATION_ROOT = "/"
# Mail configuration
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = DEBUG
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_DEFAULT_SENDER = "meet-and-eat@exmatrikulationsamt.de"
CONFIRM_SUBJECT = "Meet & Eat Aktivierung"
ERROR_ADDRESS = ['meetandeat@exmatrikulationsamt.de']
ERROR_SENDER = 'server-error@exmatrikulationsamt.de'
ERROR_FORMAT = '''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''
ERROR_SUBJECT = "Fehler in der Meet&Eat Registrierung"
ADMIN_USER = "admin"
ADMIN_PASSWORD = "test"Add end date to the config.
|
# The secret key is used for signing the session and creating the csrf hmacs
SECRET_KEY = "gocu5eYoosh8oocoozeeG9queeghae7ushahp9ufaighoo5gex1vulaexohtepha"
# this is the dbapi connection string for sqlalchemy
DB_CONNECTION = None
# Turn this off in production!
DEBUG = True
SERVER_NAME = 'localhost:5000'
APPLICATION_ROOT = "/"
# Mail configuration
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = DEBUG
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_DEFAULT_SENDER = "meet-and-eat@exmatrikulationsamt.de"
CONFIRM_SUBJECT = "Meet & Eat Aktivierung"
ERROR_ADDRESS = ['meetandeat@exmatrikulationsamt.de']
ERROR_SENDER = 'server-error@exmatrikulationsamt.de'
ERROR_FORMAT = '''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''
ERROR_SUBJECT = "Fehler in der Meet&Eat Registrierung"
ADMIN_USER = "admin"
ADMIN_PASSWORD = "test"
# end date for registing
# Format: "yyyy-mm-dd HH:MM"
REGISTER_END = "2020-05-01 22:30"
|
<commit_before># The secret key is used for signing the session and creating the csrf hmacs
SECRET_KEY = "gocu5eYoosh8oocoozeeG9queeghae7ushahp9ufaighoo5gex1vulaexohtepha"
# this is the dbapi connection string for sqlalchemy
DB_CONNECTION = None
# Turn this off in production!
DEBUG = True
SERVER_NAME = 'localhost:5000'
APPLICATION_ROOT = "/"
# Mail configuration
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = DEBUG
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_DEFAULT_SENDER = "meet-and-eat@exmatrikulationsamt.de"
CONFIRM_SUBJECT = "Meet & Eat Aktivierung"
ERROR_ADDRESS = ['meetandeat@exmatrikulationsamt.de']
ERROR_SENDER = 'server-error@exmatrikulationsamt.de'
ERROR_FORMAT = '''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''
ERROR_SUBJECT = "Fehler in der Meet&Eat Registrierung"
ADMIN_USER = "admin"
ADMIN_PASSWORD = "test"<commit_msg>Add end date to the config.<commit_after>
|
# The secret key is used for signing the session and creating the csrf hmacs
SECRET_KEY = "gocu5eYoosh8oocoozeeG9queeghae7ushahp9ufaighoo5gex1vulaexohtepha"
# this is the dbapi connection string for sqlalchemy
DB_CONNECTION = None
# Turn this off in production!
DEBUG = True
SERVER_NAME = 'localhost:5000'
APPLICATION_ROOT = "/"
# Mail configuration
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = DEBUG
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_DEFAULT_SENDER = "meet-and-eat@exmatrikulationsamt.de"
CONFIRM_SUBJECT = "Meet & Eat Aktivierung"
ERROR_ADDRESS = ['meetandeat@exmatrikulationsamt.de']
ERROR_SENDER = 'server-error@exmatrikulationsamt.de'
ERROR_FORMAT = '''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''
ERROR_SUBJECT = "Fehler in der Meet&Eat Registrierung"
ADMIN_USER = "admin"
ADMIN_PASSWORD = "test"
# end date for registing
# Format: "yyyy-mm-dd HH:MM"
REGISTER_END = "2020-05-01 22:30"
|
# The secret key is used for signing the session and creating the csrf hmacs
SECRET_KEY = "gocu5eYoosh8oocoozeeG9queeghae7ushahp9ufaighoo5gex1vulaexohtepha"
# this is the dbapi connection string for sqlalchemy
DB_CONNECTION = None
# Turn this off in production!
DEBUG = True
SERVER_NAME = 'localhost:5000'
APPLICATION_ROOT = "/"
# Mail configuration
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = DEBUG
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_DEFAULT_SENDER = "meet-and-eat@exmatrikulationsamt.de"
CONFIRM_SUBJECT = "Meet & Eat Aktivierung"
ERROR_ADDRESS = ['meetandeat@exmatrikulationsamt.de']
ERROR_SENDER = 'server-error@exmatrikulationsamt.de'
ERROR_FORMAT = '''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''
ERROR_SUBJECT = "Fehler in der Meet&Eat Registrierung"
ADMIN_USER = "admin"
ADMIN_PASSWORD = "test"Add end date to the config.# The secret key is used for signing the session and creating the csrf hmacs
SECRET_KEY = "gocu5eYoosh8oocoozeeG9queeghae7ushahp9ufaighoo5gex1vulaexohtepha"
# this is the dbapi connection string for sqlalchemy
DB_CONNECTION = None
# Turn this off in production!
DEBUG = True
SERVER_NAME = 'localhost:5000'
APPLICATION_ROOT = "/"
# Mail configuration
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = DEBUG
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_DEFAULT_SENDER = "meet-and-eat@exmatrikulationsamt.de"
CONFIRM_SUBJECT = "Meet & Eat Aktivierung"
ERROR_ADDRESS = ['meetandeat@exmatrikulationsamt.de']
ERROR_SENDER = 'server-error@exmatrikulationsamt.de'
ERROR_FORMAT = '''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''
ERROR_SUBJECT = "Fehler in der Meet&Eat Registrierung"
ADMIN_USER = "admin"
ADMIN_PASSWORD = "test"
# end date for registing
# Format: "yyyy-mm-dd HH:MM"
REGISTER_END = "2020-05-01 22:30"
|
<commit_before># The secret key is used for signing the session and creating the csrf hmacs
SECRET_KEY = "gocu5eYoosh8oocoozeeG9queeghae7ushahp9ufaighoo5gex1vulaexohtepha"
# this is the dbapi connection string for sqlalchemy
DB_CONNECTION = None
# Turn this off in production!
DEBUG = True
SERVER_NAME = 'localhost:5000'
APPLICATION_ROOT = "/"
# Mail configuration
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = DEBUG
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_DEFAULT_SENDER = "meet-and-eat@exmatrikulationsamt.de"
CONFIRM_SUBJECT = "Meet & Eat Aktivierung"
ERROR_ADDRESS = ['meetandeat@exmatrikulationsamt.de']
ERROR_SENDER = 'server-error@exmatrikulationsamt.de'
ERROR_FORMAT = '''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''
ERROR_SUBJECT = "Fehler in der Meet&Eat Registrierung"
ADMIN_USER = "admin"
ADMIN_PASSWORD = "test"<commit_msg>Add end date to the config.<commit_after># The secret key is used for signing the session and creating the csrf hmacs
SECRET_KEY = "gocu5eYoosh8oocoozeeG9queeghae7ushahp9ufaighoo5gex1vulaexohtepha"
# this is the dbapi connection string for sqlalchemy
DB_CONNECTION = None
# Turn this off in production!
DEBUG = True
SERVER_NAME = 'localhost:5000'
APPLICATION_ROOT = "/"
# Mail configuration
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_DEBUG = DEBUG
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_DEFAULT_SENDER = "meet-and-eat@exmatrikulationsamt.de"
CONFIRM_SUBJECT = "Meet & Eat Aktivierung"
ERROR_ADDRESS = ['meetandeat@exmatrikulationsamt.de']
ERROR_SENDER = 'server-error@exmatrikulationsamt.de'
ERROR_FORMAT = '''
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d
Module: %(module)s
Function: %(funcName)s
Time: %(asctime)s
Message:
%(message)s
'''
ERROR_SUBJECT = "Fehler in der Meet&Eat Registrierung"
ADMIN_USER = "admin"
ADMIN_PASSWORD = "test"
# end date for registing
# Format: "yyyy-mm-dd HH:MM"
REGISTER_END = "2020-05-01 22:30"
|
644ae4d4f204799160cd2a75f7a8be514d7735f1
|
gunicorn/__init__.py
|
gunicorn/__init__.py
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (17, 6)
__version__ = ".".join([str(v) for v in version_info])
SERVER_SOFTWARE = "gunicorn/%s" % __version__
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

# Package version, exposed as a tuple, a dotted string, and the
# Server response header value.
version_info = (18, 0)
__version__ = ".".join(map(str, version_info))
SERVER_SOFTWARE = "gunicorn/%s" % __version__
|
Revert "oups this the 17.6 version"
|
Revert "oups this the 17.6 version"
This reverts commit bb71bc841f7422333324b95c988ba0677779304d.
|
Python
|
mit
|
gtrdotmcs/gunicorn,zhoucen/gunicorn,GitHublong/gunicorn,ammaraskar/gunicorn,elelianghh/gunicorn,harrisonfeng/gunicorn,ccl0326/gunicorn,tejasmanohar/gunicorn,mvaled/gunicorn,alex/gunicorn,alex/gunicorn,mvaled/gunicorn,zhoucen/gunicorn,tempbottle/gunicorn,jamesblunt/gunicorn,wong2/gunicorn,prezi/gunicorn,malept/gunicorn,ephes/gunicorn,MrKiven/gunicorn,WSDC-NITWarangal/gunicorn,z-fork/gunicorn,jamesblunt/gunicorn,wong2/gunicorn,prezi/gunicorn,1stvamp/gunicorn,malept/gunicorn,gtrdotmcs/gunicorn,1stvamp/gunicorn,alex/gunicorn,gtrdotmcs/gunicorn,ccl0326/gunicorn,wong2/gunicorn,beni55/gunicorn,ccl0326/gunicorn,mvaled/gunicorn,prezi/gunicorn,keakon/gunicorn,malept/gunicorn,1stvamp/gunicorn,jamesblunt/gunicorn,zhoucen/gunicorn
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (17, 6)
__version__ = ".".join([str(v) for v in version_info])
SERVER_SOFTWARE = "gunicorn/%s" % __version__
Revert "oups this the 17.6 version"
This reverts commit bb71bc841f7422333324b95c988ba0677779304d.
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (18, 0)
__version__ = ".".join([str(v) for v in version_info])
SERVER_SOFTWARE = "gunicorn/%s" % __version__
|
<commit_before># -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (17, 6)
__version__ = ".".join([str(v) for v in version_info])
SERVER_SOFTWARE = "gunicorn/%s" % __version__
<commit_msg>Revert "oups this the 17.6 version"
This reverts commit bb71bc841f7422333324b95c988ba0677779304d.<commit_after>
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (18, 0)
__version__ = ".".join([str(v) for v in version_info])
SERVER_SOFTWARE = "gunicorn/%s" % __version__
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (17, 6)
__version__ = ".".join([str(v) for v in version_info])
SERVER_SOFTWARE = "gunicorn/%s" % __version__
Revert "oups this the 17.6 version"
This reverts commit bb71bc841f7422333324b95c988ba0677779304d.# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (18, 0)
__version__ = ".".join([str(v) for v in version_info])
SERVER_SOFTWARE = "gunicorn/%s" % __version__
|
<commit_before># -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (17, 6)
__version__ = ".".join([str(v) for v in version_info])
SERVER_SOFTWARE = "gunicorn/%s" % __version__
<commit_msg>Revert "oups this the 17.6 version"
This reverts commit bb71bc841f7422333324b95c988ba0677779304d.<commit_after># -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
version_info = (18, 0)
__version__ = ".".join([str(v) for v in version_info])
SERVER_SOFTWARE = "gunicorn/%s" % __version__
|
fa1b31785c52f0e4a14ed57663a3904d0ecd976d
|
akanda/horizon/overrides.py
|
akanda/horizon/overrides.py
|
from horizon.base import Horizon
nova_dashboard = Horizon.get_dashboard('project')
compute_panel_group = nova_dashboard.get_panel_group('compute')
compute_panel_group.panels.append('networking')
|
from horizon.base import Horizon

# Register the Akanda "networking" panel on Horizon's project dashboard,
# inside the "network" panel group (Grizzly split the navigation into
# separate compute and network sections).
nova_dashboard = Horizon.get_dashboard('project')
compute_panel_group = nova_dashboard.get_panel_group('network')
compute_panel_group.panels.append('networking')
|
Move the Akanda Networking panel in the new Manage Network section
|
Move the Akanda Networking panel in the new Manage Network section
In the Grizzly version of Horizon the navigation tree on the left
is been split in two section, this fix move the Akanda Networking
panel from the Manage Compute sections to the newly created
Manage Network section.
Change-Id: Iea6156bcc85496e3efac8c2bf4c50543dfb11a3d
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com>
|
Python
|
apache-2.0
|
dreamhost/akanda-horizon,dreamhost/akanda-horizon
|
from horizon.base import Horizon
nova_dashboard = Horizon.get_dashboard('project')
compute_panel_group = nova_dashboard.get_panel_group('compute')
compute_panel_group.panels.append('networking')
Move the Akanda Networking panel in the new Manage Network section
In the Grizzly version of Horizon the navigation tree on the left
is been split in two section, this fix move the Akanda Networking
panel from the Manage Compute sections to the newly created
Manage Network section.
Change-Id: Iea6156bcc85496e3efac8c2bf4c50543dfb11a3d
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com>
|
from horizon.base import Horizon
nova_dashboard = Horizon.get_dashboard('project')
compute_panel_group = nova_dashboard.get_panel_group('network')
compute_panel_group.panels.append('networking')
|
<commit_before>from horizon.base import Horizon
nova_dashboard = Horizon.get_dashboard('project')
compute_panel_group = nova_dashboard.get_panel_group('compute')
compute_panel_group.panels.append('networking')
<commit_msg>Move the Akanda Networking panel in the new Manage Network section
In the Grizzly version of Horizon the navigation tree on the left
is been split in two section, this fix move the Akanda Networking
panel from the Manage Compute sections to the newly created
Manage Network section.
Change-Id: Iea6156bcc85496e3efac8c2bf4c50543dfb11a3d
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com><commit_after>
|
from horizon.base import Horizon
nova_dashboard = Horizon.get_dashboard('project')
compute_panel_group = nova_dashboard.get_panel_group('network')
compute_panel_group.panels.append('networking')
|
from horizon.base import Horizon
nova_dashboard = Horizon.get_dashboard('project')
compute_panel_group = nova_dashboard.get_panel_group('compute')
compute_panel_group.panels.append('networking')
Move the Akanda Networking panel in the new Manage Network section
In the Grizzly version of Horizon the navigation tree on the left
is been split in two section, this fix move the Akanda Networking
panel from the Manage Compute sections to the newly created
Manage Network section.
Change-Id: Iea6156bcc85496e3efac8c2bf4c50543dfb11a3d
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com>from horizon.base import Horizon
nova_dashboard = Horizon.get_dashboard('project')
compute_panel_group = nova_dashboard.get_panel_group('network')
compute_panel_group.panels.append('networking')
|
<commit_before>from horizon.base import Horizon
nova_dashboard = Horizon.get_dashboard('project')
compute_panel_group = nova_dashboard.get_panel_group('compute')
compute_panel_group.panels.append('networking')
<commit_msg>Move the Akanda Networking panel in the new Manage Network section
In the Grizzly version of Horizon the navigation tree on the left
is been split in two section, this fix move the Akanda Networking
panel from the Manage Compute sections to the newly created
Manage Network section.
Change-Id: Iea6156bcc85496e3efac8c2bf4c50543dfb11a3d
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com><commit_after>from horizon.base import Horizon
nova_dashboard = Horizon.get_dashboard('project')
compute_panel_group = nova_dashboard.get_panel_group('network')
compute_panel_group.panels.append('networking')
|
bd7c5c5544a6d09062da05a4780524e8981f1737
|
captainhook/checkers/block_branches.py
|
captainhook/checkers/block_branches.py
|
# # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import argparse
from .utils import bash
CHECK_NAME = 'block_branch'
def run(files, temp_folder, arg=None):
"Check we're not committing to a blocked branch"
parser = get_parser()
argos = parser.parse_args(arg.split())
current_branch = bash('git symbolic-ref HEAD').value().decode('utf-8')
current_branch = current_branch.replace('refs/heads/', '').strip()
if current_branch in argos.branches:
return ("Branch '{0}' is blocked from being "
"committed to.".format(current_branch))
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('branches', metavar='B', nargs='+',
help='a branch to block commits to')
return parser
|
# # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import argparse
from .utils import bash
CHECK_NAME = 'block_branch'
def run(files, temp_folder, arg=None):
    "Check we're not committing to a blocked branch"
    # NOTE(review): with the default arg=None, arg.split() below raises
    # AttributeError -- callers appear to always pass a string; confirm.
    parser = get_parser()
    argos = parser.parse_args(arg.split())
    # Resolve the current branch from the symbolic ref, e.g.
    # "refs/heads/master" -> "master".
    current_branch = bash('git symbolic-ref HEAD').value()
    current_branch = current_branch.replace('refs/heads/', '').strip()
    if current_branch in argos.branches:
        # Returning a message signals check failure to the hook runner.
        return ("Branch '{0}' is blocked from being "
                "committed to.".format(current_branch))
def get_parser():
    """Build the argument parser for the block_branch check.

    Accepts one or more positional branch names to block commits to.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        'branches',
        metavar='B',
        nargs='+',
        help='a branch to block commits to',
    )
    return arg_parser
|
Remove decode from block branches check
|
Remove decode from block branches check
It’s now done by `bash()`.
|
Python
|
bsd-3-clause
|
alexcouper/captainhook
|
# # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import argparse
from .utils import bash
CHECK_NAME = 'block_branch'
def run(files, temp_folder, arg=None):
"Check we're not committing to a blocked branch"
parser = get_parser()
argos = parser.parse_args(arg.split())
current_branch = bash('git symbolic-ref HEAD').value().decode('utf-8')
current_branch = current_branch.replace('refs/heads/', '').strip()
if current_branch in argos.branches:
return ("Branch '{0}' is blocked from being "
"committed to.".format(current_branch))
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('branches', metavar='B', nargs='+',
help='a branch to block commits to')
return parser
Remove decode from block branches check
It’s now done by `bash()`.
|
# # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import argparse
from .utils import bash
CHECK_NAME = 'block_branch'
def run(files, temp_folder, arg=None):
"Check we're not committing to a blocked branch"
parser = get_parser()
argos = parser.parse_args(arg.split())
current_branch = bash('git symbolic-ref HEAD').value()
current_branch = current_branch.replace('refs/heads/', '').strip()
if current_branch in argos.branches:
return ("Branch '{0}' is blocked from being "
"committed to.".format(current_branch))
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('branches', metavar='B', nargs='+',
help='a branch to block commits to')
return parser
|
<commit_before># # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import argparse
from .utils import bash
CHECK_NAME = 'block_branch'
def run(files, temp_folder, arg=None):
"Check we're not committing to a blocked branch"
parser = get_parser()
argos = parser.parse_args(arg.split())
current_branch = bash('git symbolic-ref HEAD').value().decode('utf-8')
current_branch = current_branch.replace('refs/heads/', '').strip()
if current_branch in argos.branches:
return ("Branch '{0}' is blocked from being "
"committed to.".format(current_branch))
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('branches', metavar='B', nargs='+',
help='a branch to block commits to')
return parser
<commit_msg>Remove decode from block branches check
It’s now done by `bash()`.<commit_after>
|
# # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import argparse
from .utils import bash
CHECK_NAME = 'block_branch'
def run(files, temp_folder, arg=None):
"Check we're not committing to a blocked branch"
parser = get_parser()
argos = parser.parse_args(arg.split())
current_branch = bash('git symbolic-ref HEAD').value()
current_branch = current_branch.replace('refs/heads/', '').strip()
if current_branch in argos.branches:
return ("Branch '{0}' is blocked from being "
"committed to.".format(current_branch))
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('branches', metavar='B', nargs='+',
help='a branch to block commits to')
return parser
|
# # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import argparse
from .utils import bash
CHECK_NAME = 'block_branch'
def run(files, temp_folder, arg=None):
"Check we're not committing to a blocked branch"
parser = get_parser()
argos = parser.parse_args(arg.split())
current_branch = bash('git symbolic-ref HEAD').value().decode('utf-8')
current_branch = current_branch.replace('refs/heads/', '').strip()
if current_branch in argos.branches:
return ("Branch '{0}' is blocked from being "
"committed to.".format(current_branch))
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('branches', metavar='B', nargs='+',
help='a branch to block commits to')
return parser
Remove decode from block branches check
It’s now done by `bash()`.# # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import argparse
from .utils import bash
CHECK_NAME = 'block_branch'
def run(files, temp_folder, arg=None):
"Check we're not committing to a blocked branch"
parser = get_parser()
argos = parser.parse_args(arg.split())
current_branch = bash('git symbolic-ref HEAD').value()
current_branch = current_branch.replace('refs/heads/', '').strip()
if current_branch in argos.branches:
return ("Branch '{0}' is blocked from being "
"committed to.".format(current_branch))
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('branches', metavar='B', nargs='+',
help='a branch to block commits to')
return parser
|
<commit_before># # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import argparse
from .utils import bash
CHECK_NAME = 'block_branch'
def run(files, temp_folder, arg=None):
"Check we're not committing to a blocked branch"
parser = get_parser()
argos = parser.parse_args(arg.split())
current_branch = bash('git symbolic-ref HEAD').value().decode('utf-8')
current_branch = current_branch.replace('refs/heads/', '').strip()
if current_branch in argos.branches:
return ("Branch '{0}' is blocked from being "
"committed to.".format(current_branch))
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('branches', metavar='B', nargs='+',
help='a branch to block commits to')
return parser
<commit_msg>Remove decode from block branches check
It’s now done by `bash()`.<commit_after># # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import argparse
from .utils import bash
CHECK_NAME = 'block_branch'
def run(files, temp_folder, arg=None):
"Check we're not committing to a blocked branch"
parser = get_parser()
argos = parser.parse_args(arg.split())
current_branch = bash('git symbolic-ref HEAD').value()
current_branch = current_branch.replace('refs/heads/', '').strip()
if current_branch in argos.branches:
return ("Branch '{0}' is blocked from being "
"committed to.".format(current_branch))
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('branches', metavar='B', nargs='+',
help='a branch to block commits to')
return parser
|
b1a562ea2105e4992fa51d7ba49a99c1955b01b3
|
stats/urls.py
|
stats/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.front_page, name='front_page'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/$', views.engine_players, name='engine'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/wads/$', views.engine_wads, name='wads'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/servers/$', views.engine_servers, name='servers'),
url(r'^about/$', views.about, name='about')
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.front_page, name='front_page'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/$', views.engine_players, name='engine'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/wads/$', views.engine_wads, name='wads'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/servers/$', views.engine_servers, name='servers'),
url(r'^about/$', views.about, name='about')
]
|
Support game engines with numbers in name.
|
Support game engines with numbers in name.
|
Python
|
bsd-2-clause
|
Zalewa/doomstats,Zalewa/doomstats,Zalewa/doomstats,Zalewa/doomstats
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.front_page, name='front_page'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/$', views.engine_players, name='engine'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/wads/$', views.engine_wads, name='wads'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/servers/$', views.engine_servers, name='servers'),
url(r'^about/$', views.about, name='about')
]
Support game engines with numbers in name.
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.front_page, name='front_page'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/$', views.engine_players, name='engine'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/wads/$', views.engine_wads, name='wads'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/servers/$', views.engine_servers, name='servers'),
url(r'^about/$', views.about, name='about')
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.front_page, name='front_page'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/$', views.engine_players, name='engine'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/wads/$', views.engine_wads, name='wads'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/servers/$', views.engine_servers, name='servers'),
url(r'^about/$', views.about, name='about')
]
<commit_msg>Support game engines with numbers in name.<commit_after>
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.front_page, name='front_page'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/$', views.engine_players, name='engine'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/wads/$', views.engine_wads, name='wads'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/servers/$', views.engine_servers, name='servers'),
url(r'^about/$', views.about, name='about')
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.front_page, name='front_page'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/$', views.engine_players, name='engine'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/wads/$', views.engine_wads, name='wads'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/servers/$', views.engine_servers, name='servers'),
url(r'^about/$', views.about, name='about')
]
Support game engines with numbers in name.from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.front_page, name='front_page'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/$', views.engine_players, name='engine'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/wads/$', views.engine_wads, name='wads'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/servers/$', views.engine_servers, name='servers'),
url(r'^about/$', views.about, name='about')
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.front_page, name='front_page'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/$', views.engine_players, name='engine'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/wads/$', views.engine_wads, name='wads'),
url(r'^engine/(?P<name>[ a-zA-Z]+)/servers/$', views.engine_servers, name='servers'),
url(r'^about/$', views.about, name='about')
]
<commit_msg>Support game engines with numbers in name.<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.front_page, name='front_page'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/$', views.engine_players, name='engine'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/wads/$', views.engine_wads, name='wads'),
url(r'^engine/(?P<name>[ a-zA-Z0-9]+)/servers/$', views.engine_servers, name='servers'),
url(r'^about/$', views.about, name='about')
]
|
c589fffe7834d7a187c25316f95a5d1ca12c5669
|
active-env.py
|
active-env.py
|
#! /usr/bin/env python
# Author: Joseph Lisee <jlisee@gmail.com>
import os
import sys
# Get the current directory
cur_dir, _ = os.path.split(__file__)
def main():
# Get our path
env_dir = os.path.abspath(os.path.join(cur_dir, 'env'))
# Set our path vars
env_paths = {
'PATH' : os.path.join(env_dir, 'bin'),
'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'),
}
for varname, varpath in env_paths.iteritems():
cur_var = os.environ.get(varname, None)
if cur_var:
os.environ[varname] = '%s;%s' % (varpath, cur_var)
else:
os.environ[varname] = varpath
# Setup up the PS1
os.environ['PS1'] = '(xpm) \u@\h:\w\$'
# Step into shell
print os.execvp('bash', ['bash'])
if __name__ == '__main__':
sys.exit(main())
|
#! /usr/bin/env python
# Author: Joseph Lisee <jlisee@gmail.com>
import os
import sys
# Get the current directory
cur_dir, _ = os.path.split(__file__)
def main():
# Get our path
env_dir = os.path.abspath(os.path.join(cur_dir, 'env'))
# Set our path vars
env_paths = {
'PATH' : os.path.join(env_dir, 'bin'),
'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'),
}
for varname, varpath in env_paths.iteritems():
cur_var = os.environ.get(varname, None)
if cur_var:
os.environ[varname] = cur_var + os.pathsep + varpath
else:
os.environ[varname] = varpath
# Setup up the PS1 (this doesn't work)
os.environ['PS1'] = '(xpm) \u@\h:\w\$'
# Step into shell
os.execvp('bash', ['bash'])
if __name__ == '__main__':
sys.exit(main())
|
Make bash setup script actually work
|
Make bash setup script actually work
Fixes call to exec and labels PS1 work as non working (over-ridden by
bashrc).
|
Python
|
bsd-3-clause
|
jlisee/xpkg,jlisee/xpkg,jlisee/xpkg
|
#! /usr/bin/env python
# Author: Joseph Lisee <jlisee@gmail.com>
import os
import sys
# Get the current directory
cur_dir, _ = os.path.split(__file__)
def main():
# Get our path
env_dir = os.path.abspath(os.path.join(cur_dir, 'env'))
# Set our path vars
env_paths = {
'PATH' : os.path.join(env_dir, 'bin'),
'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'),
}
for varname, varpath in env_paths.iteritems():
cur_var = os.environ.get(varname, None)
if cur_var:
os.environ[varname] = '%s;%s' % (varpath, cur_var)
else:
os.environ[varname] = varpath
# Setup up the PS1
os.environ['PS1'] = '(xpm) \u@\h:\w\$'
# Step into shell
print os.execvp('bash', ['bash'])
if __name__ == '__main__':
sys.exit(main())
Make bash setup script actually work
Fixes call to exec and labels PS1 work as non working (over-ridden by
bashrc).
|
#! /usr/bin/env python
# Author: Joseph Lisee <jlisee@gmail.com>
import os
import sys
# Get the current directory
cur_dir, _ = os.path.split(__file__)
def main():
# Get our path
env_dir = os.path.abspath(os.path.join(cur_dir, 'env'))
# Set our path vars
env_paths = {
'PATH' : os.path.join(env_dir, 'bin'),
'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'),
}
for varname, varpath in env_paths.iteritems():
cur_var = os.environ.get(varname, None)
if cur_var:
os.environ[varname] = cur_var + os.pathsep + varpath
else:
os.environ[varname] = varpath
# Setup up the PS1 (this doesn't work)
os.environ['PS1'] = '(xpm) \u@\h:\w\$'
# Step into shell
os.execvp('bash', ['bash'])
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#! /usr/bin/env python
# Author: Joseph Lisee <jlisee@gmail.com>
import os
import sys
# Get the current directory
cur_dir, _ = os.path.split(__file__)
def main():
# Get our path
env_dir = os.path.abspath(os.path.join(cur_dir, 'env'))
# Set our path vars
env_paths = {
'PATH' : os.path.join(env_dir, 'bin'),
'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'),
}
for varname, varpath in env_paths.iteritems():
cur_var = os.environ.get(varname, None)
if cur_var:
os.environ[varname] = '%s;%s' % (varpath, cur_var)
else:
os.environ[varname] = varpath
# Setup up the PS1
os.environ['PS1'] = '(xpm) \u@\h:\w\$'
# Step into shell
print os.execvp('bash', ['bash'])
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Make bash setup script actually work
Fixes call to exec and labels PS1 work as non working (over-ridden by
bashrc).<commit_after>
|
#! /usr/bin/env python
# Author: Joseph Lisee <jlisee@gmail.com>
import os
import sys
# Get the current directory
cur_dir, _ = os.path.split(__file__)
def main():
# Get our path
env_dir = os.path.abspath(os.path.join(cur_dir, 'env'))
# Set our path vars
env_paths = {
'PATH' : os.path.join(env_dir, 'bin'),
'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'),
}
for varname, varpath in env_paths.iteritems():
cur_var = os.environ.get(varname, None)
if cur_var:
os.environ[varname] = cur_var + os.pathsep + varpath
else:
os.environ[varname] = varpath
# Setup up the PS1 (this doesn't work)
os.environ['PS1'] = '(xpm) \u@\h:\w\$'
# Step into shell
os.execvp('bash', ['bash'])
if __name__ == '__main__':
sys.exit(main())
|
#! /usr/bin/env python
# Author: Joseph Lisee <jlisee@gmail.com>
import os
import sys
# Get the current directory
cur_dir, _ = os.path.split(__file__)
def main():
# Get our path
env_dir = os.path.abspath(os.path.join(cur_dir, 'env'))
# Set our path vars
env_paths = {
'PATH' : os.path.join(env_dir, 'bin'),
'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'),
}
for varname, varpath in env_paths.iteritems():
cur_var = os.environ.get(varname, None)
if cur_var:
os.environ[varname] = '%s;%s' % (varpath, cur_var)
else:
os.environ[varname] = varpath
# Setup up the PS1
os.environ['PS1'] = '(xpm) \u@\h:\w\$'
# Step into shell
print os.execvp('bash', ['bash'])
if __name__ == '__main__':
sys.exit(main())
Make bash setup script actually work
Fixes call to exec and labels PS1 work as non working (over-ridden by
bashrc).#! /usr/bin/env python
# Author: Joseph Lisee <jlisee@gmail.com>
import os
import sys
# Get the current directory
cur_dir, _ = os.path.split(__file__)
def main():
# Get our path
env_dir = os.path.abspath(os.path.join(cur_dir, 'env'))
# Set our path vars
env_paths = {
'PATH' : os.path.join(env_dir, 'bin'),
'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'),
}
for varname, varpath in env_paths.iteritems():
cur_var = os.environ.get(varname, None)
if cur_var:
os.environ[varname] = cur_var + os.pathsep + varpath
else:
os.environ[varname] = varpath
# Setup up the PS1 (this doesn't work)
os.environ['PS1'] = '(xpm) \u@\h:\w\$'
# Step into shell
os.execvp('bash', ['bash'])
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#! /usr/bin/env python
# Author: Joseph Lisee <jlisee@gmail.com>
import os
import sys
# Get the current directory
cur_dir, _ = os.path.split(__file__)
def main():
# Get our path
env_dir = os.path.abspath(os.path.join(cur_dir, 'env'))
# Set our path vars
env_paths = {
'PATH' : os.path.join(env_dir, 'bin'),
'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'),
}
for varname, varpath in env_paths.iteritems():
cur_var = os.environ.get(varname, None)
if cur_var:
os.environ[varname] = '%s;%s' % (varpath, cur_var)
else:
os.environ[varname] = varpath
# Setup up the PS1
os.environ['PS1'] = '(xpm) \u@\h:\w\$'
# Step into shell
print os.execvp('bash', ['bash'])
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Make bash setup script actually work
Fixes call to exec and labels PS1 work as non working (over-ridden by
bashrc).<commit_after>#! /usr/bin/env python
# Author: Joseph Lisee <jlisee@gmail.com>
import os
import sys
# Get the current directory
cur_dir, _ = os.path.split(__file__)
def main():
# Get our path
env_dir = os.path.abspath(os.path.join(cur_dir, 'env'))
# Set our path vars
env_paths = {
'PATH' : os.path.join(env_dir, 'bin'),
'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'),
}
for varname, varpath in env_paths.iteritems():
cur_var = os.environ.get(varname, None)
if cur_var:
os.environ[varname] = cur_var + os.pathsep + varpath
else:
os.environ[varname] = varpath
# Setup up the PS1 (this doesn't work)
os.environ['PS1'] = '(xpm) \u@\h:\w\$'
# Step into shell
os.execvp('bash', ['bash'])
if __name__ == '__main__':
sys.exit(main())
|
85df3afc75f52a2183ef46560f57bb6993091238
|
trex/urls.py
|
trex/urls.py
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, include, url
from django.contrib import admin
from trex.views import project
urlpatterns = patterns(
'',
url(r"^admin/", include(admin.site.urls)),
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, include, url
from django.contrib import admin
from trex.views import project
urlpatterns = patterns(
'',
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
|
Remove the admin url mapping
|
Remove the admin url mapping
|
Python
|
mit
|
bjoernricks/trex,bjoernricks/trex
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, include, url
from django.contrib import admin
from trex.views import project
urlpatterns = patterns(
'',
url(r"^admin/", include(admin.site.urls)),
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
Remove the admin url mapping
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, include, url
from django.contrib import admin
from trex.views import project
urlpatterns = patterns(
'',
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
|
<commit_before># -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, include, url
from django.contrib import admin
from trex.views import project
urlpatterns = patterns(
'',
url(r"^admin/", include(admin.site.urls)),
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
<commit_msg>Remove the admin url mapping<commit_after>
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, include, url
from django.contrib import admin
from trex.views import project
urlpatterns = patterns(
'',
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, include, url
from django.contrib import admin
from trex.views import project
urlpatterns = patterns(
'',
url(r"^admin/", include(admin.site.urls)),
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
Remove the admin url mapping# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, include, url
from django.contrib import admin
from trex.views import project
urlpatterns = patterns(
'',
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
|
<commit_before># -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, include, url
from django.contrib import admin
from trex.views import project
urlpatterns = patterns(
'',
url(r"^admin/", include(admin.site.urls)),
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
<commit_msg>Remove the admin url mapping<commit_after># -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, include, url
from django.contrib import admin
from trex.views import project
urlpatterns = patterns(
'',
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
|
faf77acc7ddb6a5e2bc198fcfec129f83d2a7678
|
plotly/tests/test_core/test_file/test_file.py
|
plotly/tests/test_core/test_file/test_file.py
|
"""
test_meta:
==========
A module intended for use with Nose.
"""
from nose.tools import raises
from nose import with_setup
import random
import string
import requests
import plotly.plotly as py
import plotly.tools as tls
from plotly.exceptions import PlotlyRequestError
def _random_filename():
random_chars = [random.choice(string.ascii_uppercase) for _ in range(5)]
unique_filename = 'Valid Folder'+''.join(random_chars)
return unique_filename
def init():
py.sign_in('PythonTest', '9v9f20pext')
@with_setup(init)
def test_create_folder():
py.file_ops.mkdirs(_random_filename())
@with_setup(init)
def test_create_nested_folders():
first_folder = _random_filename()
nested_folder = '{0}/{1}'.format(first_folder, _random_filename())
py.file_ops.mkdirs(nested_folder)
@with_setup(init)
def test_duplicate_folders():
first_folder = _random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
if e.status_code != 409:
raise e
|
"""
test_meta:
==========
A module intended for use with Nose.
"""
import random
import string
from unittest import TestCase
import plotly.plotly as py
from plotly.exceptions import PlotlyRequestError
class FolderAPITestCase(TestCase):
def setUp(self):
py.sign_in('PythonTest', '9v9f20pext')
def _random_filename(self):
random_chars = [random.choice(string.ascii_uppercase)
for _ in range(5)]
unique_filename = 'Valid Folder'+''.join(random_chars)
return unique_filename
def test_create_folder(self):
try:
py.file_ops.mkdirs(self._random_filename())
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_create_nested_folders(self):
first_folder = self._random_filename()
nested_folder = '{0}/{1}'.format(first_folder, self._random_filename())
try:
py.file_ops.mkdirs(nested_folder)
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_duplicate_folders(self):
first_folder = self._random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
self.assertTrue(400 <= e.status_code < 500)
else:
self.fail('Expected this to fail!')
|
Fix failing test and refact to TestCase.
|
Fix failing test and refact to TestCase.
|
Python
|
mit
|
ee-in/python-api,plotly/plotly.py,plotly/python-api,ee-in/python-api,plotly/python-api,plotly/python-api,plotly/plotly.py,plotly/plotly.py,ee-in/python-api
|
"""
test_meta:
==========
A module intended for use with Nose.
"""
from nose.tools import raises
from nose import with_setup
import random
import string
import requests
import plotly.plotly as py
import plotly.tools as tls
from plotly.exceptions import PlotlyRequestError
def _random_filename():
random_chars = [random.choice(string.ascii_uppercase) for _ in range(5)]
unique_filename = 'Valid Folder'+''.join(random_chars)
return unique_filename
def init():
py.sign_in('PythonTest', '9v9f20pext')
@with_setup(init)
def test_create_folder():
py.file_ops.mkdirs(_random_filename())
@with_setup(init)
def test_create_nested_folders():
first_folder = _random_filename()
nested_folder = '{0}/{1}'.format(first_folder, _random_filename())
py.file_ops.mkdirs(nested_folder)
@with_setup(init)
def test_duplicate_folders():
first_folder = _random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
if e.status_code != 409:
raise e
Fix failing test and refact to TestCase.
|
"""
test_meta:
==========
A module intended for use with Nose.
"""
import random
import string
from unittest import TestCase
import plotly.plotly as py
from plotly.exceptions import PlotlyRequestError
class FolderAPITestCase(TestCase):
def setUp(self):
py.sign_in('PythonTest', '9v9f20pext')
def _random_filename(self):
random_chars = [random.choice(string.ascii_uppercase)
for _ in range(5)]
unique_filename = 'Valid Folder'+''.join(random_chars)
return unique_filename
def test_create_folder(self):
try:
py.file_ops.mkdirs(self._random_filename())
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_create_nested_folders(self):
first_folder = self._random_filename()
nested_folder = '{0}/{1}'.format(first_folder, self._random_filename())
try:
py.file_ops.mkdirs(nested_folder)
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_duplicate_folders(self):
first_folder = self._random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
self.assertTrue(400 <= e.status_code < 500)
else:
self.fail('Expected this to fail!')
|
<commit_before>"""
test_meta:
==========
A module intended for use with Nose.
"""
from nose.tools import raises
from nose import with_setup
import random
import string
import requests
import plotly.plotly as py
import plotly.tools as tls
from plotly.exceptions import PlotlyRequestError
def _random_filename():
random_chars = [random.choice(string.ascii_uppercase) for _ in range(5)]
unique_filename = 'Valid Folder'+''.join(random_chars)
return unique_filename
def init():
py.sign_in('PythonTest', '9v9f20pext')
@with_setup(init)
def test_create_folder():
py.file_ops.mkdirs(_random_filename())
@with_setup(init)
def test_create_nested_folders():
first_folder = _random_filename()
nested_folder = '{0}/{1}'.format(first_folder, _random_filename())
py.file_ops.mkdirs(nested_folder)
@with_setup(init)
def test_duplicate_folders():
first_folder = _random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
if e.status_code != 409:
raise e
<commit_msg>Fix failing test and refact to TestCase.<commit_after>
|
"""
test_meta:
==========
A module intended for use with Nose.
"""
import random
import string
from unittest import TestCase
import plotly.plotly as py
from plotly.exceptions import PlotlyRequestError
class FolderAPITestCase(TestCase):
def setUp(self):
py.sign_in('PythonTest', '9v9f20pext')
def _random_filename(self):
random_chars = [random.choice(string.ascii_uppercase)
for _ in range(5)]
unique_filename = 'Valid Folder'+''.join(random_chars)
return unique_filename
def test_create_folder(self):
try:
py.file_ops.mkdirs(self._random_filename())
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_create_nested_folders(self):
first_folder = self._random_filename()
nested_folder = '{0}/{1}'.format(first_folder, self._random_filename())
try:
py.file_ops.mkdirs(nested_folder)
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_duplicate_folders(self):
first_folder = self._random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
self.assertTrue(400 <= e.status_code < 500)
else:
self.fail('Expected this to fail!')
|
"""
test_meta:
==========
A module intended for use with Nose.
"""
from nose.tools import raises
from nose import with_setup
import random
import string
import requests
import plotly.plotly as py
import plotly.tools as tls
from plotly.exceptions import PlotlyRequestError
def _random_filename():
random_chars = [random.choice(string.ascii_uppercase) for _ in range(5)]
unique_filename = 'Valid Folder'+''.join(random_chars)
return unique_filename
def init():
py.sign_in('PythonTest', '9v9f20pext')
@with_setup(init)
def test_create_folder():
py.file_ops.mkdirs(_random_filename())
@with_setup(init)
def test_create_nested_folders():
first_folder = _random_filename()
nested_folder = '{0}/{1}'.format(first_folder, _random_filename())
py.file_ops.mkdirs(nested_folder)
@with_setup(init)
def test_duplicate_folders():
first_folder = _random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
if e.status_code != 409:
raise e
Fix failing test and refact to TestCase."""
test_meta:
==========
A module intended for use with Nose.
"""
import random
import string
from unittest import TestCase
import plotly.plotly as py
from plotly.exceptions import PlotlyRequestError
class FolderAPITestCase(TestCase):
def setUp(self):
py.sign_in('PythonTest', '9v9f20pext')
def _random_filename(self):
random_chars = [random.choice(string.ascii_uppercase)
for _ in range(5)]
unique_filename = 'Valid Folder'+''.join(random_chars)
return unique_filename
def test_create_folder(self):
try:
py.file_ops.mkdirs(self._random_filename())
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_create_nested_folders(self):
first_folder = self._random_filename()
nested_folder = '{0}/{1}'.format(first_folder, self._random_filename())
try:
py.file_ops.mkdirs(nested_folder)
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_duplicate_folders(self):
first_folder = self._random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
self.assertTrue(400 <= e.status_code < 500)
else:
self.fail('Expected this to fail!')
|
<commit_before>"""
test_meta:
==========
A module intended for use with Nose.
"""
from nose.tools import raises
from nose import with_setup
import random
import string
import requests
import plotly.plotly as py
import plotly.tools as tls
from plotly.exceptions import PlotlyRequestError
def _random_filename():
random_chars = [random.choice(string.ascii_uppercase) for _ in range(5)]
unique_filename = 'Valid Folder'+''.join(random_chars)
return unique_filename
def init():
py.sign_in('PythonTest', '9v9f20pext')
@with_setup(init)
def test_create_folder():
py.file_ops.mkdirs(_random_filename())
@with_setup(init)
def test_create_nested_folders():
first_folder = _random_filename()
nested_folder = '{0}/{1}'.format(first_folder, _random_filename())
py.file_ops.mkdirs(nested_folder)
@with_setup(init)
def test_duplicate_folders():
first_folder = _random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
if e.status_code != 409:
raise e
<commit_msg>Fix failing test and refact to TestCase.<commit_after>"""
test_meta:
==========
A module intended for use with Nose.
"""
import random
import string
from unittest import TestCase
import plotly.plotly as py
from plotly.exceptions import PlotlyRequestError
class FolderAPITestCase(TestCase):
def setUp(self):
py.sign_in('PythonTest', '9v9f20pext')
def _random_filename(self):
random_chars = [random.choice(string.ascii_uppercase)
for _ in range(5)]
unique_filename = 'Valid Folder'+''.join(random_chars)
return unique_filename
def test_create_folder(self):
try:
py.file_ops.mkdirs(self._random_filename())
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_create_nested_folders(self):
first_folder = self._random_filename()
nested_folder = '{0}/{1}'.format(first_folder, self._random_filename())
try:
py.file_ops.mkdirs(nested_folder)
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_duplicate_folders(self):
first_folder = self._random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
self.assertTrue(400 <= e.status_code < 500)
else:
self.fail('Expected this to fail!')
|
530bd321f38a0131eb250148bd0a67d9a59da34c
|
uno_image.py
|
uno_image.py
|
"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
def createUnoService(self, name):
return self.context.ServiceManager.createInstanceWithContext(name, self.context)
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
Add code to create needed uno services
|
Add code to create needed uno services
|
Python
|
mpl-2.0
|
JIghtuse/uno-image-manipulation-example
|
"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
Add code to create needed uno services
|
"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
def createUnoService(self, name):
return self.context.ServiceManager.createInstanceWithContext(name, self.context)
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
<commit_before>"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
<commit_msg>Add code to create needed uno services<commit_after>
|
"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
def createUnoService(self, name):
return self.context.ServiceManager.createInstanceWithContext(name, self.context)
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
Add code to create needed uno services"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
def createUnoService(self, name):
return self.context.ServiceManager.createInstanceWithContext(name, self.context)
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
<commit_before>"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
<commit_msg>Add code to create needed uno services<commit_after>"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
def createUnoService(self, name):
return self.context.ServiceManager.createInstanceWithContext(name, self.context)
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
f2ce77ce713610ddd7ee1b08768d2a84121f0803
|
hunter/reviewsapi.py
|
hunter/reviewsapi.py
|
import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change you token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, certifications_list):
projects = self.projects_with_languages(certifications_list)
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
def projects_with_languages(self, certifications_list):
languages_list = self.certified_languages()
projects_list = [{'project_id': project_id, 'language': language} for project_id in certifications_list for language in languages_list]
return {'projects': projects_list}
|
import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, certifications_list):
projects = self.projects_with_languages(certifications_list)
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
def projects_with_languages(self, certifications_list):
languages_list = self.certified_languages()
projects_list = [{'project_id': project_id, 'language': language} for project_id in certifications_list for language in languages_list]
return {'projects': projects_list}
|
Fix a typo on error message
|
Fix a typo on error message
|
Python
|
mit
|
anapaulagomes/reviews-assigner
|
import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change you token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, certifications_list):
projects = self.projects_with_languages(certifications_list)
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
def projects_with_languages(self, certifications_list):
languages_list = self.certified_languages()
projects_list = [{'project_id': project_id, 'language': language} for project_id in certifications_list for language in languages_list]
return {'projects': projects_list}
Fix a typo on error message
|
import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, certifications_list):
projects = self.projects_with_languages(certifications_list)
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
def projects_with_languages(self, certifications_list):
languages_list = self.certified_languages()
projects_list = [{'project_id': project_id, 'language': language} for project_id in certifications_list for language in languages_list]
return {'projects': projects_list}
|
<commit_before>import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change you token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, certifications_list):
projects = self.projects_with_languages(certifications_list)
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
def projects_with_languages(self, certifications_list):
languages_list = self.certified_languages()
projects_list = [{'project_id': project_id, 'language': language} for project_id in certifications_list for language in languages_list]
return {'projects': projects_list}
<commit_msg>Fix a typo on error message<commit_after>
|
import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, certifications_list):
projects = self.projects_with_languages(certifications_list)
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
def projects_with_languages(self, certifications_list):
languages_list = self.certified_languages()
projects_list = [{'project_id': project_id, 'language': language} for project_id in certifications_list for language in languages_list]
return {'projects': projects_list}
|
import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change you token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, certifications_list):
projects = self.projects_with_languages(certifications_list)
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
def projects_with_languages(self, certifications_list):
languages_list = self.certified_languages()
projects_list = [{'project_id': project_id, 'language': language} for project_id in certifications_list for language in languages_list]
return {'projects': projects_list}
Fix a typo on error messageimport requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, certifications_list):
projects = self.projects_with_languages(certifications_list)
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
def projects_with_languages(self, certifications_list):
languages_list = self.certified_languages()
projects_list = [{'project_id': project_id, 'language': language} for project_id in certifications_list for language in languages_list]
return {'projects': projects_list}
|
<commit_before>import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change you token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, certifications_list):
projects = self.projects_with_languages(certifications_list)
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
def projects_with_languages(self, certifications_list):
languages_list = self.certified_languages()
projects_list = [{'project_id': project_id, 'language': language} for project_id in certifications_list for language in languages_list]
return {'projects': projects_list}
<commit_msg>Fix a typo on error message<commit_after>import requests
import os
from .endpoints import *
class UnauthorizedToken(Exception):
pass
class ReviewsAPI:
def __init__(self):
token = os.environ['UDACITY_AUTH_TOKEN']
self.headers = {'Authorization': token, 'Content-Length': '0'}
def execute(self, request):
try:
raw_response = request()
response = raw_response.json()
raw_response.raise_for_status()
return response
except requests.exceptions.HTTPError:
raise UnauthorizedToken('Maybe it\'s time to change your token!')
def certifications(self):
return self.execute(lambda : requests.get(CERTIFICATIONS_URL, headers=self.headers))
def certified_languages(self):
response = self.execute(lambda : requests.get(REVIEWER_URL, headers=self.headers))
return [language for language in response['application']['languages']]
def request_reviews(self, certifications_list):
projects = self.projects_with_languages(certifications_list)
return self.execute(lambda : requests.post(SUBMISSION_REQUESTS_URL, json=projects, headers=self.headers))
def projects_with_languages(self, certifications_list):
languages_list = self.certified_languages()
projects_list = [{'project_id': project_id, 'language': language} for project_id in certifications_list for language in languages_list]
return {'projects': projects_list}
|
3a77de3c7d863041bea1366c50a95293d1cd2f7a
|
tests/functional/test_warning.py
|
tests/functional/test_warning.py
|
import pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
# NOTE: PYTHONWARNINGS was added in 2.7
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
|
import pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
def test_deprecation_warnings_can_be_silenced(script, warnings_demo):
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
|
Split tests for different functionality
|
Split tests for different functionality
|
Python
|
mit
|
pypa/pip,pradyunsg/pip,xavfernandez/pip,rouge8/pip,pypa/pip,xavfernandez/pip,rouge8/pip,xavfernandez/pip,pfmoore/pip,sbidoul/pip,rouge8/pip,sbidoul/pip,pradyunsg/pip,pfmoore/pip
|
import pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
# NOTE: PYTHONWARNINGS was added in 2.7
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
Split tests for different functionality
|
import pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
def test_deprecation_warnings_can_be_silenced(script, warnings_demo):
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
|
<commit_before>import pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
# NOTE: PYTHONWARNINGS was added in 2.7
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
<commit_msg>Split tests for different functionality<commit_after>
|
import pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
def test_deprecation_warnings_can_be_silenced(script, warnings_demo):
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
|
import pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
# NOTE: PYTHONWARNINGS was added in 2.7
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
Split tests for different functionalityimport pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
def test_deprecation_warnings_can_be_silenced(script, warnings_demo):
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
|
<commit_before>import pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
# NOTE: PYTHONWARNINGS was added in 2.7
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
<commit_msg>Split tests for different functionality<commit_after>import pytest
import textwrap
@pytest.fixture
def warnings_demo(tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
return demo
def test_deprecation_warnings_are_correct(script, warnings_demo):
result = script.run('python', warnings_demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
def test_deprecation_warnings_can_be_silenced(script, warnings_demo):
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', warnings_demo)
assert result.stderr == ''
|
fe2f37c71f4c46997eb2d2e775bb928a2e7bcad1
|
contentdensity/textifai/modules/gic.py
|
contentdensity/textifai/modules/gic.py
|
from functools import reduce
from ..models import User, Text, Insight, Comment, GeneralInsight
class general_insight_calculator:
name = None
calc = lambda *_: None
def __init__(self, name, calc):
self.name = name
self.calc = calc
def do_calc(self):
return self.calc()
def calc_and_save(self):
entry, created = GeneralInsight.objects.get_or_create(pk=self.name, defaults={'value':''})
entry.value = self.do_calc()
entry.save()
general_insights = [ ]
def add_general_insight(name, func):
global general_insights
general_insights.append(general_insight_calculator(name, func))
def calc_and_save_general_insights():
for insight in general_insights:
insight.calc_and_save()
########################################################################
# Insight calculation implementations
########################################################################
def _calc_total_words():
ret = 0
for text in Text.objects.all():
ret += len(text.content.split())
return ret
add_general_insight('Total Words', _calc_total_words)
|
from functools import reduce
from ..models import User, Text, Insight, Comment, GeneralInsight
class general_insight_calculator:
name = None
calc = lambda *_: None
def __init__(self, name, calc):
self.name = name
self.calc = calc
def do_calc(self):
return self.calc()
def calc_and_save(self):
entry, created = GeneralInsight.objects.get_or_create(pk=self.name, defaults={'value':''})
entry.value = self.do_calc()
entry.save()
# Dictionary of general insight name to general insight calculator
general_insights = { }
def add_general_insight(name, func):
global general_insights
general_insights[name] = general_insight_calculator(name, func)
def calc_and_save_general_insights():
for insight in general_insights:
insight.calc_and_save()
########################################################################
# Insight calculation implementations
########################################################################
def _calc_total_words():
ret = 0
for text in Text.objects.all():
ret += len(text.content.split())
return ret
add_general_insight('Total Words', _calc_total_words)
|
Change from array to dictionary for general insight calculators
|
Change from array to dictionary for general insight calculators
|
Python
|
mit
|
CS326-important/space-deer,CS326-important/space-deer
|
from functools import reduce
from ..models import User, Text, Insight, Comment, GeneralInsight
class general_insight_calculator:
name = None
calc = lambda *_: None
def __init__(self, name, calc):
self.name = name
self.calc = calc
def do_calc(self):
return self.calc()
def calc_and_save(self):
entry, created = GeneralInsight.objects.get_or_create(pk=self.name, defaults={'value':''})
entry.value = self.do_calc()
entry.save()
general_insights = [ ]
def add_general_insight(name, func):
global general_insights
general_insights.append(general_insight_calculator(name, func))
def calc_and_save_general_insights():
for insight in general_insights:
insight.calc_and_save()
########################################################################
# Insight calculation implementations
########################################################################
def _calc_total_words():
ret = 0
for text in Text.objects.all():
ret += len(text.content.split())
return ret
add_general_insight('Total Words', _calc_total_words)
Change from array to dictionary for general insight calculators
|
from functools import reduce
from ..models import User, Text, Insight, Comment, GeneralInsight
class general_insight_calculator:
name = None
calc = lambda *_: None
def __init__(self, name, calc):
self.name = name
self.calc = calc
def do_calc(self):
return self.calc()
def calc_and_save(self):
entry, created = GeneralInsight.objects.get_or_create(pk=self.name, defaults={'value':''})
entry.value = self.do_calc()
entry.save()
# Dictionary of general insight name to general insight calculator
general_insights = { }
def add_general_insight(name, func):
global general_insights
general_insights[name] = general_insight_calculator(name, func)
def calc_and_save_general_insights():
for insight in general_insights:
insight.calc_and_save()
########################################################################
# Insight calculation implementations
########################################################################
def _calc_total_words():
ret = 0
for text in Text.objects.all():
ret += len(text.content.split())
return ret
add_general_insight('Total Words', _calc_total_words)
|
<commit_before>
from functools import reduce
from ..models import User, Text, Insight, Comment, GeneralInsight
class general_insight_calculator:
name = None
calc = lambda *_: None
def __init__(self, name, calc):
self.name = name
self.calc = calc
def do_calc(self):
return self.calc()
def calc_and_save(self):
entry, created = GeneralInsight.objects.get_or_create(pk=self.name, defaults={'value':''})
entry.value = self.do_calc()
entry.save()
general_insights = [ ]
def add_general_insight(name, func):
global general_insights
general_insights.append(general_insight_calculator(name, func))
def calc_and_save_general_insights():
for insight in general_insights:
insight.calc_and_save()
########################################################################
# Insight calculation implementations
########################################################################
def _calc_total_words():
ret = 0
for text in Text.objects.all():
ret += len(text.content.split())
return ret
add_general_insight('Total Words', _calc_total_words)
<commit_msg>Change from array to dictionary for general insight calculators<commit_after>
|
from functools import reduce
from ..models import User, Text, Insight, Comment, GeneralInsight
class general_insight_calculator:
name = None
calc = lambda *_: None
def __init__(self, name, calc):
self.name = name
self.calc = calc
def do_calc(self):
return self.calc()
def calc_and_save(self):
entry, created = GeneralInsight.objects.get_or_create(pk=self.name, defaults={'value':''})
entry.value = self.do_calc()
entry.save()
# Dictionary of general insight name to general insight calculator
general_insights = { }
def add_general_insight(name, func):
global general_insights
general_insights[name] = general_insight_calculator(name, func)
def calc_and_save_general_insights():
for insight in general_insights:
insight.calc_and_save()
########################################################################
# Insight calculation implementations
########################################################################
def _calc_total_words():
ret = 0
for text in Text.objects.all():
ret += len(text.content.split())
return ret
add_general_insight('Total Words', _calc_total_words)
|
from functools import reduce
from ..models import User, Text, Insight, Comment, GeneralInsight
class general_insight_calculator:
name = None
calc = lambda *_: None
def __init__(self, name, calc):
self.name = name
self.calc = calc
def do_calc(self):
return self.calc()
def calc_and_save(self):
entry, created = GeneralInsight.objects.get_or_create(pk=self.name, defaults={'value':''})
entry.value = self.do_calc()
entry.save()
general_insights = [ ]
def add_general_insight(name, func):
global general_insights
general_insights.append(general_insight_calculator(name, func))
def calc_and_save_general_insights():
for insight in general_insights:
insight.calc_and_save()
########################################################################
# Insight calculation implementations
########################################################################
def _calc_total_words():
ret = 0
for text in Text.objects.all():
ret += len(text.content.split())
return ret
add_general_insight('Total Words', _calc_total_words)
Change from array to dictionary for general insight calculators
from functools import reduce
from ..models import User, Text, Insight, Comment, GeneralInsight
class general_insight_calculator:
name = None
calc = lambda *_: None
def __init__(self, name, calc):
self.name = name
self.calc = calc
def do_calc(self):
return self.calc()
def calc_and_save(self):
entry, created = GeneralInsight.objects.get_or_create(pk=self.name, defaults={'value':''})
entry.value = self.do_calc()
entry.save()
# Dictionary of general insight name to general insight calculator
general_insights = { }
def add_general_insight(name, func):
global general_insights
general_insights[name] = general_insight_calculator(name, func)
def calc_and_save_general_insights():
for insight in general_insights:
insight.calc_and_save()
########################################################################
# Insight calculation implementations
########################################################################
def _calc_total_words():
ret = 0
for text in Text.objects.all():
ret += len(text.content.split())
return ret
add_general_insight('Total Words', _calc_total_words)
|
<commit_before>
from functools import reduce
from ..models import User, Text, Insight, Comment, GeneralInsight
class general_insight_calculator:
name = None
calc = lambda *_: None
def __init__(self, name, calc):
self.name = name
self.calc = calc
def do_calc(self):
return self.calc()
def calc_and_save(self):
entry, created = GeneralInsight.objects.get_or_create(pk=self.name, defaults={'value':''})
entry.value = self.do_calc()
entry.save()
general_insights = [ ]
def add_general_insight(name, func):
global general_insights
general_insights.append(general_insight_calculator(name, func))
def calc_and_save_general_insights():
for insight in general_insights:
insight.calc_and_save()
########################################################################
# Insight calculation implementations
########################################################################
def _calc_total_words():
ret = 0
for text in Text.objects.all():
ret += len(text.content.split())
return ret
add_general_insight('Total Words', _calc_total_words)
<commit_msg>Change from array to dictionary for general insight calculators<commit_after>
from functools import reduce
from ..models import User, Text, Insight, Comment, GeneralInsight
class general_insight_calculator:
name = None
calc = lambda *_: None
def __init__(self, name, calc):
self.name = name
self.calc = calc
def do_calc(self):
return self.calc()
def calc_and_save(self):
entry, created = GeneralInsight.objects.get_or_create(pk=self.name, defaults={'value':''})
entry.value = self.do_calc()
entry.save()
# Dictionary of general insight name to general insight calculator
general_insights = { }
def add_general_insight(name, func):
global general_insights
general_insights[name] = general_insight_calculator(name, func)
def calc_and_save_general_insights():
for insight in general_insights:
insight.calc_and_save()
########################################################################
# Insight calculation implementations
########################################################################
def _calc_total_words():
ret = 0
for text in Text.objects.all():
ret += len(text.content.split())
return ret
add_general_insight('Total Words', _calc_total_words)
|
0273fc0109d1ef4a4de0450998a6c420cb90217a
|
util_funcs.py
|
util_funcs.py
|
#!/usr/bin/env python
"""Collection of module netural utility functions"""
from sys import stderr
from ssl import SSLError
try:
from urllib.request import urlopen, HTTPError, URLError
except ImportError:
from urllib2 import urlopen, HTTPError, URLError
class HTMLGetError(Exception):
pass
def get_html(url):
try:
html = urlopen(url)
except (HTTPError, URLError, SSLError) as err:
raise(HTMLGetError(err))
return html.read().decode('utf-8')
def progress_msg(processed, total):
"""Update user on percent done"""
if total > 1:
percent = int((float(processed) / total) * 100)
stderr.write(
"\r[%d/%d] %d%%" % (processed, total, percent)
)
stderr.flush()
|
#!/usr/bin/env python
"""Collection of module netural utility functions"""
from sys import stderr
from ssl import SSLError
from socket import timeout
try:
from urllib.request import urlopen, HTTPError, URLError
except ImportError:
from urllib2 import urlopen, HTTPError, URLError
class HTMLGetError(Exception):
pass
def get_html(url):
try:
html = urlopen(url)
except (HTTPError, URLError, SSLError, timeout) as err:
raise HTMLGetError(err)
return html.read().decode('utf-8')
def progress_msg(processed, total):
"""Update user on percent done"""
if total > 1:
percent = int((float(processed) / total) * 100)
stderr.write(
"\r[%d/%d] %d%%" % (processed, total, percent)
)
stderr.flush()
|
Remove superfluous parens; catch timeout
|
Remove superfluous parens; catch timeout
|
Python
|
mit
|
jblakeman/apt-select,jblakeman/apt-select
|
#!/usr/bin/env python
"""Collection of module netural utility functions"""
from sys import stderr
from ssl import SSLError
try:
from urllib.request import urlopen, HTTPError, URLError
except ImportError:
from urllib2 import urlopen, HTTPError, URLError
class HTMLGetError(Exception):
pass
def get_html(url):
try:
html = urlopen(url)
except (HTTPError, URLError, SSLError) as err:
raise(HTMLGetError(err))
return html.read().decode('utf-8')
def progress_msg(processed, total):
"""Update user on percent done"""
if total > 1:
percent = int((float(processed) / total) * 100)
stderr.write(
"\r[%d/%d] %d%%" % (processed, total, percent)
)
stderr.flush()
Remove superfluous parens; catch timeout
|
#!/usr/bin/env python
"""Collection of module netural utility functions"""
from sys import stderr
from ssl import SSLError
from socket import timeout
try:
from urllib.request import urlopen, HTTPError, URLError
except ImportError:
from urllib2 import urlopen, HTTPError, URLError
class HTMLGetError(Exception):
pass
def get_html(url):
try:
html = urlopen(url)
except (HTTPError, URLError, SSLError, timeout) as err:
raise HTMLGetError(err)
return html.read().decode('utf-8')
def progress_msg(processed, total):
"""Update user on percent done"""
if total > 1:
percent = int((float(processed) / total) * 100)
stderr.write(
"\r[%d/%d] %d%%" % (processed, total, percent)
)
stderr.flush()
|
<commit_before>#!/usr/bin/env python
"""Collection of module netural utility functions"""
from sys import stderr
from ssl import SSLError
try:
from urllib.request import urlopen, HTTPError, URLError
except ImportError:
from urllib2 import urlopen, HTTPError, URLError
class HTMLGetError(Exception):
pass
def get_html(url):
try:
html = urlopen(url)
except (HTTPError, URLError, SSLError) as err:
raise(HTMLGetError(err))
return html.read().decode('utf-8')
def progress_msg(processed, total):
"""Update user on percent done"""
if total > 1:
percent = int((float(processed) / total) * 100)
stderr.write(
"\r[%d/%d] %d%%" % (processed, total, percent)
)
stderr.flush()
<commit_msg>Remove superfluous parens; catch timeout<commit_after>
|
#!/usr/bin/env python
"""Collection of module netural utility functions"""
from sys import stderr
from ssl import SSLError
from socket import timeout
try:
from urllib.request import urlopen, HTTPError, URLError
except ImportError:
from urllib2 import urlopen, HTTPError, URLError
class HTMLGetError(Exception):
pass
def get_html(url):
try:
html = urlopen(url)
except (HTTPError, URLError, SSLError, timeout) as err:
raise HTMLGetError(err)
return html.read().decode('utf-8')
def progress_msg(processed, total):
"""Update user on percent done"""
if total > 1:
percent = int((float(processed) / total) * 100)
stderr.write(
"\r[%d/%d] %d%%" % (processed, total, percent)
)
stderr.flush()
|
#!/usr/bin/env python
"""Collection of module netural utility functions"""
from sys import stderr
from ssl import SSLError
try:
from urllib.request import urlopen, HTTPError, URLError
except ImportError:
from urllib2 import urlopen, HTTPError, URLError
class HTMLGetError(Exception):
pass
def get_html(url):
try:
html = urlopen(url)
except (HTTPError, URLError, SSLError) as err:
raise(HTMLGetError(err))
return html.read().decode('utf-8')
def progress_msg(processed, total):
"""Update user on percent done"""
if total > 1:
percent = int((float(processed) / total) * 100)
stderr.write(
"\r[%d/%d] %d%%" % (processed, total, percent)
)
stderr.flush()
Remove superfluous parens; catch timeout#!/usr/bin/env python
"""Collection of module netural utility functions"""
from sys import stderr
from ssl import SSLError
from socket import timeout
try:
from urllib.request import urlopen, HTTPError, URLError
except ImportError:
from urllib2 import urlopen, HTTPError, URLError
class HTMLGetError(Exception):
pass
def get_html(url):
try:
html = urlopen(url)
except (HTTPError, URLError, SSLError, timeout) as err:
raise HTMLGetError(err)
return html.read().decode('utf-8')
def progress_msg(processed, total):
"""Update user on percent done"""
if total > 1:
percent = int((float(processed) / total) * 100)
stderr.write(
"\r[%d/%d] %d%%" % (processed, total, percent)
)
stderr.flush()
|
<commit_before>#!/usr/bin/env python
"""Collection of module netural utility functions"""
from sys import stderr
from ssl import SSLError
try:
from urllib.request import urlopen, HTTPError, URLError
except ImportError:
from urllib2 import urlopen, HTTPError, URLError
class HTMLGetError(Exception):
pass
def get_html(url):
try:
html = urlopen(url)
except (HTTPError, URLError, SSLError) as err:
raise(HTMLGetError(err))
return html.read().decode('utf-8')
def progress_msg(processed, total):
"""Update user on percent done"""
if total > 1:
percent = int((float(processed) / total) * 100)
stderr.write(
"\r[%d/%d] %d%%" % (processed, total, percent)
)
stderr.flush()
<commit_msg>Remove superfluous parens; catch timeout<commit_after>#!/usr/bin/env python
"""Collection of module netural utility functions"""
from sys import stderr
from ssl import SSLError
from socket import timeout
try:
from urllib.request import urlopen, HTTPError, URLError
except ImportError:
from urllib2 import urlopen, HTTPError, URLError
class HTMLGetError(Exception):
pass
def get_html(url):
try:
html = urlopen(url)
except (HTTPError, URLError, SSLError, timeout) as err:
raise HTMLGetError(err)
return html.read().decode('utf-8')
def progress_msg(processed, total):
"""Update user on percent done"""
if total > 1:
percent = int((float(processed) / total) * 100)
stderr.write(
"\r[%d/%d] %d%%" % (processed, total, percent)
)
stderr.flush()
|
3222fab1b026250d9aee863d068137b03c13a05b
|
tests/test_check_dependencies.py
|
tests/test_check_dependencies.py
|
#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
|
#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
def test_cem():
CheckDependencies("cem")
|
Add dependency check test for CEM
|
Add dependency check test for CEM
|
Python
|
mit
|
csdms/rpm_models
|
#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
Add dependency check test for CEM
|
#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
def test_cem():
CheckDependencies("cem")
|
<commit_before>#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
<commit_msg>Add dependency check test for CEM<commit_after>
|
#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
def test_cem():
CheckDependencies("cem")
|
#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
Add dependency check test for CEM#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
def test_cem():
CheckDependencies("cem")
|
<commit_before>#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
<commit_msg>Add dependency check test for CEM<commit_after>#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
def test_cem():
CheckDependencies("cem")
|
85605ab0c08528c772d53ad746eb5eadcd6e495c
|
hook-mcedit2.py
|
hook-mcedit2.py
|
"""
hook-mcedit2.py
Hook for pyinstaller to collect MCEdit's data files
"""
from __future__ import absolute_import, division, print_function#, unicode_literals
import glob
import logging
import os
from PyInstaller.hooks.hookutils import collect_data_files
log = logging.getLogger(__name__)
datas = collect_data_files('mceditlib') + collect_data_files('mcedit2')
|
"""
hook-mcedit2.py
Hook for pyinstaller to collect MCEdit's data files
"""
from __future__ import absolute_import, division, print_function#, unicode_literals
import glob
import logging
import os
from PyInstaller.hooks.hookutils import collect_data_files
log = logging.getLogger(__name__)
# Remove cython and coverage byproducts
def ext_filter(source):
base = os.path.basename(source)
if base == '.coverage':
return False
name, ext = os.path.splitext(base)
return ext not in ('.c', '.html')
mceditlib_datas = collect_data_files('mceditlib')
mceditlib_datas = [(source, dest)
for source, dest in mceditlib_datas
if ext_filter(source)]
mcedit2_datas = collect_data_files('mcedit2')
mcedit2_datas = [(source, dest)
for source, dest in mcedit2_datas
if ext_filter(source)]
datas = mceditlib_datas + mcedit2_datas
|
Exclude secondary cython outputs from pyi spec
|
Exclude secondary cython outputs from pyi spec
|
Python
|
bsd-3-clause
|
Rubisk/mcedit2,vorburger/mcedit2,Rubisk/mcedit2,vorburger/mcedit2
|
"""
hook-mcedit2.py
Hook for pyinstaller to collect MCEdit's data files
"""
from __future__ import absolute_import, division, print_function#, unicode_literals
import glob
import logging
import os
from PyInstaller.hooks.hookutils import collect_data_files
log = logging.getLogger(__name__)
datas = collect_data_files('mceditlib') + collect_data_files('mcedit2')
Exclude secondary cython outputs from pyi spec
|
"""
hook-mcedit2.py
Hook for pyinstaller to collect MCEdit's data files
"""
from __future__ import absolute_import, division, print_function#, unicode_literals
import glob
import logging
import os
from PyInstaller.hooks.hookutils import collect_data_files
log = logging.getLogger(__name__)
# Remove cython and coverage byproducts
def ext_filter(source):
base = os.path.basename(source)
if base == '.coverage':
return False
name, ext = os.path.splitext(base)
return ext not in ('.c', '.html')
mceditlib_datas = collect_data_files('mceditlib')
mceditlib_datas = [(source, dest)
for source, dest in mceditlib_datas
if ext_filter(source)]
mcedit2_datas = collect_data_files('mcedit2')
mcedit2_datas = [(source, dest)
for source, dest in mcedit2_datas
if ext_filter(source)]
datas = mceditlib_datas + mcedit2_datas
|
<commit_before>"""
hook-mcedit2.py
Hook for pyinstaller to collect MCEdit's data files
"""
from __future__ import absolute_import, division, print_function#, unicode_literals
import glob
import logging
import os
from PyInstaller.hooks.hookutils import collect_data_files
log = logging.getLogger(__name__)
datas = collect_data_files('mceditlib') + collect_data_files('mcedit2')
<commit_msg>Exclude secondary cython outputs from pyi spec<commit_after>
|
"""
hook-mcedit2.py
Hook for pyinstaller to collect MCEdit's data files
"""
from __future__ import absolute_import, division, print_function#, unicode_literals
import glob
import logging
import os
from PyInstaller.hooks.hookutils import collect_data_files
log = logging.getLogger(__name__)
# Remove cython and coverage byproducts
def ext_filter(source):
base = os.path.basename(source)
if base == '.coverage':
return False
name, ext = os.path.splitext(base)
return ext not in ('.c', '.html')
mceditlib_datas = collect_data_files('mceditlib')
mceditlib_datas = [(source, dest)
for source, dest in mceditlib_datas
if ext_filter(source)]
mcedit2_datas = collect_data_files('mcedit2')
mcedit2_datas = [(source, dest)
for source, dest in mcedit2_datas
if ext_filter(source)]
datas = mceditlib_datas + mcedit2_datas
|
"""
hook-mcedit2.py
Hook for pyinstaller to collect MCEdit's data files
"""
from __future__ import absolute_import, division, print_function#, unicode_literals
import glob
import logging
import os
from PyInstaller.hooks.hookutils import collect_data_files
log = logging.getLogger(__name__)
datas = collect_data_files('mceditlib') + collect_data_files('mcedit2')
Exclude secondary cython outputs from pyi spec"""
hook-mcedit2.py
Hook for pyinstaller to collect MCEdit's data files
"""
from __future__ import absolute_import, division, print_function#, unicode_literals
import glob
import logging
import os
from PyInstaller.hooks.hookutils import collect_data_files
log = logging.getLogger(__name__)
# Remove cython and coverage byproducts
def ext_filter(source):
base = os.path.basename(source)
if base == '.coverage':
return False
name, ext = os.path.splitext(base)
return ext not in ('.c', '.html')
mceditlib_datas = collect_data_files('mceditlib')
mceditlib_datas = [(source, dest)
for source, dest in mceditlib_datas
if ext_filter(source)]
mcedit2_datas = collect_data_files('mcedit2')
mcedit2_datas = [(source, dest)
for source, dest in mcedit2_datas
if ext_filter(source)]
datas = mceditlib_datas + mcedit2_datas
|
<commit_before>"""
hook-mcedit2.py
Hook for pyinstaller to collect MCEdit's data files
"""
from __future__ import absolute_import, division, print_function#, unicode_literals
import glob
import logging
import os
from PyInstaller.hooks.hookutils import collect_data_files
log = logging.getLogger(__name__)
datas = collect_data_files('mceditlib') + collect_data_files('mcedit2')
<commit_msg>Exclude secondary cython outputs from pyi spec<commit_after>"""
hook-mcedit2.py
Hook for pyinstaller to collect MCEdit's data files
"""
from __future__ import absolute_import, division, print_function#, unicode_literals
import glob
import logging
import os
from PyInstaller.hooks.hookutils import collect_data_files
log = logging.getLogger(__name__)
# Remove cython and coverage byproducts
def ext_filter(source):
base = os.path.basename(source)
if base == '.coverage':
return False
name, ext = os.path.splitext(base)
return ext not in ('.c', '.html')
mceditlib_datas = collect_data_files('mceditlib')
mceditlib_datas = [(source, dest)
for source, dest in mceditlib_datas
if ext_filter(source)]
mcedit2_datas = collect_data_files('mcedit2')
mcedit2_datas = [(source, dest)
for source, dest in mcedit2_datas
if ext_filter(source)]
datas = mceditlib_datas + mcedit2_datas
|
2da3f9cf12c340322f512585711ebc02097c72a1
|
tests/views/test_calls_for_comments_page.py
|
tests/views/test_calls_for_comments_page.py
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
self.assertTrue(False)
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
|
Remove false assertion from test
|
Remove false assertion from test
|
Python
|
apache-2.0
|
Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
self.assertTrue(False)
Remove false assertion from test
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
|
<commit_before>from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
self.assertTrue(False)
<commit_msg>Remove false assertion from test<commit_after>
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
self.assertTrue(False)
Remove false assertion from testfrom tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
|
<commit_before>from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
self.assertTrue(False)
<commit_msg>Remove false assertion from test<commit_after>from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, CallForCommentData
import urllib.request, urllib.error, urllib.parse
class TestCallsForCommentsPage(PMGLiveServerTestCase):
def setUp(self):
super(TestCallsForCommentsPage, self).setUp()
self.fx = dbfixture.data(CallForCommentData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestCallsForCommentsPage, self).tearDown()
def test_calls_for_comments(self):
"""
Test calls for comments page (/calls-for-comments/)
"""
call_for_comment = self.fx.CallForCommentData.arts_call_for_comment_one
self.make_request("/calls-for-comments/")
self.assertIn(call_for_comment.title, self.html)
|
fcf3511a586b5efe4a86674ccd4c80c67ec2ed14
|
tracker/src/main/tracker/util/connection.py
|
tracker/src/main/tracker/util/connection.py
|
import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
DB_URL = os.environ['DB_URL']
Base = automap_base()
engine = create_engine(DB_URL)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
|
import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
DB_URL = os.environ['DB_URL']
if not DB_URL:
raise ValueError("DB_URL not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
|
Test for DB_URL being present in environment.
|
Test for DB_URL being present in environment.
|
Python
|
mit
|
llevar/germline-regenotyper,llevar/germline-regenotyper
|
import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
DB_URL = os.environ['DB_URL']
Base = automap_base()
engine = create_engine(DB_URL)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)Test for DB_URL being present in environment.
|
import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
DB_URL = os.environ['DB_URL']
if not DB_URL:
raise ValueError("DB_URL not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
|
<commit_before>import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
DB_URL = os.environ['DB_URL']
Base = automap_base()
engine = create_engine(DB_URL)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)<commit_msg>Test for DB_URL being present in environment.<commit_after>
|
import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
DB_URL = os.environ['DB_URL']
if not DB_URL:
raise ValueError("DB_URL not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
|
import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
DB_URL = os.environ['DB_URL']
Base = automap_base()
engine = create_engine(DB_URL)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)Test for DB_URL being present in environment.import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
DB_URL = os.environ['DB_URL']
if not DB_URL:
raise ValueError("DB_URL not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
|
<commit_before>import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
DB_URL = os.environ['DB_URL']
Base = automap_base()
engine = create_engine(DB_URL)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)<commit_msg>Test for DB_URL being present in environment.<commit_after>import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
DB_URL = os.environ['DB_URL']
if not DB_URL:
raise ValueError("DB_URL not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
|
9339e0bd1197f8d599309eaff66b83c38721ab29
|
conference/management/commands/add_invoices_for_zero_amount_orders.py
|
conference/management/commands/add_invoices_for_zero_amount_orders.py
|
# -*- coding: UTF-8 -*-
from __future__ import print_function
from django.core.management.base import BaseCommand
from assopy import models as amodels
def generate_invoices_for_zero_amount_orders_for_year(year):
orders = amodels.Order.objects.filter(
created__year=year,
method='bank',
)
for o in orders:
if not o.complete():
continue
print ('Creating invoice for order %r' % o)
o.confirm_order(o.created)
o.complete()
class Command(BaseCommand):
"""
The system did not generate invoices for orders with a zero amount
in 2018 (e.g. as result of using discounts).
We have to add them after the fact.
"""
def handle(self, *args, **options):
generate_invoices_for_zero_amount_orders_for_year(2018)
|
# -*- coding: UTF-8 -*-
from __future__ import print_function
from django.core.management.base import BaseCommand
from assopy import models as amodels
def generate_invoices_for_zero_amount_orders_for_year(year):
orders = amodels.Order.objects.filter(
created__year=year,
method='bank',
)
for o in orders:
if not o.complete():
continue
if o.total() > 0:
continue
print ('Creating invoice for order %r' % o)
o.confirm_order(o.created)
o.complete()
class Command(BaseCommand):
"""
The system did not generate invoices for orders with a zero amount
in 2018 (e.g. as result of using discounts).
We have to add them after the fact.
"""
def handle(self, *args, **options):
generate_invoices_for_zero_amount_orders_for_year(2018)
|
Make sure that only zero amount orders are modified.
|
Make sure that only zero amount orders are modified.
Note really necessary, since we don't have real bank orders,
but better safe than sorry.
|
Python
|
bsd-2-clause
|
EuroPython/epcon,EuroPython/epcon,EuroPython/epcon,EuroPython/epcon
|
# -*- coding: UTF-8 -*-
from __future__ import print_function
from django.core.management.base import BaseCommand
from assopy import models as amodels
def generate_invoices_for_zero_amount_orders_for_year(year):
orders = amodels.Order.objects.filter(
created__year=year,
method='bank',
)
for o in orders:
if not o.complete():
continue
print ('Creating invoice for order %r' % o)
o.confirm_order(o.created)
o.complete()
class Command(BaseCommand):
"""
The system did not generate invoices for orders with a zero amount
in 2018 (e.g. as result of using discounts).
We have to add them after the fact.
"""
def handle(self, *args, **options):
generate_invoices_for_zero_amount_orders_for_year(2018)
Make sure that only zero amount orders are modified.
Note really necessary, since we don't have real bank orders,
but better safe than sorry.
|
# -*- coding: UTF-8 -*-
from __future__ import print_function
from django.core.management.base import BaseCommand
from assopy import models as amodels
def generate_invoices_for_zero_amount_orders_for_year(year):
orders = amodels.Order.objects.filter(
created__year=year,
method='bank',
)
for o in orders:
if not o.complete():
continue
if o.total() > 0:
continue
print ('Creating invoice for order %r' % o)
o.confirm_order(o.created)
o.complete()
class Command(BaseCommand):
"""
The system did not generate invoices for orders with a zero amount
in 2018 (e.g. as result of using discounts).
We have to add them after the fact.
"""
def handle(self, *args, **options):
generate_invoices_for_zero_amount_orders_for_year(2018)
|
<commit_before># -*- coding: UTF-8 -*-
from __future__ import print_function
from django.core.management.base import BaseCommand
from assopy import models as amodels
def generate_invoices_for_zero_amount_orders_for_year(year):
orders = amodels.Order.objects.filter(
created__year=year,
method='bank',
)
for o in orders:
if not o.complete():
continue
print ('Creating invoice for order %r' % o)
o.confirm_order(o.created)
o.complete()
class Command(BaseCommand):
"""
The system did not generate invoices for orders with a zero amount
in 2018 (e.g. as result of using discounts).
We have to add them after the fact.
"""
def handle(self, *args, **options):
generate_invoices_for_zero_amount_orders_for_year(2018)
<commit_msg>Make sure that only zero amount orders are modified.
Note really necessary, since we don't have real bank orders,
but better safe than sorry.<commit_after>
|
# -*- coding: UTF-8 -*-
from __future__ import print_function
from django.core.management.base import BaseCommand
from assopy import models as amodels
def generate_invoices_for_zero_amount_orders_for_year(year):
orders = amodels.Order.objects.filter(
created__year=year,
method='bank',
)
for o in orders:
if not o.complete():
continue
if o.total() > 0:
continue
print ('Creating invoice for order %r' % o)
o.confirm_order(o.created)
o.complete()
class Command(BaseCommand):
"""
The system did not generate invoices for orders with a zero amount
in 2018 (e.g. as result of using discounts).
We have to add them after the fact.
"""
def handle(self, *args, **options):
generate_invoices_for_zero_amount_orders_for_year(2018)
|
# -*- coding: UTF-8 -*-
from __future__ import print_function
from django.core.management.base import BaseCommand
from assopy import models as amodels
def generate_invoices_for_zero_amount_orders_for_year(year):
orders = amodels.Order.objects.filter(
created__year=year,
method='bank',
)
for o in orders:
if not o.complete():
continue
print ('Creating invoice for order %r' % o)
o.confirm_order(o.created)
o.complete()
class Command(BaseCommand):
"""
The system did not generate invoices for orders with a zero amount
in 2018 (e.g. as result of using discounts).
We have to add them after the fact.
"""
def handle(self, *args, **options):
generate_invoices_for_zero_amount_orders_for_year(2018)
Make sure that only zero amount orders are modified.
Note really necessary, since we don't have real bank orders,
but better safe than sorry.# -*- coding: UTF-8 -*-
from __future__ import print_function
from django.core.management.base import BaseCommand
from assopy import models as amodels
def generate_invoices_for_zero_amount_orders_for_year(year):
orders = amodels.Order.objects.filter(
created__year=year,
method='bank',
)
for o in orders:
if not o.complete():
continue
if o.total() > 0:
continue
print ('Creating invoice for order %r' % o)
o.confirm_order(o.created)
o.complete()
class Command(BaseCommand):
"""
The system did not generate invoices for orders with a zero amount
in 2018 (e.g. as result of using discounts).
We have to add them after the fact.
"""
def handle(self, *args, **options):
generate_invoices_for_zero_amount_orders_for_year(2018)
|
<commit_before># -*- coding: UTF-8 -*-
from __future__ import print_function
from django.core.management.base import BaseCommand
from assopy import models as amodels
def generate_invoices_for_zero_amount_orders_for_year(year):
orders = amodels.Order.objects.filter(
created__year=year,
method='bank',
)
for o in orders:
if not o.complete():
continue
print ('Creating invoice for order %r' % o)
o.confirm_order(o.created)
o.complete()
class Command(BaseCommand):
"""
The system did not generate invoices for orders with a zero amount
in 2018 (e.g. as result of using discounts).
We have to add them after the fact.
"""
def handle(self, *args, **options):
generate_invoices_for_zero_amount_orders_for_year(2018)
<commit_msg>Make sure that only zero amount orders are modified.
Note really necessary, since we don't have real bank orders,
but better safe than sorry.<commit_after># -*- coding: UTF-8 -*-
from __future__ import print_function
from django.core.management.base import BaseCommand
from assopy import models as amodels
def generate_invoices_for_zero_amount_orders_for_year(year):
orders = amodels.Order.objects.filter(
created__year=year,
method='bank',
)
for o in orders:
if not o.complete():
continue
if o.total() > 0:
continue
print ('Creating invoice for order %r' % o)
o.confirm_order(o.created)
o.complete()
class Command(BaseCommand):
"""
The system did not generate invoices for orders with a zero amount
in 2018 (e.g. as result of using discounts).
We have to add them after the fact.
"""
def handle(self, *args, **options):
generate_invoices_for_zero_amount_orders_for_year(2018)
|
7b01e17c03893f5a6470cdbac00948e95c216d45
|
keeper/exceptions.py
|
keeper/exceptions.py
|
"""Custom exceptions."""
__all__ = [
"ValidationError",
"Route53Error",
"S3Error",
"FastlyError",
"DasherError",
]
class ValidationError(ValueError):
"""Use a ValidationError whenever a API user provides bad input for PUT,
POST, or PATCH requests.
"""
class Route53Error(Exception):
"""Errors related to Route 53 usage."""
class S3Error(Exception):
"""Errors related to AWS S3 usage."""
class FastlyError(Exception):
""""Errors related to Fastly API usage."""
class DasherError(Exception):
"""Errors related to LTD Dasher."""
|
"""Custom exceptions."""
__all__ = [
"ValidationError",
"Route53Error",
"S3Error",
"FastlyError",
"DasherError",
]
class ValidationError(ValueError):
"""Use a ValidationError whenever a API user provides bad input for PUT,
POST, or PATCH requests.
"""
class Route53Error(Exception):
"""Errors related to Route 53 usage."""
class S3Error(Exception):
"""Errors related to AWS S3 usage."""
class FastlyError(Exception):
"""Errors related to Fastly API usage."""
class DasherError(Exception):
"""Errors related to LTD Dasher."""
|
Fix comment bug caught by latest black
|
Fix comment bug caught by latest black
There were four open quotes instead of the intended three.
|
Python
|
mit
|
lsst-sqre/ltd-keeper,lsst-sqre/ltd-keeper
|
"""Custom exceptions."""
__all__ = [
"ValidationError",
"Route53Error",
"S3Error",
"FastlyError",
"DasherError",
]
class ValidationError(ValueError):
"""Use a ValidationError whenever a API user provides bad input for PUT,
POST, or PATCH requests.
"""
class Route53Error(Exception):
"""Errors related to Route 53 usage."""
class S3Error(Exception):
"""Errors related to AWS S3 usage."""
class FastlyError(Exception):
""""Errors related to Fastly API usage."""
class DasherError(Exception):
"""Errors related to LTD Dasher."""
Fix comment bug caught by latest black
There were four open quotes instead of the intended three.
|
"""Custom exceptions."""
__all__ = [
"ValidationError",
"Route53Error",
"S3Error",
"FastlyError",
"DasherError",
]
class ValidationError(ValueError):
"""Use a ValidationError whenever a API user provides bad input for PUT,
POST, or PATCH requests.
"""
class Route53Error(Exception):
"""Errors related to Route 53 usage."""
class S3Error(Exception):
"""Errors related to AWS S3 usage."""
class FastlyError(Exception):
"""Errors related to Fastly API usage."""
class DasherError(Exception):
"""Errors related to LTD Dasher."""
|
<commit_before>"""Custom exceptions."""
__all__ = [
"ValidationError",
"Route53Error",
"S3Error",
"FastlyError",
"DasherError",
]
class ValidationError(ValueError):
"""Use a ValidationError whenever a API user provides bad input for PUT,
POST, or PATCH requests.
"""
class Route53Error(Exception):
"""Errors related to Route 53 usage."""
class S3Error(Exception):
"""Errors related to AWS S3 usage."""
class FastlyError(Exception):
""""Errors related to Fastly API usage."""
class DasherError(Exception):
"""Errors related to LTD Dasher."""
<commit_msg>Fix comment bug caught by latest black
There were four open quotes instead of the intended three.<commit_after>
|
"""Custom exceptions."""
__all__ = [
"ValidationError",
"Route53Error",
"S3Error",
"FastlyError",
"DasherError",
]
class ValidationError(ValueError):
"""Use a ValidationError whenever a API user provides bad input for PUT,
POST, or PATCH requests.
"""
class Route53Error(Exception):
"""Errors related to Route 53 usage."""
class S3Error(Exception):
"""Errors related to AWS S3 usage."""
class FastlyError(Exception):
"""Errors related to Fastly API usage."""
class DasherError(Exception):
"""Errors related to LTD Dasher."""
|
"""Custom exceptions."""
__all__ = [
"ValidationError",
"Route53Error",
"S3Error",
"FastlyError",
"DasherError",
]
class ValidationError(ValueError):
"""Use a ValidationError whenever a API user provides bad input for PUT,
POST, or PATCH requests.
"""
class Route53Error(Exception):
"""Errors related to Route 53 usage."""
class S3Error(Exception):
"""Errors related to AWS S3 usage."""
class FastlyError(Exception):
""""Errors related to Fastly API usage."""
class DasherError(Exception):
"""Errors related to LTD Dasher."""
Fix comment bug caught by latest black
There were four open quotes instead of the intended three."""Custom exceptions."""
__all__ = [
"ValidationError",
"Route53Error",
"S3Error",
"FastlyError",
"DasherError",
]
class ValidationError(ValueError):
"""Use a ValidationError whenever a API user provides bad input for PUT,
POST, or PATCH requests.
"""
class Route53Error(Exception):
"""Errors related to Route 53 usage."""
class S3Error(Exception):
"""Errors related to AWS S3 usage."""
class FastlyError(Exception):
"""Errors related to Fastly API usage."""
class DasherError(Exception):
"""Errors related to LTD Dasher."""
|
<commit_before>"""Custom exceptions."""
__all__ = [
"ValidationError",
"Route53Error",
"S3Error",
"FastlyError",
"DasherError",
]
class ValidationError(ValueError):
"""Use a ValidationError whenever a API user provides bad input for PUT,
POST, or PATCH requests.
"""
class Route53Error(Exception):
"""Errors related to Route 53 usage."""
class S3Error(Exception):
"""Errors related to AWS S3 usage."""
class FastlyError(Exception):
""""Errors related to Fastly API usage."""
class DasherError(Exception):
"""Errors related to LTD Dasher."""
<commit_msg>Fix comment bug caught by latest black
There were four open quotes instead of the intended three.<commit_after>"""Custom exceptions."""
__all__ = [
"ValidationError",
"Route53Error",
"S3Error",
"FastlyError",
"DasherError",
]
class ValidationError(ValueError):
"""Use a ValidationError whenever a API user provides bad input for PUT,
POST, or PATCH requests.
"""
class Route53Error(Exception):
"""Errors related to Route 53 usage."""
class S3Error(Exception):
"""Errors related to AWS S3 usage."""
class FastlyError(Exception):
"""Errors related to Fastly API usage."""
class DasherError(Exception):
"""Errors related to LTD Dasher."""
|
6bb43304fe08d299eadbd4977aa5db1f26eb90ce
|
build_tools/preseed_home.py
|
build_tools/preseed_home.py
|
import os
import shutil
import tempfile
temphome = tempfile.mkdtemp()
os.environ["KOLIBRI_HOME"] = temphome
from kolibri.main import initialize # noqa E402
from kolibri.deployment.default.sqlite_db_names import ( # noqa E402
ADDITIONAL_SQLITE_DATABASES,
)
from django.core.management import call_command # noqa E402
move_to = os.path.join(os.path.dirname(__file__), "..", "kolibri", "dist", "home")
shutil.rmtree(move_to, ignore_errors=True)
os.mkdir(move_to)
print("Generating preseeded home data in {}".format(temphome))
initialize()
call_command(
"deprovision", "--destroy-all-user-data", "--permanent-irrevocable-data-loss"
)
shutil.move(os.path.join(temphome, "db.sqlite3"), move_to)
for db_name in ADDITIONAL_SQLITE_DATABASES:
shutil.move(os.path.join(temphome, "{}.sqlite3".format(db_name)), move_to)
print("Moved all preseeded home data to {}".format(move_to))
shutil.rmtree(temphome)
|
import os
import shutil
import tempfile
temphome = tempfile.mkdtemp()
os.environ["KOLIBRI_HOME"] = temphome
from kolibri.main import initialize # noqa E402
from kolibri.deployment.default.sqlite_db_names import ( # noqa E402
ADDITIONAL_SQLITE_DATABASES,
)
from django.conf import settings # noqa E402
from django.core.management import call_command # noqa E402
move_to = os.path.join(os.path.dirname(__file__), "..", "kolibri", "dist", "home")
shutil.rmtree(move_to, ignore_errors=True)
os.mkdir(move_to)
print("Generating preseeded home data in {}".format(temphome))
initialize()
call_command(
"deprovision", "--destroy-all-user-data", "--permanent-irrevocable-data-loss"
)
for db_config in settings.DATABASES.values():
if db_config["ENGINE"] == "django.db.backends.sqlite3":
shutil.move(os.path.join(temphome, db_config["NAME"]), move_to)
print("Moved all preseeded home data to {}".format(move_to))
shutil.rmtree(temphome)
|
Make database preseeding sensitive to custom build options.
|
Make database preseeding sensitive to custom build options.
|
Python
|
mit
|
learningequality/kolibri,learningequality/kolibri,learningequality/kolibri,learningequality/kolibri
|
import os
import shutil
import tempfile
temphome = tempfile.mkdtemp()
os.environ["KOLIBRI_HOME"] = temphome
from kolibri.main import initialize # noqa E402
from kolibri.deployment.default.sqlite_db_names import ( # noqa E402
ADDITIONAL_SQLITE_DATABASES,
)
from django.core.management import call_command # noqa E402
move_to = os.path.join(os.path.dirname(__file__), "..", "kolibri", "dist", "home")
shutil.rmtree(move_to, ignore_errors=True)
os.mkdir(move_to)
print("Generating preseeded home data in {}".format(temphome))
initialize()
call_command(
"deprovision", "--destroy-all-user-data", "--permanent-irrevocable-data-loss"
)
shutil.move(os.path.join(temphome, "db.sqlite3"), move_to)
for db_name in ADDITIONAL_SQLITE_DATABASES:
shutil.move(os.path.join(temphome, "{}.sqlite3".format(db_name)), move_to)
print("Moved all preseeded home data to {}".format(move_to))
shutil.rmtree(temphome)
Make database preseeding sensitive to custom build options.
|
import os
import shutil
import tempfile
temphome = tempfile.mkdtemp()
os.environ["KOLIBRI_HOME"] = temphome
from kolibri.main import initialize # noqa E402
from kolibri.deployment.default.sqlite_db_names import ( # noqa E402
ADDITIONAL_SQLITE_DATABASES,
)
from django.conf import settings # noqa E402
from django.core.management import call_command # noqa E402
move_to = os.path.join(os.path.dirname(__file__), "..", "kolibri", "dist", "home")
shutil.rmtree(move_to, ignore_errors=True)
os.mkdir(move_to)
print("Generating preseeded home data in {}".format(temphome))
initialize()
call_command(
"deprovision", "--destroy-all-user-data", "--permanent-irrevocable-data-loss"
)
for db_config in settings.DATABASES.values():
if db_config["ENGINE"] == "django.db.backends.sqlite3":
shutil.move(os.path.join(temphome, db_config["NAME"]), move_to)
print("Moved all preseeded home data to {}".format(move_to))
shutil.rmtree(temphome)
|
<commit_before>import os
import shutil
import tempfile
temphome = tempfile.mkdtemp()
os.environ["KOLIBRI_HOME"] = temphome
from kolibri.main import initialize # noqa E402
from kolibri.deployment.default.sqlite_db_names import ( # noqa E402
ADDITIONAL_SQLITE_DATABASES,
)
from django.core.management import call_command # noqa E402
move_to = os.path.join(os.path.dirname(__file__), "..", "kolibri", "dist", "home")
shutil.rmtree(move_to, ignore_errors=True)
os.mkdir(move_to)
print("Generating preseeded home data in {}".format(temphome))
initialize()
call_command(
"deprovision", "--destroy-all-user-data", "--permanent-irrevocable-data-loss"
)
shutil.move(os.path.join(temphome, "db.sqlite3"), move_to)
for db_name in ADDITIONAL_SQLITE_DATABASES:
shutil.move(os.path.join(temphome, "{}.sqlite3".format(db_name)), move_to)
print("Moved all preseeded home data to {}".format(move_to))
shutil.rmtree(temphome)
<commit_msg>Make database preseeding sensitive to custom build options.<commit_after>
|
import os
import shutil
import tempfile
temphome = tempfile.mkdtemp()
os.environ["KOLIBRI_HOME"] = temphome
from kolibri.main import initialize # noqa E402
from kolibri.deployment.default.sqlite_db_names import ( # noqa E402
ADDITIONAL_SQLITE_DATABASES,
)
from django.conf import settings # noqa E402
from django.core.management import call_command # noqa E402
move_to = os.path.join(os.path.dirname(__file__), "..", "kolibri", "dist", "home")
shutil.rmtree(move_to, ignore_errors=True)
os.mkdir(move_to)
print("Generating preseeded home data in {}".format(temphome))
initialize()
call_command(
"deprovision", "--destroy-all-user-data", "--permanent-irrevocable-data-loss"
)
for db_config in settings.DATABASES.values():
if db_config["ENGINE"] == "django.db.backends.sqlite3":
shutil.move(os.path.join(temphome, db_config["NAME"]), move_to)
print("Moved all preseeded home data to {}".format(move_to))
shutil.rmtree(temphome)
|
import os
import shutil
import tempfile
temphome = tempfile.mkdtemp()
os.environ["KOLIBRI_HOME"] = temphome
from kolibri.main import initialize # noqa E402
from kolibri.deployment.default.sqlite_db_names import ( # noqa E402
ADDITIONAL_SQLITE_DATABASES,
)
from django.core.management import call_command # noqa E402
move_to = os.path.join(os.path.dirname(__file__), "..", "kolibri", "dist", "home")
shutil.rmtree(move_to, ignore_errors=True)
os.mkdir(move_to)
print("Generating preseeded home data in {}".format(temphome))
initialize()
call_command(
"deprovision", "--destroy-all-user-data", "--permanent-irrevocable-data-loss"
)
shutil.move(os.path.join(temphome, "db.sqlite3"), move_to)
for db_name in ADDITIONAL_SQLITE_DATABASES:
shutil.move(os.path.join(temphome, "{}.sqlite3".format(db_name)), move_to)
print("Moved all preseeded home data to {}".format(move_to))
shutil.rmtree(temphome)
Make database preseeding sensitive to custom build options.import os
import shutil
import tempfile
temphome = tempfile.mkdtemp()
os.environ["KOLIBRI_HOME"] = temphome
from kolibri.main import initialize # noqa E402
from kolibri.deployment.default.sqlite_db_names import ( # noqa E402
ADDITIONAL_SQLITE_DATABASES,
)
from django.conf import settings # noqa E402
from django.core.management import call_command # noqa E402
move_to = os.path.join(os.path.dirname(__file__), "..", "kolibri", "dist", "home")
shutil.rmtree(move_to, ignore_errors=True)
os.mkdir(move_to)
print("Generating preseeded home data in {}".format(temphome))
initialize()
call_command(
"deprovision", "--destroy-all-user-data", "--permanent-irrevocable-data-loss"
)
for db_config in settings.DATABASES.values():
if db_config["ENGINE"] == "django.db.backends.sqlite3":
shutil.move(os.path.join(temphome, db_config["NAME"]), move_to)
print("Moved all preseeded home data to {}".format(move_to))
shutil.rmtree(temphome)
|
<commit_before>import os
import shutil
import tempfile
temphome = tempfile.mkdtemp()
os.environ["KOLIBRI_HOME"] = temphome
from kolibri.main import initialize # noqa E402
from kolibri.deployment.default.sqlite_db_names import ( # noqa E402
ADDITIONAL_SQLITE_DATABASES,
)
from django.core.management import call_command # noqa E402
move_to = os.path.join(os.path.dirname(__file__), "..", "kolibri", "dist", "home")
shutil.rmtree(move_to, ignore_errors=True)
os.mkdir(move_to)
print("Generating preseeded home data in {}".format(temphome))
initialize()
call_command(
"deprovision", "--destroy-all-user-data", "--permanent-irrevocable-data-loss"
)
shutil.move(os.path.join(temphome, "db.sqlite3"), move_to)
for db_name in ADDITIONAL_SQLITE_DATABASES:
shutil.move(os.path.join(temphome, "{}.sqlite3".format(db_name)), move_to)
print("Moved all preseeded home data to {}".format(move_to))
shutil.rmtree(temphome)
<commit_msg>Make database preseeding sensitive to custom build options.<commit_after>import os
import shutil
import tempfile
temphome = tempfile.mkdtemp()
os.environ["KOLIBRI_HOME"] = temphome
from kolibri.main import initialize # noqa E402
from kolibri.deployment.default.sqlite_db_names import ( # noqa E402
ADDITIONAL_SQLITE_DATABASES,
)
from django.conf import settings # noqa E402
from django.core.management import call_command # noqa E402
move_to = os.path.join(os.path.dirname(__file__), "..", "kolibri", "dist", "home")
shutil.rmtree(move_to, ignore_errors=True)
os.mkdir(move_to)
print("Generating preseeded home data in {}".format(temphome))
initialize()
call_command(
"deprovision", "--destroy-all-user-data", "--permanent-irrevocable-data-loss"
)
for db_config in settings.DATABASES.values():
if db_config["ENGINE"] == "django.db.backends.sqlite3":
shutil.move(os.path.join(temphome, db_config["NAME"]), move_to)
print("Moved all preseeded home data to {}".format(move_to))
shutil.rmtree(temphome)
|
d559edb42f7a60958a4861e1cdb504e658f5f279
|
python2/setup.py
|
python2/setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='1.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='2.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
|
Bump version number for Python 3.2-matching release
|
Bump version number for Python 3.2-matching release
|
Python
|
bsd-2-clause
|
danielj7/pythonfutures,danielj7/pythonfutures
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='1.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
Bump version number for Python 3.2-matching release
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='2.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='1.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
<commit_msg>Bump version number for Python 3.2-matching release<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='2.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='1.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
Bump version number for Python 3.2-matching release#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='2.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='1.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
<commit_msg>Bump version number for Python 3.2-matching release<commit_after>#!/usr/bin/env python
from distutils.core import setup
setup(name='futures',
version='2.0',
description='Java-style futures implementation in Python 2.x',
author='Brian Quinlan',
author_email='brian@sweetapp.com',
url='http://code.google.com/p/pythonfutures',
download_url='http://pypi.python.org/pypi/futures3/',
packages=['futures'],
license='BSD',
classifiers=['License :: OSI Approved :: BSD License',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2']
)
|
53331e43c2a95f45aaaa91f2c0fe204fd4d8d530
|
keras/constraints.py
|
keras/constraints.py
|
from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'constraint', instantiate=True)
|
from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier, kwargs=None):
return get_from_module(identifier, globals(), 'constraint', instantiate=True, kwargs=kwargs)
|
Allow constraint getter to take parameter dict
|
Allow constraint getter to take parameter dict
|
Python
|
mit
|
rudaoshi/keras,mikekestemont/keras,jonberliner/keras,xurantju/keras,Aureliu/keras,DLlearn/keras,cvfish/keras,stephenbalaban/keras,chenych11/keras,fmacias64/keras,kuza55/keras,ashhher3/keras,sjuvekar/keras,OlafLee/keras,keskarnitish/keras,iamtrask/keras,wubr2000/keras,rlkelly/keras,saurav111/keras,johmathe/keras,untom/keras,llcao/keras,keras-team/keras,ledbetdr/keras,happyboy310/keras,cheng6076/keras,wxs/keras,pthaike/keras,bboalimoe/keras,why11002526/keras,ypkang/keras,zhmz90/keras,DeepGnosis/keras,zxsted/keras,JasonTam/keras,daviddiazvico/keras,ml-lab/keras,EderSantana/keras,yingzha/keras,abayowbo/keras,jiumem/keras,gavinmh/keras,gamer13/keras,nzer0/keras,zhangxujinsh/keras,dribnet/keras,jayhetee/keras,harshhemani/keras,relh/keras,rodrigob/keras,3dconv/keras,amy12xx/keras,printedheart/keras,dhruvparamhans/keras,imcomking/Convolutional-GRU-keras-extension-,jasonyaw/keras,danielforsyth/keras,kemaswill/keras,florentchandelier/keras,asampat3090/keras,zxytim/keras,navyjeff/keras,nt/keras,marchick209/keras,kod3r/keras,keras-team/keras,Smerity/keras,xiaoda99/keras,jbolinge/keras,ekamioka/keras,eulerreich/keras,bottler/keras,brainwater/keras,nehz/keras,hhaoyan/keras,nebw/keras,meanmee/keras,tencrance/keras,LIBOTAO/keras,iScienceLuvr/keras,Cadene/keras,pjadzinsky/keras,dxj19831029/keras,jimgoo/keras,jalexvig/keras,dolaameng/keras,MagicSen/keras,ogrisel/keras,Yingmin-Li/keras,vseledkin/keras
|
from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'constraint', instantiate=True)Allow constraint getter to take parameter dict
|
from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier, kwargs=None):
return get_from_module(identifier, globals(), 'constraint', instantiate=True, kwargs=kwargs)
|
<commit_before>from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'constraint', instantiate=True)<commit_msg>Allow constraint getter to take parameter dict<commit_after>
|
from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier, kwargs=None):
return get_from_module(identifier, globals(), 'constraint', instantiate=True, kwargs=kwargs)
|
from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'constraint', instantiate=True)Allow constraint getter to take parameter dictfrom __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier, kwargs=None):
return get_from_module(identifier, globals(), 'constraint', instantiate=True, kwargs=kwargs)
|
<commit_before>from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'constraint', instantiate=True)<commit_msg>Allow constraint getter to take parameter dict<commit_after>from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier, kwargs=None):
return get_from_module(identifier, globals(), 'constraint', instantiate=True, kwargs=kwargs)
|
461dc9a54ae2eb8bc5f1a07557130d5251187573
|
install_deps.py
|
install_deps.py
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
Correct for None appearing in requirements list
|
Correct for None appearing in requirements list
|
Python
|
bsd-3-clause
|
Neurita/boyle
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
Correct for None appearing in requirements list
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
<commit_before>#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
<commit_msg>Correct for None appearing in requirements list<commit_after>
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
Correct for None appearing in requirements list#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
<commit_before>#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
<commit_msg>Correct for None appearing in requirements list<commit_after>#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
60abdfa788ef40b5bbd34ea3e332089b86b61c88
|
robokassa/migrations/0003_load_source_type.py
|
robokassa/migrations/0003_load_source_type.py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
from django.core.management import call_command
from os.path import dirname, join
fixture_path = join(dirname(__file__), "payment.sourcetype.json")
print "Loading fixture from %s" % fixture_path
call_command("loaddata", fixture_path)
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
Remove code that depends on a current model state.
|
Remove code that depends on a current model state.
|
Python
|
mit
|
a-iv/django-oscar-robokassa
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
from django.core.management import call_command
from os.path import dirname, join
fixture_path = join(dirname(__file__), "payment.sourcetype.json")
print "Loading fixture from %s" % fixture_path
call_command("loaddata", fixture_path)
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
Remove code that depends on a current model state.
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
<commit_before># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
from django.core.management import call_command
from os.path import dirname, join
fixture_path = join(dirname(__file__), "payment.sourcetype.json")
print "Loading fixture from %s" % fixture_path
call_command("loaddata", fixture_path)
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
<commit_msg>Remove code that depends on a current model state.<commit_after>
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
from django.core.management import call_command
from os.path import dirname, join
fixture_path = join(dirname(__file__), "payment.sourcetype.json")
print "Loading fixture from %s" % fixture_path
call_command("loaddata", fixture_path)
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
Remove code that depends on a current model state.# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
<commit_before># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
from django.core.management import call_command
from os.path import dirname, join
fixture_path = join(dirname(__file__), "payment.sourcetype.json")
print "Loading fixture from %s" % fixture_path
call_command("loaddata", fixture_path)
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
<commit_msg>Remove code that depends on a current model state.<commit_after># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
u'robokassa.successnotification': {
'InvId': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'Meta': {'object_name': 'SuccessNotification'},
'OutSum': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['robokassa']
symmetrical = True
|
f4a919b698788dcec8411665290a83537e962413
|
django_alexa/api/fields.py
|
django_alexa/api/fields.py
|
'''These are the only serializer fields support by the Alexa skills kit'''
from rest_framework.serializers import CharField, IntegerField, DateField, TimeField, DurationField, ChoiceField # flake8: noqa
# This maps serializer fields to the amazon intent slot types
INTENT_SLOT_TYPES = {
"CharField": "AMAZON.LITERAL",
"IntegerField": "AMAZON.NUMBER",
"DateField": "AMAZON.DATE",
"TimeField": "AMAZON.TIME",
"DurationField": "AMAZON.DURATION",
}
# Choicefield does not have a amazon mapping because it represents
# a custom slot type which has but has to have a defined choice set in the
# alexa skills kit interaction model
VALID_SLOT_TYPES = INTENT_SLOT_TYPES.keys() + [
"ChoiceField"
]
|
'''These are the only serializer fields support by the Alexa skills kit'''
from rest_framework.serializers import CharField, IntegerField, DateField, TimeField, DurationField, ChoiceField # flake8: noqa
# This maps serializer fields to the amazon intent slot types
INTENT_SLOT_TYPES = {
"CharField": "AMAZON.LITERAL",
"IntegerField": "AMAZON.NUMBER",
"DateField": "AMAZON.DATE",
"TimeField": "AMAZON.TIME",
"DurationField": "AMAZON.DURATION",
"USCityField": "AMAZON.US_CITY",
"FirstNameField": "AMAZON.US_FIRST_NAME",
"USStateField": "AMAZON.US_STATE",
"FourDigitField": "AMAZON.FOUR_DIGIT_NUMBER",
}
# Choicefield does not have a amazon mapping because it represents
# a custom slot type which has but has to have a defined choice set in the
# alexa skills kit interaction model
VALID_SLOT_TYPES = INTENT_SLOT_TYPES.keys() + [
"ChoiceField"
]
class USCityField(CharField):
def __init__(self, **kwargs):
super(USCityField, self).__init__(**kwargs)
class FirstNameField(CharField):
def __init__(self, **kwargs):
super(FirstNameField, self).__init__(**kwargs)
class USStateField(CharField):
def __init__(self, **kwargs):
super(USStateField, self).__init__(**kwargs)
class FourDigitField(IntegerField):
def __init__(self, **kwargs):
super(FourDigitField, self).__init__(**kwargs)
|
Add support for new slot types
|
Add support for new slot types
|
Python
|
mit
|
rocktavious/django-alexa,pycontribs/django-alexa
|
'''These are the only serializer fields support by the Alexa skills kit'''
from rest_framework.serializers import CharField, IntegerField, DateField, TimeField, DurationField, ChoiceField # flake8: noqa
# This maps serializer fields to the amazon intent slot types
INTENT_SLOT_TYPES = {
"CharField": "AMAZON.LITERAL",
"IntegerField": "AMAZON.NUMBER",
"DateField": "AMAZON.DATE",
"TimeField": "AMAZON.TIME",
"DurationField": "AMAZON.DURATION",
}
# Choicefield does not have a amazon mapping because it represents
# a custom slot type which has but has to have a defined choice set in the
# alexa skills kit interaction model
VALID_SLOT_TYPES = INTENT_SLOT_TYPES.keys() + [
"ChoiceField"
]
Add support for new slot types
|
'''These are the only serializer fields support by the Alexa skills kit'''
from rest_framework.serializers import CharField, IntegerField, DateField, TimeField, DurationField, ChoiceField # flake8: noqa
# This maps serializer fields to the amazon intent slot types
INTENT_SLOT_TYPES = {
"CharField": "AMAZON.LITERAL",
"IntegerField": "AMAZON.NUMBER",
"DateField": "AMAZON.DATE",
"TimeField": "AMAZON.TIME",
"DurationField": "AMAZON.DURATION",
"USCityField": "AMAZON.US_CITY",
"FirstNameField": "AMAZON.US_FIRST_NAME",
"USStateField": "AMAZON.US_STATE",
"FourDigitField": "AMAZON.FOUR_DIGIT_NUMBER",
}
# Choicefield does not have a amazon mapping because it represents
# a custom slot type which has but has to have a defined choice set in the
# alexa skills kit interaction model
VALID_SLOT_TYPES = INTENT_SLOT_TYPES.keys() + [
"ChoiceField"
]
class USCityField(CharField):
def __init__(self, **kwargs):
super(USCityField, self).__init__(**kwargs)
class FirstNameField(CharField):
def __init__(self, **kwargs):
super(FirstNameField, self).__init__(**kwargs)
class USStateField(CharField):
def __init__(self, **kwargs):
super(USStateField, self).__init__(**kwargs)
class FourDigitField(IntegerField):
def __init__(self, **kwargs):
super(FourDigitField, self).__init__(**kwargs)
|
<commit_before>'''These are the only serializer fields support by the Alexa skills kit'''
from rest_framework.serializers import CharField, IntegerField, DateField, TimeField, DurationField, ChoiceField # flake8: noqa
# This maps serializer fields to the amazon intent slot types
INTENT_SLOT_TYPES = {
"CharField": "AMAZON.LITERAL",
"IntegerField": "AMAZON.NUMBER",
"DateField": "AMAZON.DATE",
"TimeField": "AMAZON.TIME",
"DurationField": "AMAZON.DURATION",
}
# Choicefield does not have a amazon mapping because it represents
# a custom slot type which has but has to have a defined choice set in the
# alexa skills kit interaction model
VALID_SLOT_TYPES = INTENT_SLOT_TYPES.keys() + [
"ChoiceField"
]
<commit_msg>Add support for new slot types<commit_after>
|
'''These are the only serializer fields support by the Alexa skills kit'''
from rest_framework.serializers import CharField, IntegerField, DateField, TimeField, DurationField, ChoiceField # flake8: noqa
# This maps serializer fields to the amazon intent slot types
INTENT_SLOT_TYPES = {
"CharField": "AMAZON.LITERAL",
"IntegerField": "AMAZON.NUMBER",
"DateField": "AMAZON.DATE",
"TimeField": "AMAZON.TIME",
"DurationField": "AMAZON.DURATION",
"USCityField": "AMAZON.US_CITY",
"FirstNameField": "AMAZON.US_FIRST_NAME",
"USStateField": "AMAZON.US_STATE",
"FourDigitField": "AMAZON.FOUR_DIGIT_NUMBER",
}
# Choicefield does not have a amazon mapping because it represents
# a custom slot type which has but has to have a defined choice set in the
# alexa skills kit interaction model
VALID_SLOT_TYPES = INTENT_SLOT_TYPES.keys() + [
"ChoiceField"
]
class USCityField(CharField):
def __init__(self, **kwargs):
super(USCityField, self).__init__(**kwargs)
class FirstNameField(CharField):
def __init__(self, **kwargs):
super(FirstNameField, self).__init__(**kwargs)
class USStateField(CharField):
def __init__(self, **kwargs):
super(USStateField, self).__init__(**kwargs)
class FourDigitField(IntegerField):
def __init__(self, **kwargs):
super(FourDigitField, self).__init__(**kwargs)
|
'''These are the only serializer fields support by the Alexa skills kit'''
from rest_framework.serializers import CharField, IntegerField, DateField, TimeField, DurationField, ChoiceField # flake8: noqa
# This maps serializer fields to the amazon intent slot types
INTENT_SLOT_TYPES = {
"CharField": "AMAZON.LITERAL",
"IntegerField": "AMAZON.NUMBER",
"DateField": "AMAZON.DATE",
"TimeField": "AMAZON.TIME",
"DurationField": "AMAZON.DURATION",
}
# Choicefield does not have a amazon mapping because it represents
# a custom slot type which has but has to have a defined choice set in the
# alexa skills kit interaction model
VALID_SLOT_TYPES = INTENT_SLOT_TYPES.keys() + [
"ChoiceField"
]
Add support for new slot types'''These are the only serializer fields support by the Alexa skills kit'''
from rest_framework.serializers import CharField, IntegerField, DateField, TimeField, DurationField, ChoiceField # flake8: noqa
# This maps serializer fields to the amazon intent slot types
INTENT_SLOT_TYPES = {
"CharField": "AMAZON.LITERAL",
"IntegerField": "AMAZON.NUMBER",
"DateField": "AMAZON.DATE",
"TimeField": "AMAZON.TIME",
"DurationField": "AMAZON.DURATION",
"USCityField": "AMAZON.US_CITY",
"FirstNameField": "AMAZON.US_FIRST_NAME",
"USStateField": "AMAZON.US_STATE",
"FourDigitField": "AMAZON.FOUR_DIGIT_NUMBER",
}
# Choicefield does not have a amazon mapping because it represents
# a custom slot type which has but has to have a defined choice set in the
# alexa skills kit interaction model
VALID_SLOT_TYPES = INTENT_SLOT_TYPES.keys() + [
"ChoiceField"
]
class USCityField(CharField):
def __init__(self, **kwargs):
super(USCityField, self).__init__(**kwargs)
class FirstNameField(CharField):
def __init__(self, **kwargs):
super(FirstNameField, self).__init__(**kwargs)
class USStateField(CharField):
def __init__(self, **kwargs):
super(USStateField, self).__init__(**kwargs)
class FourDigitField(IntegerField):
def __init__(self, **kwargs):
super(FourDigitField, self).__init__(**kwargs)
|
<commit_before>'''These are the only serializer fields support by the Alexa skills kit'''
from rest_framework.serializers import CharField, IntegerField, DateField, TimeField, DurationField, ChoiceField # flake8: noqa
# This maps serializer fields to the amazon intent slot types
INTENT_SLOT_TYPES = {
"CharField": "AMAZON.LITERAL",
"IntegerField": "AMAZON.NUMBER",
"DateField": "AMAZON.DATE",
"TimeField": "AMAZON.TIME",
"DurationField": "AMAZON.DURATION",
}
# Choicefield does not have a amazon mapping because it represents
# a custom slot type which has but has to have a defined choice set in the
# alexa skills kit interaction model
VALID_SLOT_TYPES = INTENT_SLOT_TYPES.keys() + [
"ChoiceField"
]
<commit_msg>Add support for new slot types<commit_after>'''These are the only serializer fields support by the Alexa skills kit'''
from rest_framework.serializers import CharField, IntegerField, DateField, TimeField, DurationField, ChoiceField # flake8: noqa
# This maps serializer fields to the amazon intent slot types
INTENT_SLOT_TYPES = {
"CharField": "AMAZON.LITERAL",
"IntegerField": "AMAZON.NUMBER",
"DateField": "AMAZON.DATE",
"TimeField": "AMAZON.TIME",
"DurationField": "AMAZON.DURATION",
"USCityField": "AMAZON.US_CITY",
"FirstNameField": "AMAZON.US_FIRST_NAME",
"USStateField": "AMAZON.US_STATE",
"FourDigitField": "AMAZON.FOUR_DIGIT_NUMBER",
}
# Choicefield does not have a amazon mapping because it represents
# a custom slot type which has but has to have a defined choice set in the
# alexa skills kit interaction model
VALID_SLOT_TYPES = INTENT_SLOT_TYPES.keys() + [
"ChoiceField"
]
class USCityField(CharField):
def __init__(self, **kwargs):
super(USCityField, self).__init__(**kwargs)
class FirstNameField(CharField):
def __init__(self, **kwargs):
super(FirstNameField, self).__init__(**kwargs)
class USStateField(CharField):
def __init__(self, **kwargs):
super(USStateField, self).__init__(**kwargs)
class FourDigitField(IntegerField):
def __init__(self, **kwargs):
super(FourDigitField, self).__init__(**kwargs)
|
f908501860858311536a3fef03fda7a632ce5412
|
djohno/tests/test_utils.py
|
djohno/tests/test_utils.py
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('foo@bar.com'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <foo@bar.com>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <foo@bar.com>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('foo@bar.com'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <foo@bar.com>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <foo@bar.com>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
"""
Ensure we can correctly get the version of a few simple apps
(Baz and Moo are bundled in djohno.test, and set up in
test_settings.py).
"""
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
Add a missing test description
|
Add a missing test description
|
Python
|
bsd-2-clause
|
dominicrodger/djohno,dominicrodger/djohno
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('foo@bar.com'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <foo@bar.com>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <foo@bar.com>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
Add a missing test description
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('foo@bar.com'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <foo@bar.com>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <foo@bar.com>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
"""
Ensure we can correctly get the version of a few simple apps
(Baz and Moo are bundled in djohno.test, and set up in
test_settings.py).
"""
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
<commit_before>from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('foo@bar.com'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <foo@bar.com>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <foo@bar.com>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
<commit_msg>Add a missing test description<commit_after>
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('foo@bar.com'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <foo@bar.com>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <foo@bar.com>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
"""
Ensure we can correctly get the version of a few simple apps
(Baz and Moo are bundled in djohno.test, and set up in
test_settings.py).
"""
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('foo@bar.com'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <foo@bar.com>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <foo@bar.com>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
Add a missing test descriptionfrom django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('foo@bar.com'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <foo@bar.com>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <foo@bar.com>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
"""
Ensure we can correctly get the version of a few simple apps
(Baz and Moo are bundled in djohno.test, and set up in
test_settings.py).
"""
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
<commit_before>from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('foo@bar.com'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <foo@bar.com>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <foo@bar.com>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
<commit_msg>Add a missing test description<commit_after>from django.core.exceptions import ValidationError
from django.test import TestCase
import djohno
from djohno.utils import (
is_pretty_from_address,
get_app_versions
)
class DjohnoUtilTests(TestCase):
def test_is_pretty_from_address_fails_on_bare_address(self):
"""
Ensure normal email addresses aren't parsed as being "pretty".
"""
self.assertFalse(is_pretty_from_address('foo@bar.com'))
def test_is_pretty_from_succeeds_on_pretty_address(self):
"""
Ensure pretty addresses (e.g. Foo <foo@bar.com>) are parsed as
being "pretty".
"""
self.assertTrue(is_pretty_from_address('Foo <foo@bar.com>'))
def test_is_pretty_from_raises_validation_error_on_bad_input(self):
"""
Ensure invalid email addresses (e.g. "hello") raise
ValidationError if given invalid inputs.
"""
with self.assertRaises(ValidationError):
self.assertTrue(is_pretty_from_address('hello'))
def test_get_installed_app_versions(self):
"""
Ensure we can correctly get the version of a few simple apps
(Baz and Moo are bundled in djohno.test, and set up in
test_settings.py).
"""
versions = get_app_versions()
self.assertEqual(versions['Djohno']['installed'], djohno.__version__)
self.assertEqual(versions['Baz']['installed'], '0.4.2')
self.assertEqual(versions['Moo']['installed'], '0.42')
|
3a3d1f5b2c376de3e979aa17d11505dc66421852
|
test_journal.py
|
test_journal.py
|
# -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
with closing(connect_db()) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@pytest.fixture(scope='session')
def test_app():
"""configure our app for use in testing"""
app.config['DATABASE'] = TEST_DSN
app.config['TESTING'] = True
|
# -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
with closing(connect_db()) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@pytest.fixture(scope='session')
def test_app():
"""configure our app for use in testing"""
app.config['DATABASE'] = TEST_DSN
app.config['TESTING'] = True
@pytest.fixture(scope='session')
def db(test_app, request):
"""initialize the entries table and drop it when finished"""
init_db()
def cleanup():
clear_db()
request.addfinalizer(cleanup)
|
Add db() to initialize a table and drop when finished
|
Add db() to initialize a table and drop when finished
|
Python
|
mit
|
sazlin/learning_journal
|
# -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
with closing(connect_db()) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@pytest.fixture(scope='session')
def test_app():
"""configure our app for use in testing"""
app.config['DATABASE'] = TEST_DSN
app.config['TESTING'] = True
Add db() to initialize a table and drop when finished
|
# -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
with closing(connect_db()) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@pytest.fixture(scope='session')
def test_app():
"""configure our app for use in testing"""
app.config['DATABASE'] = TEST_DSN
app.config['TESTING'] = True
@pytest.fixture(scope='session')
def db(test_app, request):
"""initialize the entries table and drop it when finished"""
init_db()
def cleanup():
clear_db()
request.addfinalizer(cleanup)
|
<commit_before># -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
with closing(connect_db()) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@pytest.fixture(scope='session')
def test_app():
"""configure our app for use in testing"""
app.config['DATABASE'] = TEST_DSN
app.config['TESTING'] = True
<commit_msg>Add db() to initialize a table and drop when finished<commit_after>
|
# -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
with closing(connect_db()) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@pytest.fixture(scope='session')
def test_app():
"""configure our app for use in testing"""
app.config['DATABASE'] = TEST_DSN
app.config['TESTING'] = True
@pytest.fixture(scope='session')
def db(test_app, request):
"""initialize the entries table and drop it when finished"""
init_db()
def cleanup():
clear_db()
request.addfinalizer(cleanup)
|
# -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
with closing(connect_db()) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@pytest.fixture(scope='session')
def test_app():
"""configure our app for use in testing"""
app.config['DATABASE'] = TEST_DSN
app.config['TESTING'] = True
Add db() to initialize a table and drop when finished# -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
with closing(connect_db()) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@pytest.fixture(scope='session')
def test_app():
"""configure our app for use in testing"""
app.config['DATABASE'] = TEST_DSN
app.config['TESTING'] = True
@pytest.fixture(scope='session')
def db(test_app, request):
"""initialize the entries table and drop it when finished"""
init_db()
def cleanup():
clear_db()
request.addfinalizer(cleanup)
|
<commit_before># -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
with closing(connect_db()) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@pytest.fixture(scope='session')
def test_app():
"""configure our app for use in testing"""
app.config['DATABASE'] = TEST_DSN
app.config['TESTING'] = True
<commit_msg>Add db() to initialize a table and drop when finished<commit_after># -*- coding: utf-8 -*-
from contextlib import closing
import pytest
from journal import app
from journal import connect_db
from journal import get_database_connection
from journal import init_db
TEST_DSN = 'dbname=test_learning_journal'
def clear_db():
with closing(connect_db()) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@pytest.fixture(scope='session')
def test_app():
"""configure our app for use in testing"""
app.config['DATABASE'] = TEST_DSN
app.config['TESTING'] = True
@pytest.fixture(scope='session')
def db(test_app, request):
"""initialize the entries table and drop it when finished"""
init_db()
def cleanup():
clear_db()
request.addfinalizer(cleanup)
|
557fddcf26ef52ccea3761b000e5a94e3f551a78
|
pygraphc/optimization/SimulatedAnnealing.py
|
pygraphc/optimization/SimulatedAnnealing.py
|
from random import choice
from pygraphc.evaluation.InternalEvaluation import InternalEvaluation
class SimulatedAnnealing(object):
def __init__(self, method, tmin, tmax, parameter, energy_type):
"""The constructor of Simulated Annealing method.
Parameters
----------
method : str
The method to run with simulated annealing.
tmin : float
Minimum temperature.
tmax : float
Maximum temperature.
parameter : dict
Dictionary of parameter. Key: parameter, value: list.
energy_type : str
Objective function of simulated annealing. We use internal evaluation for graph clustering.
"""
self.method = method
self.Tmin = tmin
self.Tmax = tmax
self.parameter = parameter
self.energy_type = energy_type
def __get_parameter(self):
chosen_parameter = {}
for param, value in self.parameter.iteritems():
chosen_parameter[param] = choice(value)
return chosen_parameter
def __run_method(self):
if self.method == 'max_clique':
pass
def __get_energy(self, graph, clusters):
energy = 0.
if self.energy_type == 'silhoutte':
energy = InternalEvaluation.get_silhoutte_index(graph, clusters)
return energy
|
from random import choice
from pygraphc.evaluation.InternalEvaluation import InternalEvaluation
class SimulatedAnnealing(object):
def __init__(self, tmin, tmax, alpha, parameters, energy_type, max_iteration):
"""The constructor of Simulated Annealing method.
Parameters
----------
tmin : float
Minimum temperature.
tmax : float
Maximum temperature.
alpha : float
Cooling factor. Tnew = alpha * Tcurrent
parameters : dict
Dictionary of parameters. Key: parameters, value: list.
energy_type : str
Objective function of simulated annealing. We use internal evaluation for graph clustering.
max_iteration : int
Maximum iteration for simulated annealing.
"""
self.Tmin = tmin
self.Tmax = tmax
self.alpha = alpha
self.parameters = parameters
self.energy_type = energy_type
self.max_iteration = max_iteration
def get_parameter(self):
"""Get random parameter based on given range.
Returns
-------
random_parameter : dict[str, float]
Dictionary of random parameters.
"""
random_parameter = {}
for param, value in self.parameters.iteritems():
random_parameter[param] = choice(value)
return random_parameter
def get_temperature(self, current_temperature):
new_temperature = self.alpha * current_temperature
return new_temperature
def get_energy(self, graph, clusters):
energy = 0.
if self.energy_type == 'silhoutte':
energy = InternalEvaluation.get_silhoutte_index(graph, clusters)
return energy
|
Edit the parameters. Not fix yet
|
Edit the parameters. Not fix yet
|
Python
|
mit
|
studiawan/pygraphc
|
from random import choice
from pygraphc.evaluation.InternalEvaluation import InternalEvaluation
class SimulatedAnnealing(object):
def __init__(self, method, tmin, tmax, parameter, energy_type):
"""The constructor of Simulated Annealing method.
Parameters
----------
method : str
The method to run with simulated annealing.
tmin : float
Minimum temperature.
tmax : float
Maximum temperature.
parameter : dict
Dictionary of parameter. Key: parameter, value: list.
energy_type : str
Objective function of simulated annealing. We use internal evaluation for graph clustering.
"""
self.method = method
self.Tmin = tmin
self.Tmax = tmax
self.parameter = parameter
self.energy_type = energy_type
def __get_parameter(self):
chosen_parameter = {}
for param, value in self.parameter.iteritems():
chosen_parameter[param] = choice(value)
return chosen_parameter
def __run_method(self):
if self.method == 'max_clique':
pass
def __get_energy(self, graph, clusters):
energy = 0.
if self.energy_type == 'silhoutte':
energy = InternalEvaluation.get_silhoutte_index(graph, clusters)
return energy
Edit the parameters. Not fix yet
|
from random import choice
from pygraphc.evaluation.InternalEvaluation import InternalEvaluation
class SimulatedAnnealing(object):
def __init__(self, tmin, tmax, alpha, parameters, energy_type, max_iteration):
"""The constructor of Simulated Annealing method.
Parameters
----------
tmin : float
Minimum temperature.
tmax : float
Maximum temperature.
alpha : float
Cooling factor. Tnew = alpha * Tcurrent
parameters : dict
Dictionary of parameters. Key: parameters, value: list.
energy_type : str
Objective function of simulated annealing. We use internal evaluation for graph clustering.
max_iteration : int
Maximum iteration for simulated annealing.
"""
self.Tmin = tmin
self.Tmax = tmax
self.alpha = alpha
self.parameters = parameters
self.energy_type = energy_type
self.max_iteration = max_iteration
def get_parameter(self):
"""Get random parameter based on given range.
Returns
-------
random_parameter : dict[str, float]
Dictionary of random parameters.
"""
random_parameter = {}
for param, value in self.parameters.iteritems():
random_parameter[param] = choice(value)
return random_parameter
def get_temperature(self, current_temperature):
new_temperature = self.alpha * current_temperature
return new_temperature
def get_energy(self, graph, clusters):
energy = 0.
if self.energy_type == 'silhoutte':
energy = InternalEvaluation.get_silhoutte_index(graph, clusters)
return energy
|
<commit_before>from random import choice
from pygraphc.evaluation.InternalEvaluation import InternalEvaluation
class SimulatedAnnealing(object):
def __init__(self, method, tmin, tmax, parameter, energy_type):
"""The constructor of Simulated Annealing method.
Parameters
----------
method : str
The method to run with simulated annealing.
tmin : float
Minimum temperature.
tmax : float
Maximum temperature.
parameter : dict
Dictionary of parameter. Key: parameter, value: list.
energy_type : str
Objective function of simulated annealing. We use internal evaluation for graph clustering.
"""
self.method = method
self.Tmin = tmin
self.Tmax = tmax
self.parameter = parameter
self.energy_type = energy_type
def __get_parameter(self):
chosen_parameter = {}
for param, value in self.parameter.iteritems():
chosen_parameter[param] = choice(value)
return chosen_parameter
def __run_method(self):
if self.method == 'max_clique':
pass
def __get_energy(self, graph, clusters):
energy = 0.
if self.energy_type == 'silhoutte':
energy = InternalEvaluation.get_silhoutte_index(graph, clusters)
return energy
<commit_msg>Edit the parameters. Not fix yet<commit_after>
|
from random import choice
from pygraphc.evaluation.InternalEvaluation import InternalEvaluation
class SimulatedAnnealing(object):
def __init__(self, tmin, tmax, alpha, parameters, energy_type, max_iteration):
"""The constructor of Simulated Annealing method.
Parameters
----------
tmin : float
Minimum temperature.
tmax : float
Maximum temperature.
alpha : float
Cooling factor. Tnew = alpha * Tcurrent
parameters : dict
Dictionary of parameters. Key: parameters, value: list.
energy_type : str
Objective function of simulated annealing. We use internal evaluation for graph clustering.
max_iteration : int
Maximum iteration for simulated annealing.
"""
self.Tmin = tmin
self.Tmax = tmax
self.alpha = alpha
self.parameters = parameters
self.energy_type = energy_type
self.max_iteration = max_iteration
def get_parameter(self):
"""Get random parameter based on given range.
Returns
-------
random_parameter : dict[str, float]
Dictionary of random parameters.
"""
random_parameter = {}
for param, value in self.parameters.iteritems():
random_parameter[param] = choice(value)
return random_parameter
def get_temperature(self, current_temperature):
new_temperature = self.alpha * current_temperature
return new_temperature
def get_energy(self, graph, clusters):
energy = 0.
if self.energy_type == 'silhoutte':
energy = InternalEvaluation.get_silhoutte_index(graph, clusters)
return energy
|
from random import choice
from pygraphc.evaluation.InternalEvaluation import InternalEvaluation
class SimulatedAnnealing(object):
def __init__(self, method, tmin, tmax, parameter, energy_type):
"""The constructor of Simulated Annealing method.
Parameters
----------
method : str
The method to run with simulated annealing.
tmin : float
Minimum temperature.
tmax : float
Maximum temperature.
parameter : dict
Dictionary of parameter. Key: parameter, value: list.
energy_type : str
Objective function of simulated annealing. We use internal evaluation for graph clustering.
"""
self.method = method
self.Tmin = tmin
self.Tmax = tmax
self.parameter = parameter
self.energy_type = energy_type
def __get_parameter(self):
chosen_parameter = {}
for param, value in self.parameter.iteritems():
chosen_parameter[param] = choice(value)
return chosen_parameter
def __run_method(self):
if self.method == 'max_clique':
pass
def __get_energy(self, graph, clusters):
energy = 0.
if self.energy_type == 'silhoutte':
energy = InternalEvaluation.get_silhoutte_index(graph, clusters)
return energy
Edit the parameters. Not fix yetfrom random import choice
from pygraphc.evaluation.InternalEvaluation import InternalEvaluation
class SimulatedAnnealing(object):
def __init__(self, tmin, tmax, alpha, parameters, energy_type, max_iteration):
"""The constructor of Simulated Annealing method.
Parameters
----------
tmin : float
Minimum temperature.
tmax : float
Maximum temperature.
alpha : float
Cooling factor. Tnew = alpha * Tcurrent
parameters : dict
Dictionary of parameters. Key: parameters, value: list.
energy_type : str
Objective function of simulated annealing. We use internal evaluation for graph clustering.
max_iteration : int
Maximum iteration for simulated annealing.
"""
self.Tmin = tmin
self.Tmax = tmax
self.alpha = alpha
self.parameters = parameters
self.energy_type = energy_type
self.max_iteration = max_iteration
def get_parameter(self):
"""Get random parameter based on given range.
Returns
-------
random_parameter : dict[str, float]
Dictionary of random parameters.
"""
random_parameter = {}
for param, value in self.parameters.iteritems():
random_parameter[param] = choice(value)
return random_parameter
def get_temperature(self, current_temperature):
new_temperature = self.alpha * current_temperature
return new_temperature
def get_energy(self, graph, clusters):
energy = 0.
if self.energy_type == 'silhoutte':
energy = InternalEvaluation.get_silhoutte_index(graph, clusters)
return energy
|
<commit_before>from random import choice
from pygraphc.evaluation.InternalEvaluation import InternalEvaluation
class SimulatedAnnealing(object):
def __init__(self, method, tmin, tmax, parameter, energy_type):
"""The constructor of Simulated Annealing method.
Parameters
----------
method : str
The method to run with simulated annealing.
tmin : float
Minimum temperature.
tmax : float
Maximum temperature.
parameter : dict
Dictionary of parameter. Key: parameter, value: list.
energy_type : str
Objective function of simulated annealing. We use internal evaluation for graph clustering.
"""
self.method = method
self.Tmin = tmin
self.Tmax = tmax
self.parameter = parameter
self.energy_type = energy_type
def __get_parameter(self):
chosen_parameter = {}
for param, value in self.parameter.iteritems():
chosen_parameter[param] = choice(value)
return chosen_parameter
def __run_method(self):
if self.method == 'max_clique':
pass
def __get_energy(self, graph, clusters):
energy = 0.
if self.energy_type == 'silhoutte':
energy = InternalEvaluation.get_silhoutte_index(graph, clusters)
return energy
<commit_msg>Edit the parameters. Not fix yet<commit_after>from random import choice
from pygraphc.evaluation.InternalEvaluation import InternalEvaluation
class SimulatedAnnealing(object):
def __init__(self, tmin, tmax, alpha, parameters, energy_type, max_iteration):
"""The constructor of Simulated Annealing method.
Parameters
----------
tmin : float
Minimum temperature.
tmax : float
Maximum temperature.
alpha : float
Cooling factor. Tnew = alpha * Tcurrent
parameters : dict
Dictionary of parameters. Key: parameters, value: list.
energy_type : str
Objective function of simulated annealing. We use internal evaluation for graph clustering.
max_iteration : int
Maximum iteration for simulated annealing.
"""
self.Tmin = tmin
self.Tmax = tmax
self.alpha = alpha
self.parameters = parameters
self.energy_type = energy_type
self.max_iteration = max_iteration
def get_parameter(self):
"""Get random parameter based on given range.
Returns
-------
random_parameter : dict[str, float]
Dictionary of random parameters.
"""
random_parameter = {}
for param, value in self.parameters.iteritems():
random_parameter[param] = choice(value)
return random_parameter
def get_temperature(self, current_temperature):
new_temperature = self.alpha * current_temperature
return new_temperature
def get_energy(self, graph, clusters):
energy = 0.
if self.energy_type == 'silhoutte':
energy = InternalEvaluation.get_silhoutte_index(graph, clusters)
return energy
|
7d7dd781500328c0160ac59affc150f9323ee35d
|
examples/jupyter-output-area/server.py
|
examples/jupyter-output-area/server.py
|
#!/usr/bin/env python2
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
class CORSRequestHandler( SimpleHTTPRequestHandler ):
def end_headers( self ):
self.send_header( 'Access-Control-Allow-Origin', '*' )
SimpleHTTPRequestHandler.end_headers(self)
if __name__ == '__main__':
BaseHTTPServer.test( CORSRequestHandler, BaseHTTPServer.HTTPServer )
|
try:
from http.server import SimpleHTTPRequestHandler
import http.server as BaseHTTPServer
except ImportError:
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
class CORSRequestHandler( SimpleHTTPRequestHandler ):
def end_headers( self ):
self.send_header( 'Access-Control-Allow-Origin', '*' )
SimpleHTTPRequestHandler.end_headers(self)
if __name__ == '__main__':
BaseHTTPServer.test( CORSRequestHandler, BaseHTTPServer.HTTPServer )
|
Add python3 support to stop Steve whinging
|
Add python3 support to stop Steve whinging
|
Python
|
bsd-3-clause
|
dwillmer/playground,dwillmer/playground,dwillmer/playground
|
#!/usr/bin/env python2
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
class CORSRequestHandler( SimpleHTTPRequestHandler ):
def end_headers( self ):
self.send_header( 'Access-Control-Allow-Origin', '*' )
SimpleHTTPRequestHandler.end_headers(self)
if __name__ == '__main__':
BaseHTTPServer.test( CORSRequestHandler, BaseHTTPServer.HTTPServer )Add python3 support to stop Steve whinging
|
try:
from http.server import SimpleHTTPRequestHandler
import http.server as BaseHTTPServer
except ImportError:
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
class CORSRequestHandler( SimpleHTTPRequestHandler ):
def end_headers( self ):
self.send_header( 'Access-Control-Allow-Origin', '*' )
SimpleHTTPRequestHandler.end_headers(self)
if __name__ == '__main__':
BaseHTTPServer.test( CORSRequestHandler, BaseHTTPServer.HTTPServer )
|
<commit_before>#!/usr/bin/env python2
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
class CORSRequestHandler( SimpleHTTPRequestHandler ):
def end_headers( self ):
self.send_header( 'Access-Control-Allow-Origin', '*' )
SimpleHTTPRequestHandler.end_headers(self)
if __name__ == '__main__':
BaseHTTPServer.test( CORSRequestHandler, BaseHTTPServer.HTTPServer )<commit_msg>Add python3 support to stop Steve whinging<commit_after>
|
try:
from http.server import SimpleHTTPRequestHandler
import http.server as BaseHTTPServer
except ImportError:
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
class CORSRequestHandler( SimpleHTTPRequestHandler ):
def end_headers( self ):
self.send_header( 'Access-Control-Allow-Origin', '*' )
SimpleHTTPRequestHandler.end_headers(self)
if __name__ == '__main__':
BaseHTTPServer.test( CORSRequestHandler, BaseHTTPServer.HTTPServer )
|
#!/usr/bin/env python2
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
class CORSRequestHandler( SimpleHTTPRequestHandler ):
def end_headers( self ):
self.send_header( 'Access-Control-Allow-Origin', '*' )
SimpleHTTPRequestHandler.end_headers(self)
if __name__ == '__main__':
BaseHTTPServer.test( CORSRequestHandler, BaseHTTPServer.HTTPServer )Add python3 support to stop Steve whingingtry:
from http.server import SimpleHTTPRequestHandler
import http.server as BaseHTTPServer
except ImportError:
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
class CORSRequestHandler( SimpleHTTPRequestHandler ):
def end_headers( self ):
self.send_header( 'Access-Control-Allow-Origin', '*' )
SimpleHTTPRequestHandler.end_headers(self)
if __name__ == '__main__':
BaseHTTPServer.test( CORSRequestHandler, BaseHTTPServer.HTTPServer )
|
<commit_before>#!/usr/bin/env python2
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
class CORSRequestHandler( SimpleHTTPRequestHandler ):
def end_headers( self ):
self.send_header( 'Access-Control-Allow-Origin', '*' )
SimpleHTTPRequestHandler.end_headers(self)
if __name__ == '__main__':
BaseHTTPServer.test( CORSRequestHandler, BaseHTTPServer.HTTPServer )<commit_msg>Add python3 support to stop Steve whinging<commit_after>try:
from http.server import SimpleHTTPRequestHandler
import http.server as BaseHTTPServer
except ImportError:
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
class CORSRequestHandler( SimpleHTTPRequestHandler ):
def end_headers( self ):
self.send_header( 'Access-Control-Allow-Origin', '*' )
SimpleHTTPRequestHandler.end_headers(self)
if __name__ == '__main__':
BaseHTTPServer.test( CORSRequestHandler, BaseHTTPServer.HTTPServer )
|
9dee48fb0964b12780f57cef26c5b84072448232
|
ds/api/serializer/app.py
|
ds/api/serializer/app.py
|
from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
}
|
from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
'provider': item.provider,
'provider_config': item.provider_config,
}
|
Add provider information to App
|
Add provider information to App
|
Python
|
apache-2.0
|
jkimbo/freight,rshk/freight,jkimbo/freight,getsentry/freight,jkimbo/freight,rshk/freight,klynton/freight,rshk/freight,getsentry/freight,klynton/freight,getsentry/freight,rshk/freight,klynton/freight,getsentry/freight,getsentry/freight,jkimbo/freight,klynton/freight
|
from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
}
Add provider information to App
|
from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
'provider': item.provider,
'provider_config': item.provider_config,
}
|
<commit_before>from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
}
<commit_msg>Add provider information to App<commit_after>
|
from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
'provider': item.provider,
'provider_config': item.provider_config,
}
|
from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
}
Add provider information to Appfrom __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
'provider': item.provider,
'provider_config': item.provider_config,
}
|
<commit_before>from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
}
<commit_msg>Add provider information to App<commit_after>from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
'provider': item.provider,
'provider_config': item.provider_config,
}
|
169d34c179a0878383edd7e2c4ba8f80aaabc7c8
|
zendesk_tickets_machine/tickets/services.py
|
zendesk_tickets_machine/tickets/services.py
|
import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(
pk__in=id_list
).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(
pk__in=id_list
).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(
pk__in=id_list
).update(requester=edit_requester)
if edit_due_at:
Ticket.objects.filter(
pk__in=id_list
).update(
due_at=datetime.datetime.strptime(
edit_due_at, "%m/%d/%Y"
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(
pk__in=id_list
).update(assignee=edit_assignee)
|
import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(pk__in=id_list).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(pk__in=id_list).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(pk__in=id_list).update(
requester=edit_requester
)
if edit_due_at:
Ticket.objects.filter(pk__in=id_list).update(
due_at=datetime.datetime.strptime(
edit_due_at, '%m/%d/%Y'
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(pk__in=id_list).update(
assignee=edit_assignee
)
|
Adjust code style to reduce lines of code :bear:
|
Adjust code style to reduce lines of code :bear:
|
Python
|
mit
|
prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine
|
import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(
pk__in=id_list
).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(
pk__in=id_list
).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(
pk__in=id_list
).update(requester=edit_requester)
if edit_due_at:
Ticket.objects.filter(
pk__in=id_list
).update(
due_at=datetime.datetime.strptime(
edit_due_at, "%m/%d/%Y"
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(
pk__in=id_list
).update(assignee=edit_assignee)
Adjust code style to reduce lines of code :bear:
|
import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(pk__in=id_list).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(pk__in=id_list).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(pk__in=id_list).update(
requester=edit_requester
)
if edit_due_at:
Ticket.objects.filter(pk__in=id_list).update(
due_at=datetime.datetime.strptime(
edit_due_at, '%m/%d/%Y'
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(pk__in=id_list).update(
assignee=edit_assignee
)
|
<commit_before>import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(
pk__in=id_list
).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(
pk__in=id_list
).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(
pk__in=id_list
).update(requester=edit_requester)
if edit_due_at:
Ticket.objects.filter(
pk__in=id_list
).update(
due_at=datetime.datetime.strptime(
edit_due_at, "%m/%d/%Y"
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(
pk__in=id_list
).update(assignee=edit_assignee)
<commit_msg>Adjust code style to reduce lines of code :bear:<commit_after>
|
import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(pk__in=id_list).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(pk__in=id_list).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(pk__in=id_list).update(
requester=edit_requester
)
if edit_due_at:
Ticket.objects.filter(pk__in=id_list).update(
due_at=datetime.datetime.strptime(
edit_due_at, '%m/%d/%Y'
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(pk__in=id_list).update(
assignee=edit_assignee
)
|
import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(
pk__in=id_list
).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(
pk__in=id_list
).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(
pk__in=id_list
).update(requester=edit_requester)
if edit_due_at:
Ticket.objects.filter(
pk__in=id_list
).update(
due_at=datetime.datetime.strptime(
edit_due_at, "%m/%d/%Y"
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(
pk__in=id_list
).update(assignee=edit_assignee)
Adjust code style to reduce lines of code :bear:import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(pk__in=id_list).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(pk__in=id_list).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(pk__in=id_list).update(
requester=edit_requester
)
if edit_due_at:
Ticket.objects.filter(pk__in=id_list).update(
due_at=datetime.datetime.strptime(
edit_due_at, '%m/%d/%Y'
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(pk__in=id_list).update(
assignee=edit_assignee
)
|
<commit_before>import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(
pk__in=id_list
).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(
pk__in=id_list
).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(
pk__in=id_list
).update(requester=edit_requester)
if edit_due_at:
Ticket.objects.filter(
pk__in=id_list
).update(
due_at=datetime.datetime.strptime(
edit_due_at, "%m/%d/%Y"
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(
pk__in=id_list
).update(assignee=edit_assignee)
<commit_msg>Adjust code style to reduce lines of code :bear:<commit_after>import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(pk__in=id_list).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(pk__in=id_list).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(pk__in=id_list).update(
requester=edit_requester
)
if edit_due_at:
Ticket.objects.filter(pk__in=id_list).update(
due_at=datetime.datetime.strptime(
edit_due_at, '%m/%d/%Y'
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(pk__in=id_list).update(
assignee=edit_assignee
)
|
6879ea37aaee51b7144234f855e0f4ff9fe0dd2c
|
models.py
|
models.py
|
import re
class Phage:
supported_databases = {
# European Nucleotide Archive phage database
"ENA": "^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": "^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": "^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parsePhage(raw_text, phage_finder)
def _parsePhage(self, raw_text, phage_finder):
for db, regex in Phage.supported_databases.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.findByPhage(short_name, cluster)
self.db = db
|
import re


class Phage:
    """Parse a phage record header from one of several databases.

    Each supported database maps to a regex whose first capture group is
    the accession/refseq identifier and whose second is the phage name
    (or, for the Actinobacteriophage Database, the short name and the
    cluster, which are resolved to a refseq via ``phage_finder``).
    """

    # NOTE(review): the "ENA" pattern matches NCBI-style "gi|...|ref|..."
    # headers while the "NCBI" pattern matches "ENA|..." headers -- the
    # labels look swapped; confirm against the upstream data source.
    supported_databases = {
        # European Nucleotide Archive phage database
        "ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
        # National Center for Biotechnology Information phage database
        "NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
        # Actinobacteriophage Database
        "AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
    }

    def __init__(self, raw_text, phage_finder):
        self.raw = raw_text.strip()
        self.refseq = None
        self.name = None
        self.db = None
        self._parsePhage(raw_text, phage_finder)

    def _parsePhage(self, raw_text, phage_finder):
        """Try each database regex; record name/refseq/db on a match."""
        for db, regex in Phage.supported_databases.items():
            match = re.search(regex, raw_text)
            if match is None:
                continue
            # Bug fix: the original used `db is not "AD"`, comparing
            # strings by identity -- that only works because CPython
            # interns short literals.  Compare by value instead.
            if db != "AD":
                self.name = match.group(2)
                self.refseq = match.group(1)
            else:
                short_name = match.group(1)
                cluster = match.group(2)
                self.name = "Mycobacteriophage " + short_name
                self.refseq = phage_finder.findByPhage(short_name, cluster)
            self.db = db
|
Use raw strings for regexes.
|
Use raw strings for regexes.
|
Python
|
mit
|
goyalsid/phageParser,phageParser/phageParser,phageParser/phageParser,mbonsma/phageParser,goyalsid/phageParser,phageParser/phageParser,mbonsma/phageParser,phageParser/phageParser,goyalsid/phageParser,mbonsma/phageParser,mbonsma/phageParser
|
import re
class Phage:
supported_databases = {
# European Nucleotide Archive phage database
"ENA": "^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": "^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": "^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parsePhage(raw_text, phage_finder)
def _parsePhage(self, raw_text, phage_finder):
for db, regex in Phage.supported_databases.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.findByPhage(short_name, cluster)
self.db = dbUse raw strings for regexes.
|
import re
class Phage:
supported_databases = {
# European Nucleotide Archive phage database
"ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parsePhage(raw_text, phage_finder)
def _parsePhage(self, raw_text, phage_finder):
for db, regex in Phage.supported_databases.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.findByPhage(short_name, cluster)
self.db = db
|
<commit_before>import re
class Phage:
supported_databases = {
# European Nucleotide Archive phage database
"ENA": "^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": "^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": "^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parsePhage(raw_text, phage_finder)
def _parsePhage(self, raw_text, phage_finder):
for db, regex in Phage.supported_databases.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.findByPhage(short_name, cluster)
self.db = db<commit_msg>Use raw strings for regexes.<commit_after>
|
import re
class Phage:
supported_databases = {
# European Nucleotide Archive phage database
"ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parsePhage(raw_text, phage_finder)
def _parsePhage(self, raw_text, phage_finder):
for db, regex in Phage.supported_databases.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.findByPhage(short_name, cluster)
self.db = db
|
import re
class Phage:
supported_databases = {
# European Nucleotide Archive phage database
"ENA": "^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": "^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": "^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parsePhage(raw_text, phage_finder)
def _parsePhage(self, raw_text, phage_finder):
for db, regex in Phage.supported_databases.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.findByPhage(short_name, cluster)
self.db = dbUse raw strings for regexes.import re
class Phage:
supported_databases = {
# European Nucleotide Archive phage database
"ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parsePhage(raw_text, phage_finder)
def _parsePhage(self, raw_text, phage_finder):
for db, regex in Phage.supported_databases.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.findByPhage(short_name, cluster)
self.db = db
|
<commit_before>import re
class Phage:
supported_databases = {
# European Nucleotide Archive phage database
"ENA": "^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": "^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": "^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parsePhage(raw_text, phage_finder)
def _parsePhage(self, raw_text, phage_finder):
for db, regex in Phage.supported_databases.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.findByPhage(short_name, cluster)
self.db = db<commit_msg>Use raw strings for regexes.<commit_after>import re
class Phage:
supported_databases = {
# European Nucleotide Archive phage database
"ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parsePhage(raw_text, phage_finder)
def _parsePhage(self, raw_text, phage_finder):
for db, regex in Phage.supported_databases.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.findByPhage(short_name, cluster)
self.db = db
|
2c41bfe7da9644b3a76adc5d2f1744107a3c40f4
|
core/git_mixins/rewrite.py
|
core/git_mixins/rewrite.py
|
from types import SimpleNamespace
class ChangeTemplate(SimpleNamespace):
# orig_hash
do_commit = True
msg = None
datetime = None
author = None
class RewriteMixin():
ChangeTemplate = ChangeTemplate
def rewrite_active_branch(self, base_commit, commit_chain):
branch_name = self.get_current_branch_name()
# Detach HEAD to base commit.
self.checkout_ref(base_commit)
# Apply each commit to HEAD in order.
try:
for commit in commit_chain:
self.git(
"cherry-pick",
"--allow-empty",
"--no-commit",
commit.orig_hash
)
# If squashing one commit into the next, do_commit should be
# False so that it's changes are included in the next commit.
if commit.do_commit:
self.git(
"commit",
"--author",
commit.author,
"--date",
commit.datetime,
"-F",
"-",
stdin=commit.msg
)
self.git("branch", "-f", branch_name, "HEAD")
except Exception as e:
raise e
finally:
# Whether on success or failure, always re-checkout the branch. On success,
# this will be the re-written branch. On failure, this will be the original
# branch (since re-defining the branch ref is the last step).
self.git("checkout", branch_name)
|
from types import SimpleNamespace
class ChangeTemplate(SimpleNamespace):
    """Per-commit settings for RewriteMixin.rewrite_active_branch.

    Callers are also expected to set ``orig_hash`` (the hash of the
    commit to cherry-pick) on each instance; it has no class default.
    """
    # orig_hash  -- required, set per instance (no default)
    do_commit = True   # False folds this commit's changes into the next commit
    msg = None         # commit message, piped to `git commit -F -`
    datetime = None    # value passed to `git commit --date`
    author = None      # value passed to `git commit --author`
class RewriteMixin():
    """Git helpers for rewriting the active branch's history."""

    ChangeTemplate = ChangeTemplate

    def rewrite_active_branch(self, base_commit, commit_chain):
        """Rebuild the current branch as ``base_commit`` + ``commit_chain``.

        Each entry of ``commit_chain`` is a ``ChangeTemplate``.  Entries
        with ``do_commit`` False are cherry-picked but left uncommitted
        so their changes fold into the next committed entry (squash).
        """
        branch_name = self.get_current_branch_name()

        # Detach HEAD to base commit.
        self.checkout_ref(base_commit)

        # Apply each commit to HEAD in order.
        # Bug fix: the original wrapped this in `except Exception as e:
        # raise e`, a no-op re-raise that resets the traceback origin;
        # try/finally alone propagates the exception unchanged.
        try:
            for commit in commit_chain:
                self.git(
                    "cherry-pick",
                    "--allow-empty",
                    "--allow-empty-message",
                    "--no-commit",
                    commit.orig_hash
                )
                # If squashing one commit into the next, do_commit should be
                # False so that its changes are included in the next commit.
                if commit.do_commit:
                    self.git(
                        "commit",
                        "--author",
                        commit.author,
                        "--date",
                        commit.datetime,
                        "-F",
                        "-",
                        stdin=commit.msg
                    )

            self.git("branch", "-f", branch_name, "HEAD")
        finally:
            # Whether on success or failure, always re-checkout the branch. On success,
            # this will be the re-written branch. On failure, this will be the original
            # branch (since re-defining the branch ref is the last step).
            self.git("checkout", branch_name)
|
Allow empty commit messages if explicitly specified.
|
Allow empty commit messages if explicitly specified.
|
Python
|
mit
|
theiviaxx/GitSavvy,jmanuel1/GitSavvy,dreki/GitSavvy,dvcrn/GitSavvy,dvcrn/GitSavvy,asfaltboy/GitSavvy,jmanuel1/GitSavvy,ddevlin/GitSavvy,ddevlin/GitSavvy,divmain/GitSavvy,stoivo/GitSavvy,stoivo/GitSavvy,divmain/GitSavvy,dreki/GitSavvy,ddevlin/GitSavvy,theiviaxx/GitSavvy,stoivo/GitSavvy,asfaltboy/GitSavvy,ralic/GitSavvy,divmain/GitSavvy,ralic/GitSavvy,asfaltboy/GitSavvy
|
from types import SimpleNamespace
class ChangeTemplate(SimpleNamespace):
# orig_hash
do_commit = True
msg = None
datetime = None
author = None
class RewriteMixin():
ChangeTemplate = ChangeTemplate
def rewrite_active_branch(self, base_commit, commit_chain):
branch_name = self.get_current_branch_name()
# Detach HEAD to base commit.
self.checkout_ref(base_commit)
# Apply each commit to HEAD in order.
try:
for commit in commit_chain:
self.git(
"cherry-pick",
"--allow-empty",
"--no-commit",
commit.orig_hash
)
# If squashing one commit into the next, do_commit should be
# False so that it's changes are included in the next commit.
if commit.do_commit:
self.git(
"commit",
"--author",
commit.author,
"--date",
commit.datetime,
"-F",
"-",
stdin=commit.msg
)
self.git("branch", "-f", branch_name, "HEAD")
except Exception as e:
raise e
finally:
# Whether on success or failure, always re-checkout the branch. On success,
# this will be the re-written branch. On failure, this will be the original
# branch (since re-defining the branch ref is the last step).
self.git("checkout", branch_name)
Allow empty commit messages if explictly specified.
|
from types import SimpleNamespace
class ChangeTemplate(SimpleNamespace):
# orig_hash
do_commit = True
msg = None
datetime = None
author = None
class RewriteMixin():
ChangeTemplate = ChangeTemplate
def rewrite_active_branch(self, base_commit, commit_chain):
branch_name = self.get_current_branch_name()
# Detach HEAD to base commit.
self.checkout_ref(base_commit)
# Apply each commit to HEAD in order.
try:
for commit in commit_chain:
self.git(
"cherry-pick",
"--allow-empty",
"--allow-empty-message",
"--no-commit",
commit.orig_hash
)
# If squashing one commit into the next, do_commit should be
# False so that it's changes are included in the next commit.
if commit.do_commit:
self.git(
"commit",
"--author",
commit.author,
"--date",
commit.datetime,
"-F",
"-",
stdin=commit.msg
)
self.git("branch", "-f", branch_name, "HEAD")
except Exception as e:
raise e
finally:
# Whether on success or failure, always re-checkout the branch. On success,
# this will be the re-written branch. On failure, this will be the original
# branch (since re-defining the branch ref is the last step).
self.git("checkout", branch_name)
|
<commit_before>from types import SimpleNamespace
class ChangeTemplate(SimpleNamespace):
# orig_hash
do_commit = True
msg = None
datetime = None
author = None
class RewriteMixin():
ChangeTemplate = ChangeTemplate
def rewrite_active_branch(self, base_commit, commit_chain):
branch_name = self.get_current_branch_name()
# Detach HEAD to base commit.
self.checkout_ref(base_commit)
# Apply each commit to HEAD in order.
try:
for commit in commit_chain:
self.git(
"cherry-pick",
"--allow-empty",
"--no-commit",
commit.orig_hash
)
# If squashing one commit into the next, do_commit should be
# False so that it's changes are included in the next commit.
if commit.do_commit:
self.git(
"commit",
"--author",
commit.author,
"--date",
commit.datetime,
"-F",
"-",
stdin=commit.msg
)
self.git("branch", "-f", branch_name, "HEAD")
except Exception as e:
raise e
finally:
# Whether on success or failure, always re-checkout the branch. On success,
# this will be the re-written branch. On failure, this will be the original
# branch (since re-defining the branch ref is the last step).
self.git("checkout", branch_name)
<commit_msg>Allow empty commit messages if explictly specified.<commit_after>
|
from types import SimpleNamespace
class ChangeTemplate(SimpleNamespace):
# orig_hash
do_commit = True
msg = None
datetime = None
author = None
class RewriteMixin():
ChangeTemplate = ChangeTemplate
def rewrite_active_branch(self, base_commit, commit_chain):
branch_name = self.get_current_branch_name()
# Detach HEAD to base commit.
self.checkout_ref(base_commit)
# Apply each commit to HEAD in order.
try:
for commit in commit_chain:
self.git(
"cherry-pick",
"--allow-empty",
"--allow-empty-message",
"--no-commit",
commit.orig_hash
)
# If squashing one commit into the next, do_commit should be
# False so that it's changes are included in the next commit.
if commit.do_commit:
self.git(
"commit",
"--author",
commit.author,
"--date",
commit.datetime,
"-F",
"-",
stdin=commit.msg
)
self.git("branch", "-f", branch_name, "HEAD")
except Exception as e:
raise e
finally:
# Whether on success or failure, always re-checkout the branch. On success,
# this will be the re-written branch. On failure, this will be the original
# branch (since re-defining the branch ref is the last step).
self.git("checkout", branch_name)
|
from types import SimpleNamespace
class ChangeTemplate(SimpleNamespace):
# orig_hash
do_commit = True
msg = None
datetime = None
author = None
class RewriteMixin():
ChangeTemplate = ChangeTemplate
def rewrite_active_branch(self, base_commit, commit_chain):
branch_name = self.get_current_branch_name()
# Detach HEAD to base commit.
self.checkout_ref(base_commit)
# Apply each commit to HEAD in order.
try:
for commit in commit_chain:
self.git(
"cherry-pick",
"--allow-empty",
"--no-commit",
commit.orig_hash
)
# If squashing one commit into the next, do_commit should be
# False so that it's changes are included in the next commit.
if commit.do_commit:
self.git(
"commit",
"--author",
commit.author,
"--date",
commit.datetime,
"-F",
"-",
stdin=commit.msg
)
self.git("branch", "-f", branch_name, "HEAD")
except Exception as e:
raise e
finally:
# Whether on success or failure, always re-checkout the branch. On success,
# this will be the re-written branch. On failure, this will be the original
# branch (since re-defining the branch ref is the last step).
self.git("checkout", branch_name)
Allow empty commit messages if explictly specified.from types import SimpleNamespace
class ChangeTemplate(SimpleNamespace):
# orig_hash
do_commit = True
msg = None
datetime = None
author = None
class RewriteMixin():
ChangeTemplate = ChangeTemplate
def rewrite_active_branch(self, base_commit, commit_chain):
branch_name = self.get_current_branch_name()
# Detach HEAD to base commit.
self.checkout_ref(base_commit)
# Apply each commit to HEAD in order.
try:
for commit in commit_chain:
self.git(
"cherry-pick",
"--allow-empty",
"--allow-empty-message",
"--no-commit",
commit.orig_hash
)
# If squashing one commit into the next, do_commit should be
# False so that it's changes are included in the next commit.
if commit.do_commit:
self.git(
"commit",
"--author",
commit.author,
"--date",
commit.datetime,
"-F",
"-",
stdin=commit.msg
)
self.git("branch", "-f", branch_name, "HEAD")
except Exception as e:
raise e
finally:
# Whether on success or failure, always re-checkout the branch. On success,
# this will be the re-written branch. On failure, this will be the original
# branch (since re-defining the branch ref is the last step).
self.git("checkout", branch_name)
|
<commit_before>from types import SimpleNamespace
class ChangeTemplate(SimpleNamespace):
# orig_hash
do_commit = True
msg = None
datetime = None
author = None
class RewriteMixin():
ChangeTemplate = ChangeTemplate
def rewrite_active_branch(self, base_commit, commit_chain):
branch_name = self.get_current_branch_name()
# Detach HEAD to base commit.
self.checkout_ref(base_commit)
# Apply each commit to HEAD in order.
try:
for commit in commit_chain:
self.git(
"cherry-pick",
"--allow-empty",
"--no-commit",
commit.orig_hash
)
# If squashing one commit into the next, do_commit should be
# False so that it's changes are included in the next commit.
if commit.do_commit:
self.git(
"commit",
"--author",
commit.author,
"--date",
commit.datetime,
"-F",
"-",
stdin=commit.msg
)
self.git("branch", "-f", branch_name, "HEAD")
except Exception as e:
raise e
finally:
# Whether on success or failure, always re-checkout the branch. On success,
# this will be the re-written branch. On failure, this will be the original
# branch (since re-defining the branch ref is the last step).
self.git("checkout", branch_name)
<commit_msg>Allow empty commit messages if explictly specified.<commit_after>from types import SimpleNamespace
class ChangeTemplate(SimpleNamespace):
# orig_hash
do_commit = True
msg = None
datetime = None
author = None
class RewriteMixin():
ChangeTemplate = ChangeTemplate
def rewrite_active_branch(self, base_commit, commit_chain):
branch_name = self.get_current_branch_name()
# Detach HEAD to base commit.
self.checkout_ref(base_commit)
# Apply each commit to HEAD in order.
try:
for commit in commit_chain:
self.git(
"cherry-pick",
"--allow-empty",
"--allow-empty-message",
"--no-commit",
commit.orig_hash
)
# If squashing one commit into the next, do_commit should be
# False so that it's changes are included in the next commit.
if commit.do_commit:
self.git(
"commit",
"--author",
commit.author,
"--date",
commit.datetime,
"-F",
"-",
stdin=commit.msg
)
self.git("branch", "-f", branch_name, "HEAD")
except Exception as e:
raise e
finally:
# Whether on success or failure, always re-checkout the branch. On success,
# this will be the re-written branch. On failure, this will be the original
# branch (since re-defining the branch ref is the last step).
self.git("checkout", branch_name)
|
cea1f24aa0862d2feab1150fbd667159ab4cbe3a
|
migrations/versions/0313_email_access_validated_at.py
|
migrations/versions/0313_email_access_validated_at.py
|
"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# if user has email_auth, set email_access_validated_at on last login, else set it at user created_at date.
op.execute("""
UPDATE
users
SET
email_access_validated_at = created_at
""")
op.execute("""
UPDATE
users
SET
email_access_validated_at = logged_in_at
WHERE
auth_type = 'email_auth'
AND
logged_in_at IS NOT NULL
""")
op.alter_column('users', 'email_access_validated_at', nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
|
"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
|
Simplify the first migration, we will do execute statements later
|
Simplify the first migration, we will do execute statements later
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
    """Add users.email_access_validated_at and backfill it.

    Backfill strategy: default every user to their created_at; for
    email-auth users who have logged in, use logged_in_at instead
    (logging in via an email link demonstrates email access).  The
    column is then made NOT NULL since every row is populated.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))

    # if user has email_auth, set email_access_validated_at on last login, else set it at user created_at date.
    op.execute("""
UPDATE
users
SET
email_access_validated_at = created_at
""")
    op.execute("""
UPDATE
users
SET
email_access_validated_at = logged_in_at
WHERE
auth_type = 'email_auth'
AND
logged_in_at IS NOT NULL
""")
    # Every row now has a value, so the column can be NOT NULL.
    op.alter_column('users', 'email_access_validated_at', nullable=False)
    # ### end Alembic commands ###


def downgrade():
    """Drop users.email_access_validated_at (reverses upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'email_access_validated_at')
    # ### end Alembic commands ###
Simplify the first migration, we will do execute statements later
|
"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
|
<commit_before>"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# if user has email_auth, set email_access_validated_at on last login, else set it at user created_at date.
op.execute("""
UPDATE
users
SET
email_access_validated_at = created_at
""")
op.execute("""
UPDATE
users
SET
email_access_validated_at = logged_in_at
WHERE
auth_type = 'email_auth'
AND
logged_in_at IS NOT NULL
""")
op.alter_column('users', 'email_access_validated_at', nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
<commit_msg>Simplify the first migration, we will do execute statements later<commit_after>
|
"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
|
"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# if user has email_auth, set email_access_validated_at on last login, else set it at user created_at date.
op.execute("""
UPDATE
users
SET
email_access_validated_at = created_at
""")
op.execute("""
UPDATE
users
SET
email_access_validated_at = logged_in_at
WHERE
auth_type = 'email_auth'
AND
logged_in_at IS NOT NULL
""")
op.alter_column('users', 'email_access_validated_at', nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
Simplify the first migration, we will do execute statements later"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
|
<commit_before>"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# if user has email_auth, set email_access_validated_at on last login, else set it at user created_at date.
op.execute("""
UPDATE
users
SET
email_access_validated_at = created_at
""")
op.execute("""
UPDATE
users
SET
email_access_validated_at = logged_in_at
WHERE
auth_type = 'email_auth'
AND
logged_in_at IS NOT NULL
""")
op.alter_column('users', 'email_access_validated_at', nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
<commit_msg>Simplify the first migration, we will do execute statements later<commit_after>"""
Revision ID: 0313_email_access_validated_at
Revises: 0312_populate_returned_letters
Create Date: 2020-01-28 18:03:22.237386
"""
from alembic import op
import sqlalchemy as sa
revision = '0313_email_access_validated_at'
down_revision = '0312_populate_returned_letters'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_access_validated_at', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'email_access_validated_at')
# ### end Alembic commands ###
|
a3ee74b3b7cba17e013b549f0ed56587cfc65331
|
rnacentral/nhmmer/urls.py
|
rnacentral/nhmmer/urls.py
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
Use spaces instead of tabs
|
Use spaces instead of tabs
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
Use spaces instead of tabs
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
<commit_before>"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
<commit_msg>Use spaces instead of tabs<commit_after>
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
Use spaces instead of tabs"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
<commit_before>"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
<commit_msg>Use spaces instead of tabs<commit_after>"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url
from django.views.generic.base import TemplateView
import views
# exporting metadata search results
urlpatterns = patterns('',
# export search results
url(r'^submit-query/?$',
'nhmmer.views.submit_job',
name='nhmmer-submit-job'),
# get nhmmer search job status
url(r'^job-status/?$',
'nhmmer.views.get_status',
name='nhmmer-job-status'),
# get nhmmer results
url(r'^get-results/?$',
views.ResultsView.as_view(),
name='nhmmer-job-results'),
# user interface
url(r'^$', TemplateView.as_view(template_name='nhmmer/sequence-search.html'),
name='nhmmer-sequence-search'),
)
|
e548b18f223e2493472dcf393d21d1714d304216
|
median.py
|
median.py
|
def median(numbers):
"""Return the median of a list of numbers."""
length = len(numbers)
numbers.sort()
if length % 2 == 0:
place = length / 2
calculated_median = (numbers[place] + numbers[place - 1]) / 2.0
else:
place = (length - 1) / 2
calculated_median = numbers[place]
return calculated_median
|
def median(numbers):
"""Return the median of a list of numbers."""
length = len(numbers)
numbers.sort()
if length % 2 == 0:
place = int(length / 2) # index must be integer
calculated_median = (numbers[place] + numbers[place - 1]) / 2.0
else:
place = int((length - 1) / 2) # index must be integer
calculated_median = numbers[place]
return calculated_median
|
Convert place to integer, for list index
|
Convert place to integer, for list index
|
Python
|
agpl-3.0
|
brylie/python-practice
|
def median(numbers):
"""Return the median of a list of numbers."""
length = len(numbers)
numbers.sort()
if length % 2 == 0:
place = length / 2
calculated_median = (numbers[place] + numbers[place - 1]) / 2.0
else:
place = (length - 1) / 2
calculated_median = numbers[place]
return calculated_median
Convert place to integer, for list index
|
def median(numbers):
"""Return the median of a list of numbers."""
length = len(numbers)
numbers.sort()
if length % 2 == 0:
place = int(length / 2) # index must be integer
calculated_median = (numbers[place] + numbers[place - 1]) / 2.0
else:
place = int((length - 1) / 2) # index must be integer
calculated_median = numbers[place]
return calculated_median
|
<commit_before>def median(numbers):
"""Return the median of a list of numbers."""
length = len(numbers)
numbers.sort()
if length % 2 == 0:
place = length / 2
calculated_median = (numbers[place] + numbers[place - 1]) / 2.0
else:
place = (length - 1) / 2
calculated_median = numbers[place]
return calculated_median
<commit_msg>Convert place to integer, for list index<commit_after>
|
def median(numbers):
"""Return the median of a list of numbers."""
length = len(numbers)
numbers.sort()
if length % 2 == 0:
place = int(length / 2) # index must be integer
calculated_median = (numbers[place] + numbers[place - 1]) / 2.0
else:
place = int((length - 1) / 2) # index must be integer
calculated_median = numbers[place]
return calculated_median
|
def median(numbers):
"""Return the median of a list of numbers."""
length = len(numbers)
numbers.sort()
if length % 2 == 0:
place = length / 2
calculated_median = (numbers[place] + numbers[place - 1]) / 2.0
else:
place = (length - 1) / 2
calculated_median = numbers[place]
return calculated_median
Convert place to integer, for list indexdef median(numbers):
"""Return the median of a list of numbers."""
length = len(numbers)
numbers.sort()
if length % 2 == 0:
place = int(length / 2) # index must be integer
calculated_median = (numbers[place] + numbers[place - 1]) / 2.0
else:
place = int((length - 1) / 2) # index must be integer
calculated_median = numbers[place]
return calculated_median
|
<commit_before>def median(numbers):
"""Return the median of a list of numbers."""
length = len(numbers)
numbers.sort()
if length % 2 == 0:
place = length / 2
calculated_median = (numbers[place] + numbers[place - 1]) / 2.0
else:
place = (length - 1) / 2
calculated_median = numbers[place]
return calculated_median
<commit_msg>Convert place to integer, for list index<commit_after>def median(numbers):
"""Return the median of a list of numbers."""
length = len(numbers)
numbers.sort()
if length % 2 == 0:
place = int(length / 2) # index must be integer
calculated_median = (numbers[place] + numbers[place - 1]) / 2.0
else:
place = int((length - 1) / 2) # index must be integer
calculated_median = numbers[place]
return calculated_median
|
302f5b586baccafbc2641d241312edb90e922074
|
mla_game/settings/stage.py
|
mla_game/settings/stage.py
|
from .base import *
import os
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
LOG_DIRECTORY = '/home/wgbh/logs'
STATIC_ROOT = '/home/wgbh/webroot/static'
ALLOWED_HOSTS = [
'mlagame-dev.wgbhdigital.org', 'mlagame.wgbhdigital.org',
'fixit.wgbhdigital.org',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'NAME': 'mla',
'USER': 'mla',
'PASSWORD': os.environ['PG_PASS'],
'TEST': {
'NAME': 'mla-test',
},
},
}
GA_CODE = 'null'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '{}/django.log'.format(LOG_DIRECTORY),
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
from .base import *
import os
# how many data points are enough to calculate confidence?
MINIMUM_SAMPLE_SIZE = 3
# original phrase is good enough for export
TRANSCRIPT_PHRASE_POSITIVE_CONFIDENCE_LIMIT = .51
# original phrase needs correction
TRANSCRIPT_PHRASE_NEGATIVE_CONFIDENCE_LIMIT = -.51
# correction is good enough to award points and export data
TRANSCRIPT_PHRASE_CORRECTION_LOWER_LIMIT = .51
# correction no longer needs votes and can replace original phrase
TRANSCRIPT_PHRASE_CORRECTION_UPPER_LIMIT = .66
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
LOG_DIRECTORY = '/home/wgbh/logs'
STATIC_ROOT = '/home/wgbh/webroot/static'
ALLOWED_HOSTS = [
'mlagame-dev.wgbhdigital.org', 'mlagame.wgbhdigital.org',
'fixit.wgbhdigital.org',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'NAME': 'mla',
'USER': 'mla',
'PASSWORD': os.environ['PG_PASS'],
'TEST': {
'NAME': 'mla-test',
},
},
}
GA_CODE = 'null'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '{}/django.log'.format(LOG_DIRECTORY),
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
Set the bar low on staging
|
Set the bar low on staging
|
Python
|
mit
|
WGBH/FixIt,WGBH/FixIt,WGBH/FixIt
|
from .base import *
import os
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
LOG_DIRECTORY = '/home/wgbh/logs'
STATIC_ROOT = '/home/wgbh/webroot/static'
ALLOWED_HOSTS = [
'mlagame-dev.wgbhdigital.org', 'mlagame.wgbhdigital.org',
'fixit.wgbhdigital.org',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'NAME': 'mla',
'USER': 'mla',
'PASSWORD': os.environ['PG_PASS'],
'TEST': {
'NAME': 'mla-test',
},
},
}
GA_CODE = 'null'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '{}/django.log'.format(LOG_DIRECTORY),
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
Set the bar low on staging
|
from .base import *
import os
# how many data points are enough to calculate confidence?
MINIMUM_SAMPLE_SIZE = 3
# original phrase is good enough for export
TRANSCRIPT_PHRASE_POSITIVE_CONFIDENCE_LIMIT = .51
# original phrase needs correction
TRANSCRIPT_PHRASE_NEGATIVE_CONFIDENCE_LIMIT = -.51
# correction is good enough to award points and export data
TRANSCRIPT_PHRASE_CORRECTION_LOWER_LIMIT = .51
# correction no longer needs votes and can replace original phrase
TRANSCRIPT_PHRASE_CORRECTION_UPPER_LIMIT = .66
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
LOG_DIRECTORY = '/home/wgbh/logs'
STATIC_ROOT = '/home/wgbh/webroot/static'
ALLOWED_HOSTS = [
'mlagame-dev.wgbhdigital.org', 'mlagame.wgbhdigital.org',
'fixit.wgbhdigital.org',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'NAME': 'mla',
'USER': 'mla',
'PASSWORD': os.environ['PG_PASS'],
'TEST': {
'NAME': 'mla-test',
},
},
}
GA_CODE = 'null'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '{}/django.log'.format(LOG_DIRECTORY),
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
<commit_before>from .base import *
import os
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
LOG_DIRECTORY = '/home/wgbh/logs'
STATIC_ROOT = '/home/wgbh/webroot/static'
ALLOWED_HOSTS = [
'mlagame-dev.wgbhdigital.org', 'mlagame.wgbhdigital.org',
'fixit.wgbhdigital.org',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'NAME': 'mla',
'USER': 'mla',
'PASSWORD': os.environ['PG_PASS'],
'TEST': {
'NAME': 'mla-test',
},
},
}
GA_CODE = 'null'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '{}/django.log'.format(LOG_DIRECTORY),
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
<commit_msg>Set the bar low on staging<commit_after>
|
from .base import *
import os
# how many data points are enough to calculate confidence?
MINIMUM_SAMPLE_SIZE = 3
# original phrase is good enough for export
TRANSCRIPT_PHRASE_POSITIVE_CONFIDENCE_LIMIT = .51
# original phrase needs correction
TRANSCRIPT_PHRASE_NEGATIVE_CONFIDENCE_LIMIT = -.51
# correction is good enough to award points and export data
TRANSCRIPT_PHRASE_CORRECTION_LOWER_LIMIT = .51
# correction no longer needs votes and can replace original phrase
TRANSCRIPT_PHRASE_CORRECTION_UPPER_LIMIT = .66
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
LOG_DIRECTORY = '/home/wgbh/logs'
STATIC_ROOT = '/home/wgbh/webroot/static'
ALLOWED_HOSTS = [
'mlagame-dev.wgbhdigital.org', 'mlagame.wgbhdigital.org',
'fixit.wgbhdigital.org',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'NAME': 'mla',
'USER': 'mla',
'PASSWORD': os.environ['PG_PASS'],
'TEST': {
'NAME': 'mla-test',
},
},
}
GA_CODE = 'null'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '{}/django.log'.format(LOG_DIRECTORY),
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
from .base import *
import os
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
LOG_DIRECTORY = '/home/wgbh/logs'
STATIC_ROOT = '/home/wgbh/webroot/static'
ALLOWED_HOSTS = [
'mlagame-dev.wgbhdigital.org', 'mlagame.wgbhdigital.org',
'fixit.wgbhdigital.org',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'NAME': 'mla',
'USER': 'mla',
'PASSWORD': os.environ['PG_PASS'],
'TEST': {
'NAME': 'mla-test',
},
},
}
GA_CODE = 'null'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '{}/django.log'.format(LOG_DIRECTORY),
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
Set the bar low on stagingfrom .base import *
import os
# how many data points are enough to calculate confidence?
MINIMUM_SAMPLE_SIZE = 3
# original phrase is good enough for export
TRANSCRIPT_PHRASE_POSITIVE_CONFIDENCE_LIMIT = .51
# original phrase needs correction
TRANSCRIPT_PHRASE_NEGATIVE_CONFIDENCE_LIMIT = -.51
# correction is good enough to award points and export data
TRANSCRIPT_PHRASE_CORRECTION_LOWER_LIMIT = .51
# correction no longer needs votes and can replace original phrase
TRANSCRIPT_PHRASE_CORRECTION_UPPER_LIMIT = .66
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
LOG_DIRECTORY = '/home/wgbh/logs'
STATIC_ROOT = '/home/wgbh/webroot/static'
ALLOWED_HOSTS = [
'mlagame-dev.wgbhdigital.org', 'mlagame.wgbhdigital.org',
'fixit.wgbhdigital.org',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'NAME': 'mla',
'USER': 'mla',
'PASSWORD': os.environ['PG_PASS'],
'TEST': {
'NAME': 'mla-test',
},
},
}
GA_CODE = 'null'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '{}/django.log'.format(LOG_DIRECTORY),
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
<commit_before>from .base import *
import os
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
LOG_DIRECTORY = '/home/wgbh/logs'
STATIC_ROOT = '/home/wgbh/webroot/static'
ALLOWED_HOSTS = [
'mlagame-dev.wgbhdigital.org', 'mlagame.wgbhdigital.org',
'fixit.wgbhdigital.org',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'NAME': 'mla',
'USER': 'mla',
'PASSWORD': os.environ['PG_PASS'],
'TEST': {
'NAME': 'mla-test',
},
},
}
GA_CODE = 'null'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '{}/django.log'.format(LOG_DIRECTORY),
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
<commit_msg>Set the bar low on staging<commit_after>from .base import *
import os
# how many data points are enough to calculate confidence?
MINIMUM_SAMPLE_SIZE = 3
# original phrase is good enough for export
TRANSCRIPT_PHRASE_POSITIVE_CONFIDENCE_LIMIT = .51
# original phrase needs correction
TRANSCRIPT_PHRASE_NEGATIVE_CONFIDENCE_LIMIT = -.51
# correction is good enough to award points and export data
TRANSCRIPT_PHRASE_CORRECTION_LOWER_LIMIT = .51
# correction no longer needs votes and can replace original phrase
TRANSCRIPT_PHRASE_CORRECTION_UPPER_LIMIT = .66
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
LOG_DIRECTORY = '/home/wgbh/logs'
STATIC_ROOT = '/home/wgbh/webroot/static'
ALLOWED_HOSTS = [
'mlagame-dev.wgbhdigital.org', 'mlagame.wgbhdigital.org',
'fixit.wgbhdigital.org',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'NAME': 'mla',
'USER': 'mla',
'PASSWORD': os.environ['PG_PASS'],
'TEST': {
'NAME': 'mla-test',
},
},
}
GA_CODE = 'null'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '{}/django.log'.format(LOG_DIRECTORY),
},
},
'loggers': {
'django': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
48e340377ae06e962e043658b2dc8235b18f44e2
|
turbustat/statistics/base_statistic.py
|
turbustat/statistics/base_statistic.py
|
from astropy.io import fits
import numpy as np
from ..io import input_data
class BaseStatisticMixIn(object):
"""
Common properties to all statistics
"""
# Disable this flag when a statistic does not need a header
need_header_flag = True
@property
def header(self):
return self._header
@header.setter
def header(self, input_hdr):
if not self.need_header_flag:
input_hdr = None
elif not isinstance(input_hdr, fits.header.Header):
raise TypeError("The header must be a"
" astropy.io.fits.header.Header.")
self._header = input_hdr
@property
def data(self):
return self._data
@data.setter
def data(self, values):
if not isinstance(values, np.ndarray):
raise TypeError("Data is not a numpy array.")
self._data = values
def input_data_header(self, data, header):
'''
Check if the header is given separately from the data type.
'''
if header is not None:
self.data = input_data(data, no_header=True)
self.header = header
else:
self.data, self.header = input_data(data)
|
from astropy.io import fits
import numpy as np
from ..io import input_data
class BaseStatisticMixIn(object):
"""
Common properties to all statistics
"""
# Disable this flag when a statistic does not need a header
need_header_flag = True
# Disable this when the data property will not be used.
no_data_flag = False
@property
def header(self):
return self._header
@header.setter
def header(self, input_hdr):
if not self.need_header_flag:
input_hdr = None
elif not isinstance(input_hdr, fits.header.Header):
raise TypeError("The header must be a"
" astropy.io.fits.header.Header.")
self._header = input_hdr
@property
def data(self):
return self._data
@data.setter
def data(self, values):
if self.no_data_flag:
values = None
elif not isinstance(values, np.ndarray):
raise TypeError("Data is not a numpy array.")
self._data = values
def input_data_header(self, data, header):
'''
Check if the header is given separately from the data type.
'''
if header is not None:
self.data = input_data(data, no_header=True)
self.header = header
else:
self.data, self.header = input_data(data)
|
Allow data property to not be use
|
Allow data property to not be use
|
Python
|
mit
|
e-koch/TurbuStat,Astroua/TurbuStat
|
from astropy.io import fits
import numpy as np
from ..io import input_data
class BaseStatisticMixIn(object):
"""
Common properties to all statistics
"""
# Disable this flag when a statistic does not need a header
need_header_flag = True
@property
def header(self):
return self._header
@header.setter
def header(self, input_hdr):
if not self.need_header_flag:
input_hdr = None
elif not isinstance(input_hdr, fits.header.Header):
raise TypeError("The header must be a"
" astropy.io.fits.header.Header.")
self._header = input_hdr
@property
def data(self):
return self._data
@data.setter
def data(self, values):
if not isinstance(values, np.ndarray):
raise TypeError("Data is not a numpy array.")
self._data = values
def input_data_header(self, data, header):
'''
Check if the header is given separately from the data type.
'''
if header is not None:
self.data = input_data(data, no_header=True)
self.header = header
else:
self.data, self.header = input_data(data)
Allow data property to not be use
|
from astropy.io import fits
import numpy as np
from ..io import input_data
class BaseStatisticMixIn(object):
"""
Common properties to all statistics
"""
# Disable this flag when a statistic does not need a header
need_header_flag = True
# Disable this when the data property will not be used.
no_data_flag = False
@property
def header(self):
return self._header
@header.setter
def header(self, input_hdr):
if not self.need_header_flag:
input_hdr = None
elif not isinstance(input_hdr, fits.header.Header):
raise TypeError("The header must be a"
" astropy.io.fits.header.Header.")
self._header = input_hdr
@property
def data(self):
return self._data
@data.setter
def data(self, values):
if self.no_data_flag:
values = None
elif not isinstance(values, np.ndarray):
raise TypeError("Data is not a numpy array.")
self._data = values
def input_data_header(self, data, header):
'''
Check if the header is given separately from the data type.
'''
if header is not None:
self.data = input_data(data, no_header=True)
self.header = header
else:
self.data, self.header = input_data(data)
|
<commit_before>
from astropy.io import fits
import numpy as np
from ..io import input_data
class BaseStatisticMixIn(object):
"""
Common properties to all statistics
"""
# Disable this flag when a statistic does not need a header
need_header_flag = True
@property
def header(self):
return self._header
@header.setter
def header(self, input_hdr):
if not self.need_header_flag:
input_hdr = None
elif not isinstance(input_hdr, fits.header.Header):
raise TypeError("The header must be a"
" astropy.io.fits.header.Header.")
self._header = input_hdr
@property
def data(self):
return self._data
@data.setter
def data(self, values):
if not isinstance(values, np.ndarray):
raise TypeError("Data is not a numpy array.")
self._data = values
def input_data_header(self, data, header):
'''
Check if the header is given separately from the data type.
'''
if header is not None:
self.data = input_data(data, no_header=True)
self.header = header
else:
self.data, self.header = input_data(data)
<commit_msg>Allow data property to not be use<commit_after>
|
from astropy.io import fits
import numpy as np
from ..io import input_data
class BaseStatisticMixIn(object):
"""
Common properties to all statistics
"""
# Disable this flag when a statistic does not need a header
need_header_flag = True
# Disable this when the data property will not be used.
no_data_flag = False
@property
def header(self):
return self._header
@header.setter
def header(self, input_hdr):
if not self.need_header_flag:
input_hdr = None
elif not isinstance(input_hdr, fits.header.Header):
raise TypeError("The header must be a"
" astropy.io.fits.header.Header.")
self._header = input_hdr
@property
def data(self):
return self._data
@data.setter
def data(self, values):
if self.no_data_flag:
values = None
elif not isinstance(values, np.ndarray):
raise TypeError("Data is not a numpy array.")
self._data = values
def input_data_header(self, data, header):
'''
Check if the header is given separately from the data type.
'''
if header is not None:
self.data = input_data(data, no_header=True)
self.header = header
else:
self.data, self.header = input_data(data)
|
from astropy.io import fits
import numpy as np
from ..io import input_data
class BaseStatisticMixIn(object):
"""
Common properties to all statistics
"""
# Disable this flag when a statistic does not need a header
need_header_flag = True
@property
def header(self):
return self._header
@header.setter
def header(self, input_hdr):
if not self.need_header_flag:
input_hdr = None
elif not isinstance(input_hdr, fits.header.Header):
raise TypeError("The header must be a"
" astropy.io.fits.header.Header.")
self._header = input_hdr
@property
def data(self):
return self._data
@data.setter
def data(self, values):
if not isinstance(values, np.ndarray):
raise TypeError("Data is not a numpy array.")
self._data = values
def input_data_header(self, data, header):
'''
Check if the header is given separately from the data type.
'''
if header is not None:
self.data = input_data(data, no_header=True)
self.header = header
else:
self.data, self.header = input_data(data)
Allow data property to not be use
from astropy.io import fits
import numpy as np
from ..io import input_data
class BaseStatisticMixIn(object):
"""
Common properties to all statistics
"""
# Disable this flag when a statistic does not need a header
need_header_flag = True
# Disable this when the data property will not be used.
no_data_flag = False
@property
def header(self):
return self._header
@header.setter
def header(self, input_hdr):
if not self.need_header_flag:
input_hdr = None
elif not isinstance(input_hdr, fits.header.Header):
raise TypeError("The header must be a"
" astropy.io.fits.header.Header.")
self._header = input_hdr
@property
def data(self):
return self._data
@data.setter
def data(self, values):
if self.no_data_flag:
values = None
elif not isinstance(values, np.ndarray):
raise TypeError("Data is not a numpy array.")
self._data = values
def input_data_header(self, data, header):
'''
Check if the header is given separately from the data type.
'''
if header is not None:
self.data = input_data(data, no_header=True)
self.header = header
else:
self.data, self.header = input_data(data)
|
<commit_before>
from astropy.io import fits
import numpy as np
from ..io import input_data
class BaseStatisticMixIn(object):
"""
Common properties to all statistics
"""
# Disable this flag when a statistic does not need a header
need_header_flag = True
@property
def header(self):
return self._header
@header.setter
def header(self, input_hdr):
if not self.need_header_flag:
input_hdr = None
elif not isinstance(input_hdr, fits.header.Header):
raise TypeError("The header must be a"
" astropy.io.fits.header.Header.")
self._header = input_hdr
@property
def data(self):
return self._data
@data.setter
def data(self, values):
if not isinstance(values, np.ndarray):
raise TypeError("Data is not a numpy array.")
self._data = values
def input_data_header(self, data, header):
'''
Check if the header is given separately from the data type.
'''
if header is not None:
self.data = input_data(data, no_header=True)
self.header = header
else:
self.data, self.header = input_data(data)
<commit_msg>Allow data property to not be use<commit_after>
from astropy.io import fits
import numpy as np
from ..io import input_data
class BaseStatisticMixIn(object):
"""
Common properties to all statistics
"""
# Disable this flag when a statistic does not need a header
need_header_flag = True
# Disable this when the data property will not be used.
no_data_flag = False
@property
def header(self):
return self._header
@header.setter
def header(self, input_hdr):
if not self.need_header_flag:
input_hdr = None
elif not isinstance(input_hdr, fits.header.Header):
raise TypeError("The header must be a"
" astropy.io.fits.header.Header.")
self._header = input_hdr
@property
def data(self):
return self._data
@data.setter
def data(self, values):
if self.no_data_flag:
values = None
elif not isinstance(values, np.ndarray):
raise TypeError("Data is not a numpy array.")
self._data = values
def input_data_header(self, data, header):
'''
Check if the header is given separately from the data type.
'''
if header is not None:
self.data = input_data(data, no_header=True)
self.header = header
else:
self.data, self.header = input_data(data)
|
1846ebff5c71a8c3bb0c9cccd29460f656f5a21b
|
oauthlib/__init__.py
|
oauthlib/__init__.py
|
"""
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
__author__ = 'Idan Gazit <idan@gazit.me>'
__version__ = '0.6.0'
|
Add meta info in oauthlib module
|
Add meta info in oauthlib module
|
Python
|
bsd-3-clause
|
hirokiky/oauthlib,skion/oauthlib-oidc,cyrilchaponeverysens/oauthlib,Blitzen/oauthlib,mick88/oauthlib,idan/oauthlib,barseghyanartur/oauthlib,masci/oauthlib,metatoaster/oauthlib,bjmc/oauthlib,oauthlib/oauthlib,flamusdiu/oauthlib,singingwolfboy/oauthlib,armersong/oauthlib,garciasolero/oauthlib,masci/oauthlib,flamusdiu/oauthlib
|
Add meta info in oauthlib module
|
"""
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
__author__ = 'Idan Gazit <idan@gazit.me>'
__version__ = '0.6.0'
|
<commit_before><commit_msg>Add meta info in oauthlib module<commit_after>
|
"""
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
__author__ = 'Idan Gazit <idan@gazit.me>'
__version__ = '0.6.0'
|
Add meta info in oauthlib module"""
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
__author__ = 'Idan Gazit <idan@gazit.me>'
__version__ = '0.6.0'
|
<commit_before><commit_msg>Add meta info in oauthlib module<commit_after>"""
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2011 by Idan Gazit.
:license: BSD, see LICENSE for details.
"""
__author__ = 'Idan Gazit <idan@gazit.me>'
__version__ = '0.6.0'
|
|
18000a73273a65a320513c5ca119bc07e1efb37d
|
octopenstack/view.py
|
octopenstack/view.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pprint
import json
class View(object):
def service_list(self, service_list):
print('service LIST:')
for service in service_list:
print(service)
print('')
def service_information(self, action, name, *argv):
print('%s service %s with arguments:' % (action, name) )
for arg in argv:
print(arg)
print('')
def service_not_found(self, name):
print('The service "%s" does not exist' % name)
def command_not_found(self, name):
print('The command "%s" does not exist' % name)
print('Available commands are: build, create or start')
def display_stream(self, line):
jsonstream = json.loads(line)
stream = jsonstream.get('stream')
error = jsonstream.get('error')
if not error==None:
print(error)
if not stream==None:
print(stream)
def usage():
print('Commands are: build, run, rm, ip, ')
def stopping(self, tag):
print('Stoping container %s ...' % (tag))
def removing(self, tag):
print('Removing container %s ...' % (tag))
def notlaunched(self, tag):
print('Services %s not launched' % (tag))
def ip(self, tag, ipaddr):
print('Container %s on IP: %s' % (tag, ipaddr))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pprint
import json
class View(object):
def service_list(self, service_list):
print('service LIST:')
for service in service_list:
print(service)
print('')
def service_information(self, action, name, *argv):
print('%s service %s' % (action, name) )
def service_not_found(self, name):
print('The service "%s" does not exist' % name)
def command_not_found(self, name):
print('The command "%s" does not exist' % name)
print('Available commands are: build, create or start')
def display_stream(self, line):
jsonstream = json.loads(line.decode())
stream = jsonstream.get('stream')
error = jsonstream.get('error')
if not error == None:
print(error)
if not stream == None:
print(stream)
def usage():
print('Commands are: build, run, rm, ip, ')
def stopping(self, tag):
print('Stoping container %s ...' % (tag))
def removing(self, tag):
print('Removing container %s ...' % (tag))
def notlaunched(self, tag):
print('Services %s not launched' % (tag))
def ip(self, tag, ipaddr):
print('Container %s on IP: %s' % (tag, ipaddr))
|
Fix Json error for Python3 compatibility
|
Fix Json error for Python3 compatibility
|
Python
|
apache-2.0
|
epheo/shaddock,epheo/shaddock
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pprint
import json
class View(object):
def service_list(self, service_list):
print('service LIST:')
for service in service_list:
print(service)
print('')
def service_information(self, action, name, *argv):
print('%s service %s with arguments:' % (action, name) )
for arg in argv:
print(arg)
print('')
def service_not_found(self, name):
print('The service "%s" does not exist' % name)
def command_not_found(self, name):
print('The command "%s" does not exist' % name)
print('Available commands are: build, create or start')
def display_stream(self, line):
jsonstream = json.loads(line)
stream = jsonstream.get('stream')
error = jsonstream.get('error')
if not error==None:
print(error)
if not stream==None:
print(stream)
def usage():
print('Commands are: build, run, rm, ip, ')
def stopping(self, tag):
print('Stoping container %s ...' % (tag))
def removing(self, tag):
print('Removing container %s ...' % (tag))
def notlaunched(self, tag):
print('Services %s not launched' % (tag))
def ip(self, tag, ipaddr):
print('Container %s on IP: %s' % (tag, ipaddr))Fix Json error for Python3 compatibility
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pprint
import json
class View(object):
def service_list(self, service_list):
print('service LIST:')
for service in service_list:
print(service)
print('')
def service_information(self, action, name, *argv):
print('%s service %s' % (action, name) )
def service_not_found(self, name):
print('The service "%s" does not exist' % name)
def command_not_found(self, name):
print('The command "%s" does not exist' % name)
print('Available commands are: build, create or start')
def display_stream(self, line):
jsonstream = json.loads(line.decode())
stream = jsonstream.get('stream')
error = jsonstream.get('error')
if not error == None:
print(error)
if not stream == None:
print(stream)
def usage():
print('Commands are: build, run, rm, ip, ')
def stopping(self, tag):
print('Stoping container %s ...' % (tag))
def removing(self, tag):
print('Removing container %s ...' % (tag))
def notlaunched(self, tag):
print('Services %s not launched' % (tag))
def ip(self, tag, ipaddr):
print('Container %s on IP: %s' % (tag, ipaddr))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pprint
import json
class View(object):
def service_list(self, service_list):
print('service LIST:')
for service in service_list:
print(service)
print('')
def service_information(self, action, name, *argv):
print('%s service %s with arguments:' % (action, name) )
for arg in argv:
print(arg)
print('')
def service_not_found(self, name):
print('The service "%s" does not exist' % name)
def command_not_found(self, name):
print('The command "%s" does not exist' % name)
print('Available commands are: build, create or start')
def display_stream(self, line):
jsonstream = json.loads(line)
stream = jsonstream.get('stream')
error = jsonstream.get('error')
if not error==None:
print(error)
if not stream==None:
print(stream)
def usage():
print('Commands are: build, run, rm, ip, ')
def stopping(self, tag):
print('Stoping container %s ...' % (tag))
def removing(self, tag):
print('Removing container %s ...' % (tag))
def notlaunched(self, tag):
print('Services %s not launched' % (tag))
def ip(self, tag, ipaddr):
print('Container %s on IP: %s' % (tag, ipaddr))<commit_msg>Fix Json error for Python3 compatibility<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pprint
import json
class View(object):
def service_list(self, service_list):
print('service LIST:')
for service in service_list:
print(service)
print('')
def service_information(self, action, name, *argv):
print('%s service %s' % (action, name) )
def service_not_found(self, name):
print('The service "%s" does not exist' % name)
def command_not_found(self, name):
print('The command "%s" does not exist' % name)
print('Available commands are: build, create or start')
def display_stream(self, line):
jsonstream = json.loads(line.decode())
stream = jsonstream.get('stream')
error = jsonstream.get('error')
if not error == None:
print(error)
if not stream == None:
print(stream)
def usage():
print('Commands are: build, run, rm, ip, ')
def stopping(self, tag):
print('Stoping container %s ...' % (tag))
def removing(self, tag):
print('Removing container %s ...' % (tag))
def notlaunched(self, tag):
print('Services %s not launched' % (tag))
def ip(self, tag, ipaddr):
print('Container %s on IP: %s' % (tag, ipaddr))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pprint
import json
class View(object):
def service_list(self, service_list):
print('service LIST:')
for service in service_list:
print(service)
print('')
def service_information(self, action, name, *argv):
print('%s service %s with arguments:' % (action, name) )
for arg in argv:
print(arg)
print('')
def service_not_found(self, name):
print('The service "%s" does not exist' % name)
def command_not_found(self, name):
print('The command "%s" does not exist' % name)
print('Available commands are: build, create or start')
def display_stream(self, line):
jsonstream = json.loads(line)
stream = jsonstream.get('stream')
error = jsonstream.get('error')
if not error==None:
print(error)
if not stream==None:
print(stream)
def usage():
print('Commands are: build, run, rm, ip, ')
def stopping(self, tag):
print('Stoping container %s ...' % (tag))
def removing(self, tag):
print('Removing container %s ...' % (tag))
def notlaunched(self, tag):
print('Services %s not launched' % (tag))
def ip(self, tag, ipaddr):
print('Container %s on IP: %s' % (tag, ipaddr))Fix Json error for Python3 compatibility#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pprint
import json
class View(object):
def service_list(self, service_list):
print('service LIST:')
for service in service_list:
print(service)
print('')
def service_information(self, action, name, *argv):
print('%s service %s' % (action, name) )
def service_not_found(self, name):
print('The service "%s" does not exist' % name)
def command_not_found(self, name):
print('The command "%s" does not exist' % name)
print('Available commands are: build, create or start')
def display_stream(self, line):
jsonstream = json.loads(line.decode())
stream = jsonstream.get('stream')
error = jsonstream.get('error')
if not error == None:
print(error)
if not stream == None:
print(stream)
def usage():
print('Commands are: build, run, rm, ip, ')
def stopping(self, tag):
print('Stoping container %s ...' % (tag))
def removing(self, tag):
print('Removing container %s ...' % (tag))
def notlaunched(self, tag):
print('Services %s not launched' % (tag))
def ip(self, tag, ipaddr):
print('Container %s on IP: %s' % (tag, ipaddr))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pprint
import json
class View(object):
def service_list(self, service_list):
print('service LIST:')
for service in service_list:
print(service)
print('')
def service_information(self, action, name, *argv):
print('%s service %s with arguments:' % (action, name) )
for arg in argv:
print(arg)
print('')
def service_not_found(self, name):
print('The service "%s" does not exist' % name)
def command_not_found(self, name):
print('The command "%s" does not exist' % name)
print('Available commands are: build, create or start')
def display_stream(self, line):
jsonstream = json.loads(line)
stream = jsonstream.get('stream')
error = jsonstream.get('error')
if not error==None:
print(error)
if not stream==None:
print(stream)
def usage():
print('Commands are: build, run, rm, ip, ')
def stopping(self, tag):
print('Stoping container %s ...' % (tag))
def removing(self, tag):
print('Removing container %s ...' % (tag))
def notlaunched(self, tag):
print('Services %s not launched' % (tag))
def ip(self, tag, ipaddr):
print('Container %s on IP: %s' % (tag, ipaddr))<commit_msg>Fix Json error for Python3 compatibility<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pprint
import json
class View(object):
def service_list(self, service_list):
print('service LIST:')
for service in service_list:
print(service)
print('')
def service_information(self, action, name, *argv):
print('%s service %s' % (action, name) )
def service_not_found(self, name):
print('The service "%s" does not exist' % name)
def command_not_found(self, name):
print('The command "%s" does not exist' % name)
print('Available commands are: build, create or start')
def display_stream(self, line):
jsonstream = json.loads(line.decode())
stream = jsonstream.get('stream')
error = jsonstream.get('error')
if not error == None:
print(error)
if not stream == None:
print(stream)
def usage():
print('Commands are: build, run, rm, ip, ')
def stopping(self, tag):
print('Stoping container %s ...' % (tag))
def removing(self, tag):
print('Removing container %s ...' % (tag))
def notlaunched(self, tag):
print('Services %s not launched' % (tag))
def ip(self, tag, ipaddr):
print('Container %s on IP: %s' % (tag, ipaddr))
|
9982c25f3fade2cd411277f92761b8c560d36b61
|
morenines/ignores.py
|
morenines/ignores.py
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(cls, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(self, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
Fix typo from making Ignores.read an instance method
|
Fix typo from making Ignores.read an instance method
|
Python
|
mit
|
mcgid/morenines,mcgid/morenines
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(cls, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
Fix typo from making Ignores.read an instance method
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(self, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
<commit_before>import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(cls, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
<commit_msg>Fix typo from making Ignores.read an instance method<commit_after>
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(self, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(cls, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
Fix typo from making Ignores.read an instance methodimport os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(self, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
<commit_before>import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(cls, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
<commit_msg>Fix typo from making Ignores.read an instance method<commit_after>import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
def __init__(self, default_patterns=[]):
self.patterns = default_patterns
def read(self, path):
with open(path, 'r') as stream:
self.patterns.extend([line.strip() for line in stream])
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
89e196e86d3b337ff6addb9e0ba289cbd63950d5
|
netbox/extras/querysets.py
|
netbox/extras/querysets.py
|
from __future__ import unicode_literals
from django.db.models import Q, QuerySet
class ConfigContextQuerySet(QuerySet):
def get_for_object(self, obj):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
return self.filter(
Q(regions=obj.site.region) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(platforms=obj.platform) | Q(platforms=None),
is_active=True,
).order_by('weight', 'name')
|
from __future__ import unicode_literals
from django.db.models import Q, QuerySet
class ConfigContextQuerySet(QuerySet):
def get_for_object(self, obj):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
return self.filter(
Q(regions=getattr(obj.site, 'region', None)) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(platforms=obj.platform) | Q(platforms=None),
is_active=True,
).order_by('weight', 'name')
|
Tweak ConfigContext manager to allow for objects with a regionless site
|
Tweak ConfigContext manager to allow for objects with a regionless site
|
Python
|
apache-2.0
|
digitalocean/netbox,lampwins/netbox,lampwins/netbox,lampwins/netbox,digitalocean/netbox,digitalocean/netbox,lampwins/netbox,digitalocean/netbox
|
from __future__ import unicode_literals
from django.db.models import Q, QuerySet
class ConfigContextQuerySet(QuerySet):
def get_for_object(self, obj):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
return self.filter(
Q(regions=obj.site.region) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(platforms=obj.platform) | Q(platforms=None),
is_active=True,
).order_by('weight', 'name')
Tweak ConfigContext manager to allow for objects with a regionless site
|
from __future__ import unicode_literals
from django.db.models import Q, QuerySet
class ConfigContextQuerySet(QuerySet):
def get_for_object(self, obj):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
return self.filter(
Q(regions=getattr(obj.site, 'region', None)) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(platforms=obj.platform) | Q(platforms=None),
is_active=True,
).order_by('weight', 'name')
|
<commit_before>from __future__ import unicode_literals
from django.db.models import Q, QuerySet
class ConfigContextQuerySet(QuerySet):
def get_for_object(self, obj):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
return self.filter(
Q(regions=obj.site.region) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(platforms=obj.platform) | Q(platforms=None),
is_active=True,
).order_by('weight', 'name')
<commit_msg>Tweak ConfigContext manager to allow for objects with a regionless site<commit_after>
|
from __future__ import unicode_literals
from django.db.models import Q, QuerySet
class ConfigContextQuerySet(QuerySet):
def get_for_object(self, obj):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
return self.filter(
Q(regions=getattr(obj.site, 'region', None)) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(platforms=obj.platform) | Q(platforms=None),
is_active=True,
).order_by('weight', 'name')
|
from __future__ import unicode_literals
from django.db.models import Q, QuerySet
class ConfigContextQuerySet(QuerySet):
def get_for_object(self, obj):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
return self.filter(
Q(regions=obj.site.region) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(platforms=obj.platform) | Q(platforms=None),
is_active=True,
).order_by('weight', 'name')
Tweak ConfigContext manager to allow for objects with a regionless sitefrom __future__ import unicode_literals
from django.db.models import Q, QuerySet
class ConfigContextQuerySet(QuerySet):
def get_for_object(self, obj):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
return self.filter(
Q(regions=getattr(obj.site, 'region', None)) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(platforms=obj.platform) | Q(platforms=None),
is_active=True,
).order_by('weight', 'name')
|
<commit_before>from __future__ import unicode_literals
from django.db.models import Q, QuerySet
class ConfigContextQuerySet(QuerySet):
def get_for_object(self, obj):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
return self.filter(
Q(regions=obj.site.region) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(platforms=obj.platform) | Q(platforms=None),
is_active=True,
).order_by('weight', 'name')
<commit_msg>Tweak ConfigContext manager to allow for objects with a regionless site<commit_after>from __future__ import unicode_literals
from django.db.models import Q, QuerySet
class ConfigContextQuerySet(QuerySet):
def get_for_object(self, obj):
"""
Return all applicable ConfigContexts for a given object. Only active ConfigContexts will be included.
"""
# `device_role` for Device; `role` for VirtualMachine
role = getattr(obj, 'device_role', None) or obj.role
return self.filter(
Q(regions=getattr(obj.site, 'region', None)) | Q(regions=None),
Q(sites=obj.site) | Q(sites=None),
Q(roles=role) | Q(roles=None),
Q(tenants=obj.tenant) | Q(tenants=None),
Q(platforms=obj.platform) | Q(platforms=None),
is_active=True,
).order_by('weight', 'name')
|
6a7d741da6124ec3d8607b5780608b51b7aca8ba
|
editorconfig/exceptions.py
|
editorconfig/exceptions.py
|
"""EditorConfig exception classes
Licensed under PSF License (see LICENSE.txt file).
"""
class EditorConfigError(Exception):
"""Parent class of all exceptions raised by EditorConfig"""
from ConfigParser import ParsingError as _ParsingError
class ParsingError(_ParsingError, EditorConfigError):
"""Error raised if an EditorConfig file could not be parsed"""
class PathError(ValueError, EditorConfigError):
"""Error raised if invalid filepath is specified"""
class VersionError(ValueError, EditorConfigError):
"""Error raised if invalid version number is specified"""
|
"""EditorConfig exception classes
Licensed under PSF License (see LICENSE.txt file).
"""
class EditorConfigError(Exception):
"""Parent class of all exceptions raised by EditorConfig"""
try:
from ConfigParser import ParsingError as _ParsingError
except:
from configparser import ParsingError as _ParsingError
class ParsingError(_ParsingError, EditorConfigError):
"""Error raised if an EditorConfig file could not be parsed"""
class PathError(ValueError, EditorConfigError):
"""Error raised if invalid filepath is specified"""
class VersionError(ValueError, EditorConfigError):
"""Error raised if invalid version number is specified"""
|
Fix broken ConfigParser import for Python3
|
Fix broken ConfigParser import for Python3
|
Python
|
bsd-2-clause
|
benjifisher/editorconfig-vim,VictorBjelkholm/editorconfig-vim,johnfraney/editorconfig-vim,benjifisher/editorconfig-vim,pocke/editorconfig-vim,pocke/editorconfig-vim,pocke/editorconfig-vim,VictorBjelkholm/editorconfig-vim,johnfraney/editorconfig-vim,VictorBjelkholm/editorconfig-vim,benjifisher/editorconfig-vim,johnfraney/editorconfig-vim
|
"""EditorConfig exception classes
Licensed under PSF License (see LICENSE.txt file).
"""
class EditorConfigError(Exception):
"""Parent class of all exceptions raised by EditorConfig"""
from ConfigParser import ParsingError as _ParsingError
class ParsingError(_ParsingError, EditorConfigError):
"""Error raised if an EditorConfig file could not be parsed"""
class PathError(ValueError, EditorConfigError):
"""Error raised if invalid filepath is specified"""
class VersionError(ValueError, EditorConfigError):
"""Error raised if invalid version number is specified"""
Fix broken ConfigParser import for Python3
|
"""EditorConfig exception classes
Licensed under PSF License (see LICENSE.txt file).
"""
class EditorConfigError(Exception):
"""Parent class of all exceptions raised by EditorConfig"""
try:
from ConfigParser import ParsingError as _ParsingError
except:
from configparser import ParsingError as _ParsingError
class ParsingError(_ParsingError, EditorConfigError):
"""Error raised if an EditorConfig file could not be parsed"""
class PathError(ValueError, EditorConfigError):
"""Error raised if invalid filepath is specified"""
class VersionError(ValueError, EditorConfigError):
"""Error raised if invalid version number is specified"""
|
<commit_before>"""EditorConfig exception classes
Licensed under PSF License (see LICENSE.txt file).
"""
class EditorConfigError(Exception):
"""Parent class of all exceptions raised by EditorConfig"""
from ConfigParser import ParsingError as _ParsingError
class ParsingError(_ParsingError, EditorConfigError):
"""Error raised if an EditorConfig file could not be parsed"""
class PathError(ValueError, EditorConfigError):
"""Error raised if invalid filepath is specified"""
class VersionError(ValueError, EditorConfigError):
"""Error raised if invalid version number is specified"""
<commit_msg>Fix broken ConfigParser import for Python3<commit_after>
|
"""EditorConfig exception classes
Licensed under PSF License (see LICENSE.txt file).
"""
class EditorConfigError(Exception):
"""Parent class of all exceptions raised by EditorConfig"""
try:
from ConfigParser import ParsingError as _ParsingError
except:
from configparser import ParsingError as _ParsingError
class ParsingError(_ParsingError, EditorConfigError):
"""Error raised if an EditorConfig file could not be parsed"""
class PathError(ValueError, EditorConfigError):
"""Error raised if invalid filepath is specified"""
class VersionError(ValueError, EditorConfigError):
"""Error raised if invalid version number is specified"""
|
"""EditorConfig exception classes
Licensed under PSF License (see LICENSE.txt file).
"""
class EditorConfigError(Exception):
"""Parent class of all exceptions raised by EditorConfig"""
from ConfigParser import ParsingError as _ParsingError
class ParsingError(_ParsingError, EditorConfigError):
"""Error raised if an EditorConfig file could not be parsed"""
class PathError(ValueError, EditorConfigError):
"""Error raised if invalid filepath is specified"""
class VersionError(ValueError, EditorConfigError):
"""Error raised if invalid version number is specified"""
Fix broken ConfigParser import for Python3"""EditorConfig exception classes
Licensed under PSF License (see LICENSE.txt file).
"""
class EditorConfigError(Exception):
"""Parent class of all exceptions raised by EditorConfig"""
try:
from ConfigParser import ParsingError as _ParsingError
except:
from configparser import ParsingError as _ParsingError
class ParsingError(_ParsingError, EditorConfigError):
"""Error raised if an EditorConfig file could not be parsed"""
class PathError(ValueError, EditorConfigError):
"""Error raised if invalid filepath is specified"""
class VersionError(ValueError, EditorConfigError):
"""Error raised if invalid version number is specified"""
|
<commit_before>"""EditorConfig exception classes
Licensed under PSF License (see LICENSE.txt file).
"""
class EditorConfigError(Exception):
"""Parent class of all exceptions raised by EditorConfig"""
from ConfigParser import ParsingError as _ParsingError
class ParsingError(_ParsingError, EditorConfigError):
"""Error raised if an EditorConfig file could not be parsed"""
class PathError(ValueError, EditorConfigError):
"""Error raised if invalid filepath is specified"""
class VersionError(ValueError, EditorConfigError):
"""Error raised if invalid version number is specified"""
<commit_msg>Fix broken ConfigParser import for Python3<commit_after>"""EditorConfig exception classes
Licensed under PSF License (see LICENSE.txt file).
"""
class EditorConfigError(Exception):
"""Parent class of all exceptions raised by EditorConfig"""
try:
from ConfigParser import ParsingError as _ParsingError
except:
from configparser import ParsingError as _ParsingError
class ParsingError(_ParsingError, EditorConfigError):
"""Error raised if an EditorConfig file could not be parsed"""
class PathError(ValueError, EditorConfigError):
"""Error raised if invalid filepath is specified"""
class VersionError(ValueError, EditorConfigError):
"""Error raised if invalid version number is specified"""
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.