commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5125bbfcf96ff0d3f2690198b43ed96059eb6745
|
common/parsableText.py
|
common/parsableText.py
|
from docutils import core
class ParsableText:
"""Allow to parse a string with different parsers"""
def __init__(self,content,mode="rst"):
"""Init the object. Content is the string to be parsed. Mode is the parser to be used. Currently, only rst(reStructuredText) and HTML are supported"""
if mode not in ["rst","HTML"]:
raise Exception("Unknown text parser: "+ mode)
self.content = content
self.mode = mode
def parse(self):
"""Returns parsed text"""
if self.mode == "HTML":
return self.content
else:
return self.rst(self.content)
def __str__(self):
"""Returns parsed text"""
return self.parse()
def rst(self,s):
"""Parses reStructuredText"""
parts = core.publish_parts(source=s,writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']
|
from docutils import core
class ParsableText:
"""Allow to parse a string with different parsers"""
def __init__(self,content,mode="rst"):
"""Init the object. Content is the string to be parsed. Mode is the parser to be used. Currently, only rst(reStructuredText) and HTML are supported"""
if mode not in ["rst","HTML"]:
raise Exception("Unknown text parser: "+ mode)
self.content = content
self.mode = mode
def parse(self):
"""Returns parsed text"""
if self.mode == "HTML":
return self.content
else:
return self.rst(self.content)
def __str__(self):
"""Returns parsed text"""
return self.parse()
def __unicode__(self):
"""Returns parsed text"""
return self.parse()
def rst(self,s):
"""Parses reStructuredText"""
parts = core.publish_parts(source=s,writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']
|
Fix unicode in parsable text
|
Fix unicode in parsable text
|
Python
|
agpl-3.0
|
GuillaumeDerval/INGInious,GuillaumeDerval/INGInious,layus/INGInious,layus/INGInious,layus/INGInious,GuillaumeDerval/INGInious,GuillaumeDerval/INGInious,layus/INGInious
|
from docutils import core
class ParsableText:
"""Allow to parse a string with different parsers"""
def __init__(self,content,mode="rst"):
"""Init the object. Content is the string to be parsed. Mode is the parser to be used. Currently, only rst(reStructuredText) and HTML are supported"""
if mode not in ["rst","HTML"]:
raise Exception("Unknown text parser: "+ mode)
self.content = content
self.mode = mode
def parse(self):
"""Returns parsed text"""
if self.mode == "HTML":
return self.content
else:
return self.rst(self.content)
def __str__(self):
"""Returns parsed text"""
return self.parse()
def rst(self,s):
"""Parses reStructuredText"""
parts = core.publish_parts(source=s,writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']Fix unicode in parsable text
|
from docutils import core
class ParsableText:
"""Allow to parse a string with different parsers"""
def __init__(self,content,mode="rst"):
"""Init the object. Content is the string to be parsed. Mode is the parser to be used. Currently, only rst(reStructuredText) and HTML are supported"""
if mode not in ["rst","HTML"]:
raise Exception("Unknown text parser: "+ mode)
self.content = content
self.mode = mode
def parse(self):
"""Returns parsed text"""
if self.mode == "HTML":
return self.content
else:
return self.rst(self.content)
def __str__(self):
"""Returns parsed text"""
return self.parse()
def __unicode__(self):
"""Returns parsed text"""
return self.parse()
def rst(self,s):
"""Parses reStructuredText"""
parts = core.publish_parts(source=s,writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']
|
<commit_before>from docutils import core
class ParsableText:
"""Allow to parse a string with different parsers"""
def __init__(self,content,mode="rst"):
"""Init the object. Content is the string to be parsed. Mode is the parser to be used. Currently, only rst(reStructuredText) and HTML are supported"""
if mode not in ["rst","HTML"]:
raise Exception("Unknown text parser: "+ mode)
self.content = content
self.mode = mode
def parse(self):
"""Returns parsed text"""
if self.mode == "HTML":
return self.content
else:
return self.rst(self.content)
def __str__(self):
"""Returns parsed text"""
return self.parse()
def rst(self,s):
"""Parses reStructuredText"""
parts = core.publish_parts(source=s,writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']<commit_msg>Fix unicode in parsable text<commit_after>
|
from docutils import core
class ParsableText:
"""Allow to parse a string with different parsers"""
def __init__(self,content,mode="rst"):
"""Init the object. Content is the string to be parsed. Mode is the parser to be used. Currently, only rst(reStructuredText) and HTML are supported"""
if mode not in ["rst","HTML"]:
raise Exception("Unknown text parser: "+ mode)
self.content = content
self.mode = mode
def parse(self):
"""Returns parsed text"""
if self.mode == "HTML":
return self.content
else:
return self.rst(self.content)
def __str__(self):
"""Returns parsed text"""
return self.parse()
def __unicode__(self):
"""Returns parsed text"""
return self.parse()
def rst(self,s):
"""Parses reStructuredText"""
parts = core.publish_parts(source=s,writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']
|
from docutils import core
class ParsableText:
"""Allow to parse a string with different parsers"""
def __init__(self,content,mode="rst"):
"""Init the object. Content is the string to be parsed. Mode is the parser to be used. Currently, only rst(reStructuredText) and HTML are supported"""
if mode not in ["rst","HTML"]:
raise Exception("Unknown text parser: "+ mode)
self.content = content
self.mode = mode
def parse(self):
"""Returns parsed text"""
if self.mode == "HTML":
return self.content
else:
return self.rst(self.content)
def __str__(self):
"""Returns parsed text"""
return self.parse()
def rst(self,s):
"""Parses reStructuredText"""
parts = core.publish_parts(source=s,writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']Fix unicode in parsable textfrom docutils import core
class ParsableText:
"""Allow to parse a string with different parsers"""
def __init__(self,content,mode="rst"):
"""Init the object. Content is the string to be parsed. Mode is the parser to be used. Currently, only rst(reStructuredText) and HTML are supported"""
if mode not in ["rst","HTML"]:
raise Exception("Unknown text parser: "+ mode)
self.content = content
self.mode = mode
def parse(self):
"""Returns parsed text"""
if self.mode == "HTML":
return self.content
else:
return self.rst(self.content)
def __str__(self):
"""Returns parsed text"""
return self.parse()
def __unicode__(self):
"""Returns parsed text"""
return self.parse()
def rst(self,s):
"""Parses reStructuredText"""
parts = core.publish_parts(source=s,writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']
|
<commit_before>from docutils import core
class ParsableText:
"""Allow to parse a string with different parsers"""
def __init__(self,content,mode="rst"):
"""Init the object. Content is the string to be parsed. Mode is the parser to be used. Currently, only rst(reStructuredText) and HTML are supported"""
if mode not in ["rst","HTML"]:
raise Exception("Unknown text parser: "+ mode)
self.content = content
self.mode = mode
def parse(self):
"""Returns parsed text"""
if self.mode == "HTML":
return self.content
else:
return self.rst(self.content)
def __str__(self):
"""Returns parsed text"""
return self.parse()
def rst(self,s):
"""Parses reStructuredText"""
parts = core.publish_parts(source=s,writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']<commit_msg>Fix unicode in parsable text<commit_after>from docutils import core
class ParsableText:
"""Allow to parse a string with different parsers"""
def __init__(self,content,mode="rst"):
"""Init the object. Content is the string to be parsed. Mode is the parser to be used. Currently, only rst(reStructuredText) and HTML are supported"""
if mode not in ["rst","HTML"]:
raise Exception("Unknown text parser: "+ mode)
self.content = content
self.mode = mode
def parse(self):
"""Returns parsed text"""
if self.mode == "HTML":
return self.content
else:
return self.rst(self.content)
def __str__(self):
"""Returns parsed text"""
return self.parse()
def __unicode__(self):
"""Returns parsed text"""
return self.parse()
def rst(self,s):
"""Parses reStructuredText"""
parts = core.publish_parts(source=s,writer_name='html')
return parts['body_pre_docinfo']+parts['fragment']
|
acd376d854693cacf8ca20a9971dcd2653a22429
|
rlpy/Agents/__init__.py
|
rlpy/Agents/__init__.py
|
from .TDControlAgent import Q_Learning, SARSA
# for compatibility of old scripts
Q_LEARNING = Q_Learning
from .Greedy_GQ import Greedy_GQ
from .LSPI import LSPI
from .LSPI_SARSA import LSPI_SARSA
from .NaturalActorCritic import NaturalActorCritic
|
from .TDControlAgent import Q_Learning, SARSA
# for compatibility of old scripts
Q_LEARNING = Q_Learning
from .Greedy_GQ import Greedy_GQ
from .LSPI import LSPI
from .LSPI_SARSA import LSPI_SARSA
from .NaturalActorCritic import NaturalActorCritic
from .PosteriorSampling import PosteriorSampling
from .UCRL import UCRL
|
Add new agents to init file
|
Add new agents to init file
|
Python
|
bsd-3-clause
|
imanolarrieta/RL,imanolarrieta/RL,imanolarrieta/RL
|
from .TDControlAgent import Q_Learning, SARSA
# for compatibility of old scripts
Q_LEARNING = Q_Learning
from .Greedy_GQ import Greedy_GQ
from .LSPI import LSPI
from .LSPI_SARSA import LSPI_SARSA
from .NaturalActorCritic import NaturalActorCritic
Add new agents to init file
|
from .TDControlAgent import Q_Learning, SARSA
# for compatibility of old scripts
Q_LEARNING = Q_Learning
from .Greedy_GQ import Greedy_GQ
from .LSPI import LSPI
from .LSPI_SARSA import LSPI_SARSA
from .NaturalActorCritic import NaturalActorCritic
from .PosteriorSampling import PosteriorSampling
from .UCRL import UCRL
|
<commit_before>from .TDControlAgent import Q_Learning, SARSA
# for compatibility of old scripts
Q_LEARNING = Q_Learning
from .Greedy_GQ import Greedy_GQ
from .LSPI import LSPI
from .LSPI_SARSA import LSPI_SARSA
from .NaturalActorCritic import NaturalActorCritic
<commit_msg>Add new agents to init file<commit_after>
|
from .TDControlAgent import Q_Learning, SARSA
# for compatibility of old scripts
Q_LEARNING = Q_Learning
from .Greedy_GQ import Greedy_GQ
from .LSPI import LSPI
from .LSPI_SARSA import LSPI_SARSA
from .NaturalActorCritic import NaturalActorCritic
from .PosteriorSampling import PosteriorSampling
from .UCRL import UCRL
|
from .TDControlAgent import Q_Learning, SARSA
# for compatibility of old scripts
Q_LEARNING = Q_Learning
from .Greedy_GQ import Greedy_GQ
from .LSPI import LSPI
from .LSPI_SARSA import LSPI_SARSA
from .NaturalActorCritic import NaturalActorCritic
Add new agents to init filefrom .TDControlAgent import Q_Learning, SARSA
# for compatibility of old scripts
Q_LEARNING = Q_Learning
from .Greedy_GQ import Greedy_GQ
from .LSPI import LSPI
from .LSPI_SARSA import LSPI_SARSA
from .NaturalActorCritic import NaturalActorCritic
from .PosteriorSampling import PosteriorSampling
from .UCRL import UCRL
|
<commit_before>from .TDControlAgent import Q_Learning, SARSA
# for compatibility of old scripts
Q_LEARNING = Q_Learning
from .Greedy_GQ import Greedy_GQ
from .LSPI import LSPI
from .LSPI_SARSA import LSPI_SARSA
from .NaturalActorCritic import NaturalActorCritic
<commit_msg>Add new agents to init file<commit_after>from .TDControlAgent import Q_Learning, SARSA
# for compatibility of old scripts
Q_LEARNING = Q_Learning
from .Greedy_GQ import Greedy_GQ
from .LSPI import LSPI
from .LSPI_SARSA import LSPI_SARSA
from .NaturalActorCritic import NaturalActorCritic
from .PosteriorSampling import PosteriorSampling
from .UCRL import UCRL
|
48bc050c59d60037fa719542db8f6a0c68752ed1
|
config/flask_config.py
|
config/flask_config.py
|
# flake8: noqa: E501
import config.options
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME,
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session secret key
SECRET_KEY = '\xec5\xea\xc9\x9f,o\xd7v\xac\x06\xe2\xeeK2\xb9\x1d\x8a\xdel\xb27\x8a\xa8>\x07\n\xd4Z\xfeO\xa1'
|
# flake8: noqa: E501
import config.options
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME,
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session cookie name
SESSION_COOKIE_NAME = 'linkr-session'
# Flask session secret key
SECRET_KEY = '\xec5\xea\xc9\x9f,o\xd7v\xac\x06\xe2\xeeK2\xb9\x1d\x8a\xdel\xb27\x8a\xa8>\x07\n\xd4Z\xfeO\xa1'
|
Use Linkr-unique session cookie name
|
Use Linkr-unique session cookie name
|
Python
|
mit
|
LINKIWI/linkr,LINKIWI/linkr,LINKIWI/linkr
|
# flake8: noqa: E501
import config.options
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME,
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session secret key
SECRET_KEY = '\xec5\xea\xc9\x9f,o\xd7v\xac\x06\xe2\xeeK2\xb9\x1d\x8a\xdel\xb27\x8a\xa8>\x07\n\xd4Z\xfeO\xa1'
Use Linkr-unique session cookie name
|
# flake8: noqa: E501
import config.options
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME,
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session cookie name
SESSION_COOKIE_NAME = 'linkr-session'
# Flask session secret key
SECRET_KEY = '\xec5\xea\xc9\x9f,o\xd7v\xac\x06\xe2\xeeK2\xb9\x1d\x8a\xdel\xb27\x8a\xa8>\x07\n\xd4Z\xfeO\xa1'
|
<commit_before># flake8: noqa: E501
import config.options
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME,
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session secret key
SECRET_KEY = '\xec5\xea\xc9\x9f,o\xd7v\xac\x06\xe2\xeeK2\xb9\x1d\x8a\xdel\xb27\x8a\xa8>\x07\n\xd4Z\xfeO\xa1'
<commit_msg>Use Linkr-unique session cookie name<commit_after>
|
# flake8: noqa: E501
import config.options
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME,
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session cookie name
SESSION_COOKIE_NAME = 'linkr-session'
# Flask session secret key
SECRET_KEY = '\xec5\xea\xc9\x9f,o\xd7v\xac\x06\xe2\xeeK2\xb9\x1d\x8a\xdel\xb27\x8a\xa8>\x07\n\xd4Z\xfeO\xa1'
|
# flake8: noqa: E501
import config.options
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME,
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session secret key
SECRET_KEY = '\xec5\xea\xc9\x9f,o\xd7v\xac\x06\xe2\xeeK2\xb9\x1d\x8a\xdel\xb27\x8a\xa8>\x07\n\xd4Z\xfeO\xa1'
Use Linkr-unique session cookie name# flake8: noqa: E501
import config.options
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME,
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session cookie name
SESSION_COOKIE_NAME = 'linkr-session'
# Flask session secret key
SECRET_KEY = '\xec5\xea\xc9\x9f,o\xd7v\xac\x06\xe2\xeeK2\xb9\x1d\x8a\xdel\xb27\x8a\xa8>\x07\n\xd4Z\xfeO\xa1'
|
<commit_before># flake8: noqa: E501
import config.options
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME,
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session secret key
SECRET_KEY = '\xec5\xea\xc9\x9f,o\xd7v\xac\x06\xe2\xeeK2\xb9\x1d\x8a\xdel\xb27\x8a\xa8>\x07\n\xd4Z\xfeO\xa1'
<commit_msg>Use Linkr-unique session cookie name<commit_after># flake8: noqa: E501
import config.options
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME,
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.options.DATABASE_USER,
database_password=config.options.DATABASE_PASSWORD,
database_host=config.options.DATABASE_HOST,
database_name=config.options.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session cookie name
SESSION_COOKIE_NAME = 'linkr-session'
# Flask session secret key
SECRET_KEY = '\xec5\xea\xc9\x9f,o\xd7v\xac\x06\xe2\xeeK2\xb9\x1d\x8a\xdel\xb27\x8a\xa8>\x07\n\xd4Z\xfeO\xa1'
|
3ed02baa8ad7fcd1f6ca5cccc4f67799ec79e272
|
kimi.py
|
kimi.py
|
# Kimi language interpreter in Python 3
# Anjana Vakil
# http://www.github.com/vakila/kimi
import sys
def tokenize(program):
'''Take a Kimi program as a string, return the tokenized program as a list of strings.
>>> tokenize("(+ 1 2)")
['(', '+', '1', '2', ')']
>>> tokenize("(define square (lambda x (* x x)))")
['(', 'define', 'square', '(', 'lambda', 'x', '(', '*', 'x', 'x', ')', ')', ')']
'''
program = program.replace("(", " ( ")
program = program.replace(")", " ) ")
tokens = program.split()
return tokens
def parse(tokens):
pass
def evaluate(tree):
pass
if __name__ == "__main__":
program = sys.argv[1]
print(tokenize(program))
|
# Kimi language interpreter in Python 3
# Anjana Vakil
# http://www.github.com/vakila/kimi
import sys
def tokenize(string):
'''Take a Kimi program as a string, return the tokenized program as a list of strings.
>>> tokenize("(+ 1 2)")
['(', '+', '1', '2', ')']
>>> tokenize("(define square (lambda x (* x x)))")
['(', 'define', 'square', '(', 'lambda', 'x', '(', '*', 'x', 'x', ')', ')', ')']
'''
string = string.replace("(", " ( ")
string = string.replace(")", " ) ")
tokens = string.split()
return tokens
def parse(tokens):
pass
def evaluate(tree):
pass
if __name__ == "__main__":
program = sys.argv[1]
print(tokenize(program))
|
Rename program to string in tokenize
|
Rename program to string in tokenize
|
Python
|
mit
|
vakila/kimi
|
# Kimi language interpreter in Python 3
# Anjana Vakil
# http://www.github.com/vakila/kimi
import sys
def tokenize(program):
'''Take a Kimi program as a string, return the tokenized program as a list of strings.
>>> tokenize("(+ 1 2)")
['(', '+', '1', '2', ')']
>>> tokenize("(define square (lambda x (* x x)))")
['(', 'define', 'square', '(', 'lambda', 'x', '(', '*', 'x', 'x', ')', ')', ')']
'''
program = program.replace("(", " ( ")
program = program.replace(")", " ) ")
tokens = program.split()
return tokens
def parse(tokens):
pass
def evaluate(tree):
pass
if __name__ == "__main__":
program = sys.argv[1]
print(tokenize(program))
Rename program to string in tokenize
|
# Kimi language interpreter in Python 3
# Anjana Vakil
# http://www.github.com/vakila/kimi
import sys
def tokenize(string):
'''Take a Kimi program as a string, return the tokenized program as a list of strings.
>>> tokenize("(+ 1 2)")
['(', '+', '1', '2', ')']
>>> tokenize("(define square (lambda x (* x x)))")
['(', 'define', 'square', '(', 'lambda', 'x', '(', '*', 'x', 'x', ')', ')', ')']
'''
string = string.replace("(", " ( ")
string = string.replace(")", " ) ")
tokens = string.split()
return tokens
def parse(tokens):
pass
def evaluate(tree):
pass
if __name__ == "__main__":
program = sys.argv[1]
print(tokenize(program))
|
<commit_before># Kimi language interpreter in Python 3
# Anjana Vakil
# http://www.github.com/vakila/kimi
import sys
def tokenize(program):
'''Take a Kimi program as a string, return the tokenized program as a list of strings.
>>> tokenize("(+ 1 2)")
['(', '+', '1', '2', ')']
>>> tokenize("(define square (lambda x (* x x)))")
['(', 'define', 'square', '(', 'lambda', 'x', '(', '*', 'x', 'x', ')', ')', ')']
'''
program = program.replace("(", " ( ")
program = program.replace(")", " ) ")
tokens = program.split()
return tokens
def parse(tokens):
pass
def evaluate(tree):
pass
if __name__ == "__main__":
program = sys.argv[1]
print(tokenize(program))
<commit_msg>Rename program to string in tokenize<commit_after>
|
# Kimi language interpreter in Python 3
# Anjana Vakil
# http://www.github.com/vakila/kimi
import sys
def tokenize(string):
'''Take a Kimi program as a string, return the tokenized program as a list of strings.
>>> tokenize("(+ 1 2)")
['(', '+', '1', '2', ')']
>>> tokenize("(define square (lambda x (* x x)))")
['(', 'define', 'square', '(', 'lambda', 'x', '(', '*', 'x', 'x', ')', ')', ')']
'''
string = string.replace("(", " ( ")
string = string.replace(")", " ) ")
tokens = string.split()
return tokens
def parse(tokens):
pass
def evaluate(tree):
pass
if __name__ == "__main__":
program = sys.argv[1]
print(tokenize(program))
|
# Kimi language interpreter in Python 3
# Anjana Vakil
# http://www.github.com/vakila/kimi
import sys
def tokenize(program):
'''Take a Kimi program as a string, return the tokenized program as a list of strings.
>>> tokenize("(+ 1 2)")
['(', '+', '1', '2', ')']
>>> tokenize("(define square (lambda x (* x x)))")
['(', 'define', 'square', '(', 'lambda', 'x', '(', '*', 'x', 'x', ')', ')', ')']
'''
program = program.replace("(", " ( ")
program = program.replace(")", " ) ")
tokens = program.split()
return tokens
def parse(tokens):
pass
def evaluate(tree):
pass
if __name__ == "__main__":
program = sys.argv[1]
print(tokenize(program))
Rename program to string in tokenize# Kimi language interpreter in Python 3
# Anjana Vakil
# http://www.github.com/vakila/kimi
import sys
def tokenize(string):
'''Take a Kimi program as a string, return the tokenized program as a list of strings.
>>> tokenize("(+ 1 2)")
['(', '+', '1', '2', ')']
>>> tokenize("(define square (lambda x (* x x)))")
['(', 'define', 'square', '(', 'lambda', 'x', '(', '*', 'x', 'x', ')', ')', ')']
'''
string = string.replace("(", " ( ")
string = string.replace(")", " ) ")
tokens = string.split()
return tokens
def parse(tokens):
pass
def evaluate(tree):
pass
if __name__ == "__main__":
program = sys.argv[1]
print(tokenize(program))
|
<commit_before># Kimi language interpreter in Python 3
# Anjana Vakil
# http://www.github.com/vakila/kimi
import sys
def tokenize(program):
'''Take a Kimi program as a string, return the tokenized program as a list of strings.
>>> tokenize("(+ 1 2)")
['(', '+', '1', '2', ')']
>>> tokenize("(define square (lambda x (* x x)))")
['(', 'define', 'square', '(', 'lambda', 'x', '(', '*', 'x', 'x', ')', ')', ')']
'''
program = program.replace("(", " ( ")
program = program.replace(")", " ) ")
tokens = program.split()
return tokens
def parse(tokens):
pass
def evaluate(tree):
pass
if __name__ == "__main__":
program = sys.argv[1]
print(tokenize(program))
<commit_msg>Rename program to string in tokenize<commit_after># Kimi language interpreter in Python 3
# Anjana Vakil
# http://www.github.com/vakila/kimi
import sys
def tokenize(string):
'''Take a Kimi program as a string, return the tokenized program as a list of strings.
>>> tokenize("(+ 1 2)")
['(', '+', '1', '2', ')']
>>> tokenize("(define square (lambda x (* x x)))")
['(', 'define', 'square', '(', 'lambda', 'x', '(', '*', 'x', 'x', ')', ')', ')']
'''
string = string.replace("(", " ( ")
string = string.replace(")", " ) ")
tokens = string.split()
return tokens
def parse(tokens):
pass
def evaluate(tree):
pass
if __name__ == "__main__":
program = sys.argv[1]
print(tokenize(program))
|
592105b9ee6a9c6f3d9bd7358bc5ab18f8ded0c8
|
jfr_playoff/remote.py
|
jfr_playoff/remote.py
|
import re
import requests
from bs4 import BeautifulSoup as bs
from jfr_playoff.logger import PlayoffLogger
class RemoteUrl:
url_cache = {}
@classmethod
def fetch_raw(cls, url):
PlayoffLogger.get('remote').info(
'fetching content for: %s', url)
if url not in cls.url_cache:
request = requests.get(url)
encoding_match = re.search(
'content=".*;( )?charset=(.*?)"',
request.content, re.IGNORECASE)
PlayoffLogger.get('remote').debug(
'Content encoding: %s',
encoding_match.group(2))
if encoding_match:
request.encoding = encoding_match.group(2)
cls.url_cache[url] = request.text
PlayoffLogger.get('remote').info(
'fetched %d bytes from remote location',
len(cls.url_cache[url]))
return cls.url_cache[url]
@classmethod
def fetch(cls, url):
return bs(RemoteUrl.fetch_raw(url), 'lxml')
@classmethod
def clear_cache(cls):
cls.url_cache = {}
|
import re
import requests
from bs4 import BeautifulSoup as bs
from jfr_playoff.logger import PlayoffLogger
class RemoteUrl:
url_cache = {}
@classmethod
def fetch_raw(cls, url):
PlayoffLogger.get('remote').info(
'fetching content for: %s', url)
if url not in cls.url_cache:
request = requests.get(url)
encoding_match = re.search(
'content=".*;( )?charset=(.*?)"',
request.content, re.IGNORECASE)
if encoding_match:
PlayoffLogger.get('remote').debug(
'Content encoding: %s',
encoding_match.group(2))
request.encoding = encoding_match.group(2)
cls.url_cache[url] = request.text
PlayoffLogger.get('remote').info(
'fetched %d bytes from remote location',
len(cls.url_cache[url]))
return cls.url_cache[url]
@classmethod
def fetch(cls, url):
return bs(RemoteUrl.fetch_raw(url), 'lxml')
@classmethod
def clear_cache(cls):
cls.url_cache = {}
|
Print detected content encoding info only if it's actually been detected
|
Print detected content encoding info only if it's actually been detected
|
Python
|
bsd-2-clause
|
emkael/jfrteamy-playoff,emkael/jfrteamy-playoff
|
import re
import requests
from bs4 import BeautifulSoup as bs
from jfr_playoff.logger import PlayoffLogger
class RemoteUrl:
url_cache = {}
@classmethod
def fetch_raw(cls, url):
PlayoffLogger.get('remote').info(
'fetching content for: %s', url)
if url not in cls.url_cache:
request = requests.get(url)
encoding_match = re.search(
'content=".*;( )?charset=(.*?)"',
request.content, re.IGNORECASE)
PlayoffLogger.get('remote').debug(
'Content encoding: %s',
encoding_match.group(2))
if encoding_match:
request.encoding = encoding_match.group(2)
cls.url_cache[url] = request.text
PlayoffLogger.get('remote').info(
'fetched %d bytes from remote location',
len(cls.url_cache[url]))
return cls.url_cache[url]
@classmethod
def fetch(cls, url):
return bs(RemoteUrl.fetch_raw(url), 'lxml')
@classmethod
def clear_cache(cls):
cls.url_cache = {}
Print detected content encoding info only if it's actually been detected
|
import re
import requests
from bs4 import BeautifulSoup as bs
from jfr_playoff.logger import PlayoffLogger
class RemoteUrl:
    """Caching fetcher for remote HTML documents."""

    # Shared by all callers: url -> decoded body text.
    url_cache = {}

    @classmethod
    def fetch_raw(cls, url):
        """Return the body text for *url*, downloading it only once."""
        PlayoffLogger.get('remote').info(
            'fetching content for: %s', url)
        if url not in cls.url_cache:
            response = requests.get(url)
            # Honor a charset declared in an HTML meta tag, if any.
            charset_match = re.search(
                'content=".*;( )?charset=(.*?)"',
                response.content, re.IGNORECASE)
            if charset_match:
                PlayoffLogger.get('remote').debug(
                    'Content encoding: %s', charset_match.group(2))
                response.encoding = charset_match.group(2)
            cls.url_cache[url] = response.text
        PlayoffLogger.get('remote').info(
            'fetched %d bytes from remote location',
            len(cls.url_cache[url]))
        return cls.url_cache[url]

    @classmethod
    def fetch(cls, url):
        """Return *url* parsed into a BeautifulSoup tree (lxml parser)."""
        raw = RemoteUrl.fetch_raw(url)
        return bs(raw, 'lxml')

    @classmethod
    def clear_cache(cls):
        """Forget every cached response."""
        cls.url_cache = {}
|
<commit_before>import re
import requests
from bs4 import BeautifulSoup as bs
from jfr_playoff.logger import PlayoffLogger
class RemoteUrl:
    """Fetches remote HTML documents, caching the decoded text per URL."""

    # Class-wide cache: url -> decoded response text.
    url_cache = {}

    @classmethod
    def fetch_raw(cls, url):
        """Return the response body for *url*, hitting the network at most once."""
        PlayoffLogger.get('remote').info(
            'fetching content for: %s', url)
        if url not in cls.url_cache:
            request = requests.get(url)
            # Sniff the charset declared in an HTML meta tag so request.text
            # decodes with the page's own encoding.
            # NOTE(review): the pattern is a str while response.content is
            # bytes on Python 3 -- this code assumes Python 2; confirm.
            encoding_match = re.search(
                'content=".*;( )?charset=(.*?)"',
                request.content, re.IGNORECASE)
            # BUG FIX: only log/dereference the match when a charset was
            # actually found; the old code called encoding_match.group(2)
            # before the None check and raised AttributeError on pages
            # without a charset declaration.
            if encoding_match:
                PlayoffLogger.get('remote').debug(
                    'Content encoding: %s',
                    encoding_match.group(2))
                request.encoding = encoding_match.group(2)
            cls.url_cache[url] = request.text
        PlayoffLogger.get('remote').info(
            'fetched %d bytes from remote location',
            len(cls.url_cache[url]))
        return cls.url_cache[url]

    @classmethod
    def fetch(cls, url):
        """Return *url*'s content parsed into a BeautifulSoup tree (lxml)."""
        return bs(RemoteUrl.fetch_raw(url), 'lxml')

    @classmethod
    def clear_cache(cls):
        """Drop all cached responses."""
        cls.url_cache = {}
<commit_msg>Print detected content encoding info only if it's actually been detected<commit_after>
|
import re
import requests
from bs4 import BeautifulSoup as bs
from jfr_playoff.logger import PlayoffLogger
class RemoteUrl:
    """Caching fetcher for remote HTML documents."""

    # Shared by all callers: url -> decoded body text.
    url_cache = {}

    @classmethod
    def fetch_raw(cls, url):
        """Return the body text for *url*, downloading it only once."""
        PlayoffLogger.get('remote').info(
            'fetching content for: %s', url)
        if url not in cls.url_cache:
            response = requests.get(url)
            # Honor a charset declared in an HTML meta tag, if any.
            charset_match = re.search(
                'content=".*;( )?charset=(.*?)"',
                response.content, re.IGNORECASE)
            if charset_match:
                PlayoffLogger.get('remote').debug(
                    'Content encoding: %s', charset_match.group(2))
                response.encoding = charset_match.group(2)
            cls.url_cache[url] = response.text
        PlayoffLogger.get('remote').info(
            'fetched %d bytes from remote location',
            len(cls.url_cache[url]))
        return cls.url_cache[url]

    @classmethod
    def fetch(cls, url):
        """Return *url* parsed into a BeautifulSoup tree (lxml parser)."""
        raw = RemoteUrl.fetch_raw(url)
        return bs(raw, 'lxml')

    @classmethod
    def clear_cache(cls):
        """Forget every cached response."""
        cls.url_cache = {}
|
import re
import requests
from bs4 import BeautifulSoup as bs
from jfr_playoff.logger import PlayoffLogger
class RemoteUrl:
    """Fetches remote HTML documents, caching the decoded text per URL."""

    # Class-wide cache: url -> decoded response text.
    url_cache = {}

    @classmethod
    def fetch_raw(cls, url):
        """Return the response body for *url*, hitting the network at most once."""
        PlayoffLogger.get('remote').info(
            'fetching content for: %s', url)
        if url not in cls.url_cache:
            request = requests.get(url)
            # Sniff the charset declared in an HTML meta tag so request.text
            # decodes with the page's own encoding.
            # NOTE(review): the pattern is a str while response.content is
            # bytes on Python 3 -- this code assumes Python 2; confirm.
            encoding_match = re.search(
                'content=".*;( )?charset=(.*?)"',
                request.content, re.IGNORECASE)
            # BUG FIX: only log/dereference the match when a charset was
            # actually found; the old code called encoding_match.group(2)
            # before the None check and raised AttributeError on pages
            # without a charset declaration.
            if encoding_match:
                PlayoffLogger.get('remote').debug(
                    'Content encoding: %s',
                    encoding_match.group(2))
                request.encoding = encoding_match.group(2)
            cls.url_cache[url] = request.text
        PlayoffLogger.get('remote').info(
            'fetched %d bytes from remote location',
            len(cls.url_cache[url]))
        return cls.url_cache[url]

    @classmethod
    def fetch(cls, url):
        """Return *url*'s content parsed into a BeautifulSoup tree (lxml)."""
        return bs(RemoteUrl.fetch_raw(url), 'lxml')

    @classmethod
    def clear_cache(cls):
        """Drop all cached responses."""
        cls.url_cache = {}
Print detected content encoding info only if it's actually been detectedimport re
import requests
from bs4 import BeautifulSoup as bs
from jfr_playoff.logger import PlayoffLogger
class RemoteUrl:
    """Caching fetcher for remote HTML documents."""

    # Shared by all callers: url -> decoded body text.
    url_cache = {}

    @classmethod
    def fetch_raw(cls, url):
        """Return the body text for *url*, downloading it only once."""
        PlayoffLogger.get('remote').info(
            'fetching content for: %s', url)
        if url not in cls.url_cache:
            response = requests.get(url)
            # Honor a charset declared in an HTML meta tag, if any.
            charset_match = re.search(
                'content=".*;( )?charset=(.*?)"',
                response.content, re.IGNORECASE)
            if charset_match:
                PlayoffLogger.get('remote').debug(
                    'Content encoding: %s', charset_match.group(2))
                response.encoding = charset_match.group(2)
            cls.url_cache[url] = response.text
        PlayoffLogger.get('remote').info(
            'fetched %d bytes from remote location',
            len(cls.url_cache[url]))
        return cls.url_cache[url]

    @classmethod
    def fetch(cls, url):
        """Return *url* parsed into a BeautifulSoup tree (lxml parser)."""
        raw = RemoteUrl.fetch_raw(url)
        return bs(raw, 'lxml')

    @classmethod
    def clear_cache(cls):
        """Forget every cached response."""
        cls.url_cache = {}
|
<commit_before>import re
import requests
from bs4 import BeautifulSoup as bs
from jfr_playoff.logger import PlayoffLogger
class RemoteUrl:
    """Fetches remote HTML documents, caching the decoded text per URL."""

    # Class-wide cache: url -> decoded response text.
    url_cache = {}

    @classmethod
    def fetch_raw(cls, url):
        """Return the response body for *url*, hitting the network at most once."""
        PlayoffLogger.get('remote').info(
            'fetching content for: %s', url)
        if url not in cls.url_cache:
            request = requests.get(url)
            # Sniff the charset declared in an HTML meta tag so request.text
            # decodes with the page's own encoding.
            # NOTE(review): the pattern is a str while response.content is
            # bytes on Python 3 -- this code assumes Python 2; confirm.
            encoding_match = re.search(
                'content=".*;( )?charset=(.*?)"',
                request.content, re.IGNORECASE)
            # BUG FIX: only log/dereference the match when a charset was
            # actually found; the old code called encoding_match.group(2)
            # before the None check and raised AttributeError on pages
            # without a charset declaration.
            if encoding_match:
                PlayoffLogger.get('remote').debug(
                    'Content encoding: %s',
                    encoding_match.group(2))
                request.encoding = encoding_match.group(2)
            cls.url_cache[url] = request.text
        PlayoffLogger.get('remote').info(
            'fetched %d bytes from remote location',
            len(cls.url_cache[url]))
        return cls.url_cache[url]

    @classmethod
    def fetch(cls, url):
        """Return *url*'s content parsed into a BeautifulSoup tree (lxml)."""
        return bs(RemoteUrl.fetch_raw(url), 'lxml')

    @classmethod
    def clear_cache(cls):
        """Drop all cached responses."""
        cls.url_cache = {}
<commit_msg>Print detected content encoding info only if it's actually been detected<commit_after>import re
import requests
from bs4 import BeautifulSoup as bs
from jfr_playoff.logger import PlayoffLogger
class RemoteUrl:
    """Caching fetcher for remote HTML documents."""

    # Shared by all callers: url -> decoded body text.
    url_cache = {}

    @classmethod
    def fetch_raw(cls, url):
        """Return the body text for *url*, downloading it only once."""
        PlayoffLogger.get('remote').info(
            'fetching content for: %s', url)
        if url not in cls.url_cache:
            response = requests.get(url)
            # Honor a charset declared in an HTML meta tag, if any.
            charset_match = re.search(
                'content=".*;( )?charset=(.*?)"',
                response.content, re.IGNORECASE)
            if charset_match:
                PlayoffLogger.get('remote').debug(
                    'Content encoding: %s', charset_match.group(2))
                response.encoding = charset_match.group(2)
            cls.url_cache[url] = response.text
        PlayoffLogger.get('remote').info(
            'fetched %d bytes from remote location',
            len(cls.url_cache[url]))
        return cls.url_cache[url]

    @classmethod
    def fetch(cls, url):
        """Return *url* parsed into a BeautifulSoup tree (lxml parser)."""
        raw = RemoteUrl.fetch_raw(url)
        return bs(raw, 'lxml')

    @classmethod
    def clear_cache(cls):
        """Forget every cached response."""
        cls.url_cache = {}
|
a7c40b43d90f32d0da4de1389d859865ae283180
|
seleniumbase/config/proxy_list.py
|
seleniumbase/config/proxy_list.py
|
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
# Shorthand name -> proxy string ("host:port" or "user:pass@host:port",
# no scheme). Slots set to None are placeholders for your own servers.
PROXY_LIST = dict(
    example1="46.28.229.75:3128",  # (Example) - set your own proxy here
    example2="82.200.233.4:3128",  # (Example) - set your own proxy here
    example3="128.199.214.87:3128",  # (Example) - set your own proxy here
    proxy1=None,
    proxy2=None,
    proxy3=None,
    proxy4=None,
    proxy5=None,
)
|
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
# Shorthand name -> proxy string ("host:port" or "user:pass@host:port",
# no scheme). Slots set to None are placeholders for your own servers.
PROXY_LIST = dict(
    example1="45.133.182.18:18080",  # (Example) - set your own proxy here
    example2="95.174.67.50:18080",  # (Example) - set your own proxy here
    example3="83.97.23.90:18080",  # (Example) - set your own proxy here
    example4="82.200.233.4:3128",  # (Example) - set your own proxy here
    proxy1=None,
    proxy2=None,
    proxy3=None,
    proxy4=None,
    proxy5=None,
)
|
Update the proxy list examples
|
Update the proxy list examples
|
Python
|
mit
|
mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
|
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
# Shorthand name -> proxy string ("host:port" or "user:pass@host:port",
# no scheme). Slots set to None are placeholders for your own servers.
PROXY_LIST = dict(
    example1="46.28.229.75:3128",  # (Example) - set your own proxy here
    example2="82.200.233.4:3128",  # (Example) - set your own proxy here
    example3="128.199.214.87:3128",  # (Example) - set your own proxy here
    proxy1=None,
    proxy2=None,
    proxy3=None,
    proxy4=None,
    proxy5=None,
)
Update the proxy list examples
|
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
# Shorthand name -> proxy string ("host:port" or "user:pass@host:port",
# no scheme). Slots set to None are placeholders for your own servers.
PROXY_LIST = dict(
    example1="45.133.182.18:18080",  # (Example) - set your own proxy here
    example2="95.174.67.50:18080",  # (Example) - set your own proxy here
    example3="83.97.23.90:18080",  # (Example) - set your own proxy here
    example4="82.200.233.4:3128",  # (Example) - set your own proxy here
    proxy1=None,
    proxy2=None,
    proxy3=None,
    proxy4=None,
    proxy5=None,
)
|
<commit_before>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
# Shorthand name -> proxy string ("host:port" or "user:pass@host:port",
# no scheme). Slots set to None are placeholders for your own servers.
PROXY_LIST = dict(
    example1="46.28.229.75:3128",  # (Example) - set your own proxy here
    example2="82.200.233.4:3128",  # (Example) - set your own proxy here
    example3="128.199.214.87:3128",  # (Example) - set your own proxy here
    proxy1=None,
    proxy2=None,
    proxy3=None,
    proxy4=None,
    proxy5=None,
)
<commit_msg>Update the proxy list examples<commit_after>
|
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
# Shorthand name -> proxy string ("host:port" or "user:pass@host:port",
# no scheme). Slots set to None are placeholders for your own servers.
PROXY_LIST = dict(
    example1="45.133.182.18:18080",  # (Example) - set your own proxy here
    example2="95.174.67.50:18080",  # (Example) - set your own proxy here
    example3="83.97.23.90:18080",  # (Example) - set your own proxy here
    example4="82.200.233.4:3128",  # (Example) - set your own proxy here
    proxy1=None,
    proxy2=None,
    proxy3=None,
    proxy4=None,
    proxy5=None,
)
|
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
# Shorthand name -> proxy string ("host:port" or "user:pass@host:port",
# no scheme). Slots set to None are placeholders for your own servers.
PROXY_LIST = dict(
    example1="46.28.229.75:3128",  # (Example) - set your own proxy here
    example2="82.200.233.4:3128",  # (Example) - set your own proxy here
    example3="128.199.214.87:3128",  # (Example) - set your own proxy here
    proxy1=None,
    proxy2=None,
    proxy3=None,
    proxy4=None,
    proxy5=None,
)
Update the proxy list examples"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
# Shorthand name -> proxy string ("host:port" or "user:pass@host:port",
# no scheme). Slots set to None are placeholders for your own servers.
PROXY_LIST = dict(
    example1="45.133.182.18:18080",  # (Example) - set your own proxy here
    example2="95.174.67.50:18080",  # (Example) - set your own proxy here
    example3="83.97.23.90:18080",  # (Example) - set your own proxy here
    example4="82.200.233.4:3128",  # (Example) - set your own proxy here
    proxy1=None,
    proxy2=None,
    proxy3=None,
    proxy4=None,
    proxy5=None,
)
|
<commit_before>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
# Shorthand name -> proxy string ("host:port" or "user:pass@host:port",
# no scheme). Slots set to None are placeholders for your own servers.
PROXY_LIST = dict(
    example1="46.28.229.75:3128",  # (Example) - set your own proxy here
    example2="82.200.233.4:3128",  # (Example) - set your own proxy here
    example3="128.199.214.87:3128",  # (Example) - set your own proxy here
    proxy1=None,
    proxy2=None,
    proxy3=None,
    proxy4=None,
    proxy5=None,
)
<commit_msg>Update the proxy list examples<commit_after>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
# Shorthand name -> proxy string ("host:port" or "user:pass@host:port",
# no scheme). Slots set to None are placeholders for your own servers.
PROXY_LIST = dict(
    example1="45.133.182.18:18080",  # (Example) - set your own proxy here
    example2="95.174.67.50:18080",  # (Example) - set your own proxy here
    example3="83.97.23.90:18080",  # (Example) - set your own proxy here
    example4="82.200.233.4:3128",  # (Example) - set your own proxy here
    proxy1=None,
    proxy2=None,
    proxy3=None,
    proxy4=None,
    proxy5=None,
)
|
b158e65839b9b662d56bd43dfd362ad26da70184
|
__init__.py
|
__init__.py
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
    """Describe this plugin to the plugin registry."""
    return dict(name="CuraEngine Backend", type="Backend")


def register(app):
    """Create the CuraEngine backend and install it as *app*'s backend."""
    Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
    app.setBackend(CuraEngineBackend.CuraEngineBackend())
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
    """Describe this plugin to the plugin registry."""
    return dict(name="CuraEngine Backend", type="Backend")


def register(app):
    """Create and return the CuraEngine backend instance."""
    Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
    backend = CuraEngineBackend.CuraEngineBackend()
    return backend
|
Update plugin's register functions to return the object instance instead of performing the registration themselves
|
Update plugin's register functions to return the object instance instead of performing the registration themselves
|
Python
|
agpl-3.0
|
Curahelper/Cura,Curahelper/Cura,bq/Ultimaker-Cura,DeskboxBrazil/Cura,lo0ol/Ultimaker-Cura,quillford/Cura,fxtentacle/Cura,totalretribution/Cura,DeskboxBrazil/Cura,hmflash/Cura,ynotstartups/Wanhao,markwal/Cura,ad1217/Cura,ynotstartups/Wanhao,derekhe/Cura,lo0ol/Ultimaker-Cura,senttech/Cura,totalretribution/Cura,bq/Ultimaker-Cura,fxtentacle/Cura,fieldOfView/Cura,quillford/Cura,senttech/Cura,hmflash/Cura,ad1217/Cura,derekhe/Cura,fieldOfView/Cura,markwal/Cura
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
    """Describe this plugin to the plugin registry."""
    return dict(name="CuraEngine Backend", type="Backend")


def register(app):
    """Create the CuraEngine backend and install it as *app*'s backend."""
    Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
    app.setBackend(CuraEngineBackend.CuraEngineBackend())
Update plugin's register functions to return the object instance instead of performing the registration themselves
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
    """Describe this plugin to the plugin registry."""
    return dict(name="CuraEngine Backend", type="Backend")


def register(app):
    """Create and return the CuraEngine backend instance."""
    Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
    backend = CuraEngineBackend.CuraEngineBackend()
    return backend
|
<commit_before>#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
    """Describe this plugin to the plugin registry."""
    return dict(name="CuraEngine Backend", type="Backend")


def register(app):
    """Create the CuraEngine backend and install it as *app*'s backend."""
    Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
    app.setBackend(CuraEngineBackend.CuraEngineBackend())
<commit_msg>Update plugin's register functions to return the object instance instead of performing the registration themselves<commit_after>
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
    """Describe this plugin to the plugin registry."""
    return dict(name="CuraEngine Backend", type="Backend")


def register(app):
    """Create and return the CuraEngine backend instance."""
    Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
    backend = CuraEngineBackend.CuraEngineBackend()
    return backend
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
    """Describe this plugin to the plugin registry."""
    return dict(name="CuraEngine Backend", type="Backend")


def register(app):
    """Create the CuraEngine backend and install it as *app*'s backend."""
    Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
    app.setBackend(CuraEngineBackend.CuraEngineBackend())
Update plugin's register functions to return the object instance instead of performing the registration themselves#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
    """Describe this plugin to the plugin registry."""
    return dict(name="CuraEngine Backend", type="Backend")


def register(app):
    """Create and return the CuraEngine backend instance."""
    Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
    backend = CuraEngineBackend.CuraEngineBackend()
    return backend
|
<commit_before>#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
    """Describe this plugin to the plugin registry."""
    return dict(name="CuraEngine Backend", type="Backend")


def register(app):
    """Create the CuraEngine backend and install it as *app*'s backend."""
    Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
    app.setBackend(CuraEngineBackend.CuraEngineBackend())
<commit_msg>Update plugin's register functions to return the object instance instead of performing the registration themselves<commit_after>#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
    """Describe this plugin to the plugin registry."""
    return dict(name="CuraEngine Backend", type="Backend")


def register(app):
    """Create and return the CuraEngine backend instance."""
    Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
    backend = CuraEngineBackend.CuraEngineBackend()
    return backend
|
c612b92847dc89bb4cd4b63502c43a7a9f63c52f
|
tx_salaries/utils/transformers/mixins.py
|
tx_salaries/utils/transformers/mixins.py
|
class OrganizationMixin(object):
    """Mixin providing a generic ``organization`` property.

    The host class must supply ``ORGANIZATION_NAME`` (top-level agency
    name) and a ``department`` attribute.
    """

    @property
    def organization(self):
        # Two-level hierarchy: the agency with one department child.
        return {
            'name': self.ORGANIZATION_NAME,
            'children': [{'name': unicode(self.department)}],
        }
|
class OrganizationMixin(object):
    """Mixin providing a generic ``organization`` property.

    The host class must supply ``ORGANIZATION_NAME`` (top-level agency
    name) and a ``department`` attribute.
    """

    @property
    def organization(self):
        # Two-level hierarchy: the agency with one department child.
        return {
            'name': self.ORGANIZATION_NAME,
            'children': [{'name': unicode(self.department)}],
        }
|
Add a docblock for this mixin
|
Add a docblock for this mixin
|
Python
|
apache-2.0
|
texastribune/tx_salaries,texastribune/tx_salaries
|
class OrganizationMixin(object):
    """Mixin providing a generic ``organization`` property.

    The host class must supply ``ORGANIZATION_NAME`` (top-level agency
    name) and a ``department`` attribute.
    """

    @property
    def organization(self):
        # Two-level hierarchy: the agency with one department child.
        return {
            'name': self.ORGANIZATION_NAME,
            'children': [{'name': unicode(self.department)}],
        }
Add a docblock for this mixin
|
class OrganizationMixin(object):
    """Mixin providing a generic ``organization`` property.

    The host class must supply ``ORGANIZATION_NAME`` (top-level agency
    name) and a ``department`` attribute.
    """

    @property
    def organization(self):
        # Two-level hierarchy: the agency with one department child.
        return {
            'name': self.ORGANIZATION_NAME,
            'children': [{'name': unicode(self.department)}],
        }
|
<commit_before>class OrganizationMixin(object):
@property
def organization(self):
return {
'name': self.ORGANIZATION_NAME,
'children': [{
'name': unicode(self.department),
}],
}
<commit_msg>Add a docblock for this mixin<commit_after>
|
class OrganizationMixin(object):
    """Mixin providing a generic ``organization`` property.

    The host class must supply ``ORGANIZATION_NAME`` (top-level agency
    name) and a ``department`` attribute.
    """

    @property
    def organization(self):
        # Two-level hierarchy: the agency with one department child.
        return {
            'name': self.ORGANIZATION_NAME,
            'children': [{'name': unicode(self.department)}],
        }
|
class OrganizationMixin(object):
    """Mixin providing a generic ``organization`` property.

    The host class must supply ``ORGANIZATION_NAME`` (top-level agency
    name) and a ``department`` attribute.
    """

    @property
    def organization(self):
        # Two-level hierarchy: the agency with one department child.
        return {
            'name': self.ORGANIZATION_NAME,
            'children': [{'name': unicode(self.department)}],
        }
Add a docblock for this mixinclass OrganizationMixin(object):
"""
Adds a generic ``organization`` property to the class
This requires that the class mixing it in adds an
``ORGANIZATION_NAME`` property of the main level agency or
department.
"""
@property
def organization(self):
return {
'name': self.ORGANIZATION_NAME,
'children': [{
'name': unicode(self.department),
}],
}
|
<commit_before>class OrganizationMixin(object):
@property
def organization(self):
return {
'name': self.ORGANIZATION_NAME,
'children': [{
'name': unicode(self.department),
}],
}
<commit_msg>Add a docblock for this mixin<commit_after>class OrganizationMixin(object):
"""
Adds a generic ``organization`` property to the class
This requires that the class mixing it in adds an
``ORGANIZATION_NAME`` property of the main level agency or
department.
"""
@property
def organization(self):
return {
'name': self.ORGANIZATION_NAME,
'children': [{
'name': unicode(self.department),
}],
}
|
a08005a03ccce63a541e8e41b0d98e9c7c30cc67
|
vispy/visuals/graphs/layouts/circular.py
|
vispy/visuals/graphs/layouts/circular.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============
This module contains several graph layouts which rely heavily on circles.
"""
import numpy as np
from ..util import _straight_line_vertices
def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, is will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]

    # linspace with endpoint=False gives exactly num_nodes evenly spaced
    # angles; arange with a float step can produce an extra or missing
    # sample due to floating-point step accumulation.
    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False,
                    dtype=np.float32)

    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T

    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)

    yield node_coords, line_vertices, arrows
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============
This module contains several graph layouts which rely heavily on circles.
"""
import numpy as np
from ..util import _straight_line_vertices
def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, is will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]

    # BUG FIX: the previous line spelled ``np.linpsace`` (AttributeError at
    # runtime) and passed ``endpt`` instead of the real ``endpoint`` keyword.
    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False,
                    dtype=np.float32)

    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T

    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)

    yield node_coords, line_vertices, arrows
|
Use the more obvious linspace instead of arange
|
Use the more obvious linspace instead of arange
|
Python
|
bsd-3-clause
|
michaelaye/vispy,Eric89GXL/vispy,drufat/vispy,drufat/vispy,drufat/vispy,michaelaye/vispy,ghisvail/vispy,ghisvail/vispy,Eric89GXL/vispy,ghisvail/vispy,michaelaye/vispy,Eric89GXL/vispy
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============
This module contains several graph layouts which rely heavily on circles.
"""
import numpy as np
from ..util import _straight_line_vertices
def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, is will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]

    # linspace with endpoint=False gives exactly num_nodes evenly spaced
    # angles; arange with a float step can produce an extra or missing
    # sample due to floating-point step accumulation.
    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False,
                    dtype=np.float32)

    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T

    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)

    yield node_coords, line_vertices, arrows
Use the more obvious linspace instead of arange
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============
This module contains several graph layouts which rely heavily on circles.
"""
import numpy as np
from ..util import _straight_line_vertices
def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, is will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]

    # BUG FIX: the previous line spelled ``np.linpsace`` (AttributeError at
    # runtime) and passed ``endpt`` instead of the real ``endpoint`` keyword.
    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False,
                    dtype=np.float32)

    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T

    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)

    yield node_coords, line_vertices, arrows
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============
This module contains several graph layouts which rely heavily on circles.
"""
import numpy as np
from ..util import _straight_line_vertices
def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, is will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]

    # linspace with endpoint=False gives exactly num_nodes evenly spaced
    # angles; arange with a float step can produce an extra or missing
    # sample due to floating-point step accumulation.
    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False,
                    dtype=np.float32)

    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T

    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)

    yield node_coords, line_vertices, arrows
<commit_msg>Use the more obvious linspace instead of arange<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============
This module contains several graph layouts which rely heavily on circles.
"""
import numpy as np
from ..util import _straight_line_vertices
def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, is will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]

    # BUG FIX: the previous line spelled ``np.linpsace`` (AttributeError at
    # runtime) and passed ``endpt`` instead of the real ``endpoint`` keyword.
    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False,
                    dtype=np.float32)

    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T

    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)

    yield node_coords, line_vertices, arrows
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============
This module contains several graph layouts which rely heavily on circles.
"""
import numpy as np
from ..util import _straight_line_vertices
def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, it will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]
    # linspace with endpoint=False replaces arange with a float step:
    # arange's non-integer step accumulates rounding error and can yield
    # an extra/missing sample; linspace states the count directly.
    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False, dtype=np.float32)
    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T
    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)
    yield node_coords, line_vertices, arrows
Use the more obvious linspace instead of arange# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============
This module contains several graph layouts which rely heavily on circles.
"""
import numpy as np
from ..util import _straight_line_vertices
def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, it will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]
    # Fixed typos: np.linpsace -> np.linspace, endpt -> endpoint.
    # endpoint=False keeps the last node from landing on top of the first
    # (0 rad == 2*pi rad).
    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False, dtype=np.float32)
    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T
    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)
    yield node_coords, line_vertices, arrows
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============
This module contains several graph layouts which rely heavily on circles.
"""
import numpy as np
from ..util import _straight_line_vertices
def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, it will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]
    # linspace with endpoint=False replaces arange with a float step:
    # arange's non-integer step accumulates rounding error and can yield
    # an extra/missing sample; linspace states the count directly.
    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False, dtype=np.float32)
    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T
    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)
    yield node_coords, line_vertices, arrows
<commit_msg>Use the more obvious linspace instead of arange<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============
This module contains several graph layouts which rely heavily on circles.
"""
import numpy as np
from ..util import _straight_line_vertices
def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, it will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]
    # Fixed typos: np.linpsace -> np.linspace, endpt -> endpoint.
    # endpoint=False keeps the last node from landing on top of the first
    # (0 rad == 2*pi rad).
    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False, dtype=np.float32)
    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T
    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)
    yield node_coords, line_vertices, arrows
|
76b0c364b8bfbc553d3eedc97e4805299b8d9974
|
extensions/ExtGameController.py
|
extensions/ExtGameController.py
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
    """Deployment-specific extension point for GameController.

    Extra game modes are registered by populating ``additional_modes``;
    they are combined with any modes passed to the constructor.
    """

    # To publish extra modes, replace the empty list with GameMode
    # instances, e.g.:
    #
    #   additional_modes = [
    #       GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
    #       GameMode(mode="hexTough", priority=5, digits=3,
    #                guesses_allowed=3, digit_type=1)
    #   ]
    additional_modes = []

    def __init__(self, game_modes=None, mode=None, game_json=None):
        # Reject anything that is neither None nor a list before delegating.
        if game_modes is not None and not isinstance(game_modes, list):
            raise TypeError("ExtGameController expected a list of GameMode objects")
        extra = [] if game_modes is None else game_modes
        super(ExtGameController, self).__init__(
            game_json=game_json,
            mode=mode,
            game_modes=self.additional_modes + extra
        )
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
    """Deployment-specific extension point for GameController.

    ``additional_modes`` publishes the extra game modes shipped by this
    deployment; they are combined with any modes passed to the constructor.
    """

    # Further modes can be appended here, e.g.:
    #   GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0)
    additional_modes = [
        GameMode(
            mode="hexTough",
            priority=5,
            digits=3,
            guesses_allowed=3,
            digit_type=1,
            help_text="Guess a set of 3 digits between 0 and F",
            instruction_text="hexTough is a hard hexidecimal based game. You need to "
                             "guess 3 digits, each of which needs to be a hex number "
                             "(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, A, B, C, D, E, or F). "
                             "The numbers can be passed as hex (0x0, 0xd, 0xE) or as "
                             "strings (A, b, C, 0, 5, etc.)."
        )
    ]

    def __init__(self, game_modes=None, mode=None, game_json=None):
        # Reject anything that is neither None nor a list before delegating.
        if game_modes is not None and not isinstance(game_modes, list):
            raise TypeError("ExtGameController expected a list of GameMode objects")
        extra = [] if game_modes is None else game_modes
        super(ExtGameController, self).__init__(
            game_json=game_json,
            mode=mode,
            game_modes=self.additional_modes + extra
        )
|
Update to include instruction and help texts in GET response.
|
Update to include instruction and help texts in GET response.
|
Python
|
apache-2.0
|
dsandersAzure/python_cowbull_server,dsandersAzure/python_cowbull_server
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
    """Deployment-specific extension point for GameController.

    Extra game modes are registered by populating ``additional_modes``;
    they are combined with any modes passed to the constructor.
    """

    # To publish extra modes, replace the empty list with GameMode
    # instances, e.g.:
    #
    #   additional_modes = [
    #       GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
    #       GameMode(mode="hexTough", priority=5, digits=3,
    #                guesses_allowed=3, digit_type=1)
    #   ]
    additional_modes = []

    def __init__(self, game_modes=None, mode=None, game_json=None):
        # Reject anything that is neither None nor a list before delegating.
        if game_modes is not None and not isinstance(game_modes, list):
            raise TypeError("ExtGameController expected a list of GameMode objects")
        extra = [] if game_modes is None else game_modes
        super(ExtGameController, self).__init__(
            game_json=game_json,
            mode=mode,
            game_modes=self.additional_modes + extra
        )
Update to include instruction and help texts in GET response.
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
    """Deployment-specific extension point for GameController.

    ``additional_modes`` publishes the extra game modes shipped by this
    deployment; they are combined with any modes passed to the constructor.
    """

    # Further modes can be appended here, e.g.:
    #   GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0)
    additional_modes = [
        GameMode(
            mode="hexTough",
            priority=5,
            digits=3,
            guesses_allowed=3,
            digit_type=1,
            help_text="Guess a set of 3 digits between 0 and F",
            instruction_text="hexTough is a hard hexidecimal based game. You need to "
                             "guess 3 digits, each of which needs to be a hex number "
                             "(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, A, B, C, D, E, or F). "
                             "The numbers can be passed as hex (0x0, 0xd, 0xE) or as "
                             "strings (A, b, C, 0, 5, etc.)."
        )
    ]

    def __init__(self, game_modes=None, mode=None, game_json=None):
        # Reject anything that is neither None nor a list before delegating.
        if game_modes is not None and not isinstance(game_modes, list):
            raise TypeError("ExtGameController expected a list of GameMode objects")
        extra = [] if game_modes is None else game_modes
        super(ExtGameController, self).__init__(
            game_json=game_json,
            mode=mode,
            game_modes=self.additional_modes + extra
        )
|
<commit_before>from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
    """Deployment-specific extension point for GameController.

    Extra game modes are registered by populating ``additional_modes``;
    they are combined with any modes passed to the constructor.
    """

    # To publish extra modes, replace the empty list with GameMode
    # instances, e.g.:
    #
    #   additional_modes = [
    #       GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
    #       GameMode(mode="hexTough", priority=5, digits=3,
    #                guesses_allowed=3, digit_type=1)
    #   ]
    additional_modes = []

    def __init__(self, game_modes=None, mode=None, game_json=None):
        # Reject anything that is neither None nor a list before delegating.
        if game_modes is not None and not isinstance(game_modes, list):
            raise TypeError("ExtGameController expected a list of GameMode objects")
        extra = [] if game_modes is None else game_modes
        super(ExtGameController, self).__init__(
            game_json=game_json,
            mode=mode,
            game_modes=self.additional_modes + extra
        )
<commit_msg>Update to include instruction and help texts in GET response.<commit_after>
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
    """Deployment-specific extension point for GameController.

    ``additional_modes`` publishes the extra game modes shipped by this
    deployment; they are combined with any modes passed to the constructor.
    """

    # Further modes can be appended here, e.g.:
    #   GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0)
    additional_modes = [
        GameMode(
            mode="hexTough",
            priority=5,
            digits=3,
            guesses_allowed=3,
            digit_type=1,
            help_text="Guess a set of 3 digits between 0 and F",
            instruction_text="hexTough is a hard hexidecimal based game. You need to "
                             "guess 3 digits, each of which needs to be a hex number "
                             "(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, A, B, C, D, E, or F). "
                             "The numbers can be passed as hex (0x0, 0xd, 0xE) or as "
                             "strings (A, b, C, 0, 5, etc.)."
        )
    ]

    def __init__(self, game_modes=None, mode=None, game_json=None):
        # Reject anything that is neither None nor a list before delegating.
        if game_modes is not None and not isinstance(game_modes, list):
            raise TypeError("ExtGameController expected a list of GameMode objects")
        extra = [] if game_modes is None else game_modes
        super(ExtGameController, self).__init__(
            game_json=game_json,
            mode=mode,
            game_modes=self.additional_modes + extra
        )
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
    """Deployment-specific extension point for GameController.

    Extra game modes are registered by populating ``additional_modes``;
    they are combined with any modes passed to the constructor.
    """

    # To publish extra modes, replace the empty list with GameMode
    # instances, e.g.:
    #
    #   additional_modes = [
    #       GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
    #       GameMode(mode="hexTough", priority=5, digits=3,
    #                guesses_allowed=3, digit_type=1)
    #   ]
    additional_modes = []

    def __init__(self, game_modes=None, mode=None, game_json=None):
        # Reject anything that is neither None nor a list before delegating.
        if game_modes is not None and not isinstance(game_modes, list):
            raise TypeError("ExtGameController expected a list of GameMode objects")
        extra = [] if game_modes is None else game_modes
        super(ExtGameController, self).__init__(
            game_json=game_json,
            mode=mode,
            game_modes=self.additional_modes + extra
        )
Update to include instruction and help texts in GET response.from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
    """Deployment-specific extension point for GameController.

    ``additional_modes`` publishes the extra game modes shipped by this
    deployment; they are combined with any modes passed to the constructor.
    """

    # Further modes can be appended here, e.g.:
    #   GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0)
    additional_modes = [
        GameMode(
            mode="hexTough",
            priority=5,
            digits=3,
            guesses_allowed=3,
            digit_type=1,
            help_text="Guess a set of 3 digits between 0 and F",
            instruction_text="hexTough is a hard hexidecimal based game. You need to "
                             "guess 3 digits, each of which needs to be a hex number "
                             "(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, A, B, C, D, E, or F). "
                             "The numbers can be passed as hex (0x0, 0xd, 0xE) or as "
                             "strings (A, b, C, 0, 5, etc.)."
        )
    ]

    def __init__(self, game_modes=None, mode=None, game_json=None):
        # Reject anything that is neither None nor a list before delegating.
        if game_modes is not None and not isinstance(game_modes, list):
            raise TypeError("ExtGameController expected a list of GameMode objects")
        extra = [] if game_modes is None else game_modes
        super(ExtGameController, self).__init__(
            game_json=game_json,
            mode=mode,
            game_modes=self.additional_modes + extra
        )
|
<commit_before>from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
    """Deployment-specific extension point for GameController.

    Extra game modes are registered by populating ``additional_modes``;
    they are combined with any modes passed to the constructor.
    """

    # To publish extra modes, replace the empty list with GameMode
    # instances, e.g.:
    #
    #   additional_modes = [
    #       GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
    #       GameMode(mode="hexTough", priority=5, digits=3,
    #                guesses_allowed=3, digit_type=1)
    #   ]
    additional_modes = []

    def __init__(self, game_modes=None, mode=None, game_json=None):
        # Reject anything that is neither None nor a list before delegating.
        if game_modes is not None and not isinstance(game_modes, list):
            raise TypeError("ExtGameController expected a list of GameMode objects")
        extra = [] if game_modes is None else game_modes
        super(ExtGameController, self).__init__(
            game_json=game_json,
            mode=mode,
            game_modes=self.additional_modes + extra
        )
<commit_msg>Update to include instruction and help texts in GET response.<commit_after>from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
    """Deployment-specific extension point for GameController.

    ``additional_modes`` publishes the extra game modes shipped by this
    deployment; they are combined with any modes passed to the constructor.
    """

    # Further modes can be appended here, e.g.:
    #   GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0)
    additional_modes = [
        GameMode(
            mode="hexTough",
            priority=5,
            digits=3,
            guesses_allowed=3,
            digit_type=1,
            help_text="Guess a set of 3 digits between 0 and F",
            instruction_text="hexTough is a hard hexidecimal based game. You need to "
                             "guess 3 digits, each of which needs to be a hex number "
                             "(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, A, B, C, D, E, or F). "
                             "The numbers can be passed as hex (0x0, 0xd, 0xE) or as "
                             "strings (A, b, C, 0, 5, etc.)."
        )
    ]

    def __init__(self, game_modes=None, mode=None, game_json=None):
        # Reject anything that is neither None nor a list before delegating.
        if game_modes is not None and not isinstance(game_modes, list):
            raise TypeError("ExtGameController expected a list of GameMode objects")
        extra = [] if game_modes is None else game_modes
        super(ExtGameController, self).__init__(
            game_json=game_json,
            mode=mode,
            game_modes=self.additional_modes + extra
        )
|
19fcd893b88fd2ac9891904af93baf76b49fd5c0
|
plasmapy/_metadata.py
|
plasmapy/_metadata.py
|
##
# Package metadata
##
import ah_bootstrap
from astropy_helpers.git_helpers import get_git_devstr
# Name
name = 'plasmapy'
# PlasmaPy uses Semantic Versioning of the form: MAJOR.MINOR.PATCH
#
# - The MAJOR version changes when there are backwards incompatible changes
# - The MINOR version changes when backwards compatible functionality is added
# - The PATCH version changes when the public API remains the same
#
# The SemVer specification requires MAJOR, MINOR, and PATCH to all be
# present, hence '0.1.0' rather than '0.1'.
#
# During initial development releases (with MAJOR = 0), backwards compatibility
# does not need to be maintained when MINOR is incremented.
#
# While a new version is being developed, '.dev' followed by the commit number
# will be appended to the version string.
version = '0.1.0.dev'
release = 'dev' in version
if release:
    version += get_git_devstr(False)
# Long description / docstring
description = """
PlasmaPy is a community-developed and community-driven core Python
package for plasma physics.
"""
# Author(s)
author = 'The PlasmaPy Community'
|
##
# Package metadata
##
import ah_bootstrap
from astropy_helpers.git_helpers import get_git_devstr
# Package name
name = 'plasmapy'
# Versioning follows Semantic Versioning (MAJOR.MINOR.PATCH):
#
#   * MAJOR is bumped for backwards incompatible changes
#   * MINOR is bumped when backwards compatible functionality is added
#   * PATCH is bumped when the public API remains the same
#
# While MAJOR is 0 (initial development), MINOR bumps may break backwards
# compatibility.
#
# For in-development versions, '.dev' plus the commit number is appended
# to the version string below.
version = '0.1.0.dev'
if 'dev' in version:
    release = True
    version += get_git_devstr(False)
else:
    release = False
# Long description / docstring
description = """
PlasmaPy is a community-developed and community-driven core Python
package for plasma physics.
"""
# Author(s)
author = 'The PlasmaPy Community'
|
Change version from 0.1.dev* to 0.1.0.dev*
|
Change version from 0.1.dev* to 0.1.0.dev*
The semantic versioning specification requires that the major, minor,
and patch numbers always be present.
|
Python
|
bsd-3-clause
|
StanczakDominik/PlasmaPy
|
##
# Package metadata
##
import ah_bootstrap
from astropy_helpers.git_helpers import get_git_devstr
# Name
name = 'plasmapy'
# PlasmaPy uses Semantic Versioning of the form: MAJOR.MINOR.PATCH
#
# - The MAJOR version changes when there are backwards incompatible changes
# - The MINOR version changes when backwards compatible functionality is added
# - The PATCH version changes when the public API remains the same
#
# The SemVer specification requires MAJOR, MINOR, and PATCH to all be
# present, hence '0.1.0' rather than '0.1'.
#
# During initial development releases (with MAJOR = 0), backwards compatibility
# does not need to be maintained when MINOR is incremented.
#
# While a new version is being developed, '.dev' followed by the commit number
# will be appended to the version string.
version = '0.1.0.dev'
release = 'dev' in version
if release:
    version += get_git_devstr(False)
# Long description / docstring
description = """
PlasmaPy is a community-developed and community-driven core Python
package for plasma physics.
"""
# Author(s)
author = 'The PlasmaPy Community'
Change version from 0.1.dev* to 0.1.0.dev*
The semantic versioning specification requires that the major, minor,
and patch numbers always be present.
|
##
# Package metadata
##
import ah_bootstrap
from astropy_helpers.git_helpers import get_git_devstr
# Package name
name = 'plasmapy'
# Versioning follows Semantic Versioning (MAJOR.MINOR.PATCH):
#
#   * MAJOR is bumped for backwards incompatible changes
#   * MINOR is bumped when backwards compatible functionality is added
#   * PATCH is bumped when the public API remains the same
#
# While MAJOR is 0 (initial development), MINOR bumps may break backwards
# compatibility.
#
# For in-development versions, '.dev' plus the commit number is appended
# to the version string below.
version = '0.1.0.dev'
if 'dev' in version:
    release = True
    version += get_git_devstr(False)
else:
    release = False
# Long description / docstring
description = """
PlasmaPy is a community-developed and community-driven core Python
package for plasma physics.
"""
# Author(s)
author = 'The PlasmaPy Community'
|
<commit_before>##
# Package metadata
##
import ah_bootstrap
from astropy_helpers.git_helpers import get_git_devstr
# Name
name = 'plasmapy'
# PlasmaPy uses Semantic Versioning of the form: MAJOR.MINOR.PATCH
#
# - The MAJOR version changes when there are backwards incompatible changes
# - The MINOR version changes when backwards compatible functionality is added
# - The PATCH version changes when the public API remains the same
#
# The SemVer specification requires MAJOR, MINOR, and PATCH to all be
# present, hence '0.1.0' rather than '0.1'.
#
# During initial development releases (with MAJOR = 0), backwards compatibility
# does not need to be maintained when MINOR is incremented.
#
# While a new version is being developed, '.dev' followed by the commit number
# will be appended to the version string.
version = '0.1.0.dev'
release = 'dev' in version
if release:
    version += get_git_devstr(False)
# Long description / docstring
description = """
PlasmaPy is a community-developed and community-driven core Python
package for plasma physics.
"""
# Author(s)
author = 'The PlasmaPy Community'
<commit_msg>Change version from 0.1.dev* to 0.1.0.dev*
The semantic versioning specification requires that the major, minor,
and patch numbers always be present.<commit_after>
|
##
# Package metadata
##
import ah_bootstrap
from astropy_helpers.git_helpers import get_git_devstr
# Package name
name = 'plasmapy'
# Versioning follows Semantic Versioning (MAJOR.MINOR.PATCH):
#
#   * MAJOR is bumped for backwards incompatible changes
#   * MINOR is bumped when backwards compatible functionality is added
#   * PATCH is bumped when the public API remains the same
#
# While MAJOR is 0 (initial development), MINOR bumps may break backwards
# compatibility.
#
# For in-development versions, '.dev' plus the commit number is appended
# to the version string below.
version = '0.1.0.dev'
if 'dev' in version:
    release = True
    version += get_git_devstr(False)
else:
    release = False
# Long description / docstring
description = """
PlasmaPy is a community-developed and community-driven core Python
package for plasma physics.
"""
# Author(s)
author = 'The PlasmaPy Community'
|
##
# Package metadata
##
import ah_bootstrap
from astropy_helpers.git_helpers import get_git_devstr
# Name
name = 'plasmapy'
# PlasmaPy uses Semantic Versioning of the form: MAJOR.MINOR.PATCH
#
# - The MAJOR version changes when there are backwards incompatible changes
# - The MINOR version changes when backwards compatible functionality is added
# - The PATCH version changes when the public API remains the same
#
# The SemVer specification requires MAJOR, MINOR, and PATCH to all be
# present, hence '0.1.0' rather than '0.1'.
#
# During initial development releases (with MAJOR = 0), backwards compatibility
# does not need to be maintained when MINOR is incremented.
#
# While a new version is being developed, '.dev' followed by the commit number
# will be appended to the version string.
version = '0.1.0.dev'
release = 'dev' in version
if release:
    version += get_git_devstr(False)
# Long description / docstring
description = """
PlasmaPy is a community-developed and community-driven core Python
package for plasma physics.
"""
# Author(s)
author = 'The PlasmaPy Community'
Change version from 0.1.dev* to 0.1.0.dev*
The semantic versioning specification requires that the major, minor,
and patch numbers always be present.##
# Package metadata
##
import ah_bootstrap
from astropy_helpers.git_helpers import get_git_devstr
# Package name
name = 'plasmapy'
# Versioning follows Semantic Versioning (MAJOR.MINOR.PATCH):
#
#   * MAJOR is bumped for backwards incompatible changes
#   * MINOR is bumped when backwards compatible functionality is added
#   * PATCH is bumped when the public API remains the same
#
# While MAJOR is 0 (initial development), MINOR bumps may break backwards
# compatibility.
#
# For in-development versions, '.dev' plus the commit number is appended
# to the version string below.
version = '0.1.0.dev'
if 'dev' in version:
    release = True
    version += get_git_devstr(False)
else:
    release = False
# Long description / docstring
description = """
PlasmaPy is a community-developed and community-driven core Python
package for plasma physics.
"""
# Author(s)
author = 'The PlasmaPy Community'
|
<commit_before>##
# Package metadata
##
import ah_bootstrap
from astropy_helpers.git_helpers import get_git_devstr
# Name
name = 'plasmapy'
# PlasmaPy uses Semantic Versioning of the form: MAJOR.MINOR.PATCH
#
# - The MAJOR version changes when there are backwards incompatible changes
# - The MINOR version changes when backwards compatible functionality is added
# - The PATCH version changes when the public API remains the same
#
# The SemVer specification requires MAJOR, MINOR, and PATCH to all be
# present, hence '0.1.0' rather than '0.1'.
#
# During initial development releases (with MAJOR = 0), backwards compatibility
# does not need to be maintained when MINOR is incremented.
#
# While a new version is being developed, '.dev' followed by the commit number
# will be appended to the version string.
version = '0.1.0.dev'
release = 'dev' in version
if release:
    version += get_git_devstr(False)
# Long description / docstring
description = """
PlasmaPy is a community-developed and community-driven core Python
package for plasma physics.
"""
# Author(s)
author = 'The PlasmaPy Community'
<commit_msg>Change version from 0.1.dev* to 0.1.0.dev*
The semantic versioning specification requires that the major, minor,
and patch numbers always be present.<commit_after>##
# Package metadata
##
import ah_bootstrap
from astropy_helpers.git_helpers import get_git_devstr
# Package name
name = 'plasmapy'
# Versioning follows Semantic Versioning (MAJOR.MINOR.PATCH):
#
#   * MAJOR is bumped for backwards incompatible changes
#   * MINOR is bumped when backwards compatible functionality is added
#   * PATCH is bumped when the public API remains the same
#
# While MAJOR is 0 (initial development), MINOR bumps may break backwards
# compatibility.
#
# For in-development versions, '.dev' plus the commit number is appended
# to the version string below.
version = '0.1.0.dev'
if 'dev' in version:
    release = True
    version += get_git_devstr(False)
else:
    release = False
# Long description / docstring
description = """
PlasmaPy is a community-developed and community-driven core Python
package for plasma physics.
"""
# Author(s)
author = 'The PlasmaPy Community'
|
2eac437b9d907fb60d53522633dd278aa277ea08
|
test/user_tests/test_models.py
|
test/user_tests/test_models.py
|
# coding: utf-8
import unittest
from test.factories import UserFactory
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from users.models import create_user_profile, Users
class UserTest(unittest.TestCase):
    '''User-specific tests'''

    def setUp(self):
        # Build (do not persist) a user fixture for each test.
        self.user = UserFactory.build()

    def tearDown(self):
        self.user = None

    def test_user(self):
        # assertIsNotNone is the idiomatic identity check; the original
        # assertNotEqual(None, ...) only exercised __ne__ against None.
        self.assertIsNotNone(self.user)
        self.assertEqual('Boy', self.user.first_name)
        self.assertEqual('Factory', self.user.last_name)
        self.assertEqual('boy_factory@example.com', self.user.email)

    def test_user_generator(self):
        pass
class UserProfileTest(unittest.TestCase):
    '''User profile test'''

    def test_post_save_signal(self):
        # Detach the post_save hook so invoking the handler manually does
        # not create a second profile (for test purposing only).
        post_save.disconnect(create_user_profile, sender=User)
        user = UserFactory.create()
        create_user_profile(User, user, True)
        self.assertEqual(1, Users.objects.all().count())
|
# coding: utf-8
import unittest
from test.factories import UserFactory
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from users.models import create_new_user, Users
class UserTest(unittest.TestCase):
    '''User-specific tests'''

    def setUp(self):
        # Build (do not persist) a user fixture for each test.
        self.user = UserFactory.build()

    def tearDown(self):
        self.user = None

    def test_user(self):
        # assertIsNotNone is the idiomatic identity check; the original
        # assertNotEqual(None, ...) only exercised __ne__ against None.
        self.assertIsNotNone(self.user)
        self.assertEqual('Boy', self.user.first_name)
        self.assertEqual('Factory', self.user.last_name)
        self.assertEqual('boy_factory@example.com', self.user.email)

    def test_user_generator(self):
        pass

    def test_create_new_user(self):
        self.assertEqual(0, User.objects.all().count())
        # PEP 8: no spaces around '=' in keyword arguments.
        create_new_user(
            first_name=self.user.first_name,
            last_name=self.user.last_name,
            email=self.user.email,
            password='123'
        )
        self.assertEqual(1, User.objects.all().count())
        u = User.objects.get(email=self.user.email)
        self.assertEqual(u.first_name, self.user.first_name)
        self.assertEqual(u.last_name, self.user.last_name)
        self.assertTrue(u.check_password('123'))
        # New accounts start non-staff and inactive.
        self.assertFalse(u.is_staff)
        self.assertFalse(u.is_active)
|
Test for create user in model. Remove test profile creation
|
Test for create user in model. Remove test profile creation
|
Python
|
mit
|
sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/ritmserdtsa
|
# coding: utf-8
import unittest
from test.factories import UserFactory
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from users.models import create_user_profile, Users
class UserTest(unittest.TestCase):
    '''User-specific tests'''

    def setUp(self):
        # Build (do not persist) a user fixture for each test.
        self.user = UserFactory.build()

    def tearDown(self):
        self.user = None

    def test_user(self):
        # assertIsNotNone is the idiomatic identity check; the original
        # assertNotEqual(None, ...) only exercised __ne__ against None.
        self.assertIsNotNone(self.user)
        self.assertEqual('Boy', self.user.first_name)
        self.assertEqual('Factory', self.user.last_name)
        self.assertEqual('boy_factory@example.com', self.user.email)

    def test_user_generator(self):
        pass
class UserProfileTest(unittest.TestCase):
    '''User profile test'''

    def test_post_save_signal(self):
        # Detach the post_save hook so invoking the handler manually does
        # not create a second profile (for test purposing only).
        post_save.disconnect(create_user_profile, sender=User)
        user = UserFactory.create()
        create_user_profile(User, user, True)
        self.assertEqual(1, Users.objects.all().count())
Test for create user in model. Remove test profile creation
|
# coding: utf-8
import unittest
from test.factories import UserFactory
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from users.models import create_new_user, Users
class UserTest(unittest.TestCase):
    '''User-specific tests'''

    def setUp(self):
        # Build (do not persist) a user fixture for each test.
        self.user = UserFactory.build()

    def tearDown(self):
        self.user = None

    def test_user(self):
        # assertIsNotNone is the idiomatic identity check; the original
        # assertNotEqual(None, ...) only exercised __ne__ against None.
        self.assertIsNotNone(self.user)
        self.assertEqual('Boy', self.user.first_name)
        self.assertEqual('Factory', self.user.last_name)
        self.assertEqual('boy_factory@example.com', self.user.email)

    def test_user_generator(self):
        pass

    def test_create_new_user(self):
        self.assertEqual(0, User.objects.all().count())
        # PEP 8: no spaces around '=' in keyword arguments.
        create_new_user(
            first_name=self.user.first_name,
            last_name=self.user.last_name,
            email=self.user.email,
            password='123'
        )
        self.assertEqual(1, User.objects.all().count())
        u = User.objects.get(email=self.user.email)
        self.assertEqual(u.first_name, self.user.first_name)
        self.assertEqual(u.last_name, self.user.last_name)
        self.assertTrue(u.check_password('123'))
        # New accounts start non-staff and inactive.
        self.assertFalse(u.is_staff)
        self.assertFalse(u.is_active)
|
<commit_before># coding: utf-8
import unittest
from test.factories import UserFactory
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from users.models import create_user_profile, Users
class UserTest(unittest.TestCase):
'''User-specific tests'''
def setUp(self):
self.user = UserFactory.build()
def tearDown(self):
self.user = None
def test_user(self):
self.assertNotEqual(None, self.user)
self.assertEqual('Boy', self.user.first_name)
self.assertEqual('Factory', self.user.last_name)
self.assertEqual('boy_factory@example.com', self.user.email)
def test_user_generator(self):
pass
class UserProfileTest(unittest.TestCase):
'''User profile test'''
def test_post_save_signal(self):
# Disconnect post_save signal from user model (for test purposing only)
post_save.disconnect(create_user_profile, sender=User)
sender = User
user = UserFactory.create()
create_user_profile(sender, user, True)
cnt = Users.objects.all().count()
self.assertEqual(1, cnt)
<commit_msg>Test for create user in model. Remove test profile creation<commit_after>
|
# coding: utf-8
import unittest
from test.factories import UserFactory
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from users.models import create_new_user, Users
class UserTest(unittest.TestCase):
'''User-specific tests'''
def setUp(self):
self.user = UserFactory.build()
def tearDown(self):
self.user = None
def test_user(self):
self.assertNotEqual(None, self.user)
self.assertEqual('Boy', self.user.first_name)
self.assertEqual('Factory', self.user.last_name)
self.assertEqual('boy_factory@example.com', self.user.email)
def test_user_generator(self):
pass
def test_create_new_user(self):
self.assertEqual(0, User.objects.all().count())
create_new_user(
first_name = self.user.first_name,
last_name = self.user.last_name,
email = self.user.email,
password='123'
)
self.assertEqual(1, User.objects.all().count())
u = User.objects.get(email=self.user.email)
self.assertEqual(u.first_name, self.user.first_name)
self.assertEqual(u.last_name, self.user.last_name)
self.assertTrue(u.check_password('123'))
self.assertFalse(u.is_staff)
self.assertFalse(u.is_active)
|
# coding: utf-8
import unittest
from test.factories import UserFactory
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from users.models import create_user_profile, Users
class UserTest(unittest.TestCase):
'''User-specific tests'''
def setUp(self):
self.user = UserFactory.build()
def tearDown(self):
self.user = None
def test_user(self):
self.assertNotEqual(None, self.user)
self.assertEqual('Boy', self.user.first_name)
self.assertEqual('Factory', self.user.last_name)
self.assertEqual('boy_factory@example.com', self.user.email)
def test_user_generator(self):
pass
class UserProfileTest(unittest.TestCase):
'''User profile test'''
def test_post_save_signal(self):
# Disconnect post_save signal from user model (for test purposing only)
post_save.disconnect(create_user_profile, sender=User)
sender = User
user = UserFactory.create()
create_user_profile(sender, user, True)
cnt = Users.objects.all().count()
self.assertEqual(1, cnt)
Test for create user in model. Remove test profile creation# coding: utf-8
import unittest
from test.factories import UserFactory
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from users.models import create_new_user, Users
class UserTest(unittest.TestCase):
'''User-specific tests'''
def setUp(self):
self.user = UserFactory.build()
def tearDown(self):
self.user = None
def test_user(self):
self.assertNotEqual(None, self.user)
self.assertEqual('Boy', self.user.first_name)
self.assertEqual('Factory', self.user.last_name)
self.assertEqual('boy_factory@example.com', self.user.email)
def test_user_generator(self):
pass
def test_create_new_user(self):
self.assertEqual(0, User.objects.all().count())
create_new_user(
first_name = self.user.first_name,
last_name = self.user.last_name,
email = self.user.email,
password='123'
)
self.assertEqual(1, User.objects.all().count())
u = User.objects.get(email=self.user.email)
self.assertEqual(u.first_name, self.user.first_name)
self.assertEqual(u.last_name, self.user.last_name)
self.assertTrue(u.check_password('123'))
self.assertFalse(u.is_staff)
self.assertFalse(u.is_active)
|
<commit_before># coding: utf-8
import unittest
from test.factories import UserFactory
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from users.models import create_user_profile, Users
class UserTest(unittest.TestCase):
'''User-specific tests'''
def setUp(self):
self.user = UserFactory.build()
def tearDown(self):
self.user = None
def test_user(self):
self.assertNotEqual(None, self.user)
self.assertEqual('Boy', self.user.first_name)
self.assertEqual('Factory', self.user.last_name)
self.assertEqual('boy_factory@example.com', self.user.email)
def test_user_generator(self):
pass
class UserProfileTest(unittest.TestCase):
'''User profile test'''
def test_post_save_signal(self):
# Disconnect post_save signal from user model (for test purposing only)
post_save.disconnect(create_user_profile, sender=User)
sender = User
user = UserFactory.create()
create_user_profile(sender, user, True)
cnt = Users.objects.all().count()
self.assertEqual(1, cnt)
<commit_msg>Test for create user in model. Remove test profile creation<commit_after># coding: utf-8
import unittest
from test.factories import UserFactory
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from users.models import create_new_user, Users
class UserTest(unittest.TestCase):
'''User-specific tests'''
def setUp(self):
self.user = UserFactory.build()
def tearDown(self):
self.user = None
def test_user(self):
self.assertNotEqual(None, self.user)
self.assertEqual('Boy', self.user.first_name)
self.assertEqual('Factory', self.user.last_name)
self.assertEqual('boy_factory@example.com', self.user.email)
def test_user_generator(self):
pass
def test_create_new_user(self):
self.assertEqual(0, User.objects.all().count())
create_new_user(
first_name = self.user.first_name,
last_name = self.user.last_name,
email = self.user.email,
password='123'
)
self.assertEqual(1, User.objects.all().count())
u = User.objects.get(email=self.user.email)
self.assertEqual(u.first_name, self.user.first_name)
self.assertEqual(u.last_name, self.user.last_name)
self.assertTrue(u.check_password('123'))
self.assertFalse(u.is_staff)
self.assertFalse(u.is_active)
|
fc01acc869969e5c0666de1065f149b3caec851d
|
core/wait_ssh_ready.py
|
core/wait_ssh_ready.py
|
from __future__ import print_function
import time
import sys
import socket
import logging
def wait_ssh_ready(host, tries=40, delay=3, port=22):
# Wait until the SSH is actually up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
logging.info('Waiting for SSH at %s to be ready to connect' % host, end='')
sys.stdout.flush()
for _ in xrange(tries):
try:
s.connect((host, port))
assert s.recv(3) == 'SSH'
except KeyboardInterrupt:
logging.warn('User stopped the loop.')
break
except socket.error:
time.sleep(delay)
print('.', end='')
sys.stdout.flush()
except AssertionError:
time.sleep(delay)
print('!', end='')
sys.stdout.flush()
else:
print() # A new line
logging.info('SSH is ready to connect')
return True
else:
waited = tries * delay
logging.error('SSH is not available after %s seconds.' % waited)
return False
|
from __future__ import print_function
import time
import sys
import socket
import logging
def wait_ssh_ready(host, tries=40, delay=3, port=22):
# Wait until the SSH is actually up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Waiting for SSH at %s to be ready to connect' % host, end='')
sys.stdout.flush()
for _ in xrange(tries):
try:
s.connect((host, port))
assert s.recv(3) == 'SSH'
except KeyboardInterrupt:
logging.warn('User stopped the loop.')
break
except socket.error:
time.sleep(delay)
print('.', end='')
sys.stdout.flush()
except AssertionError:
time.sleep(delay)
print('!', end='')
sys.stdout.flush()
else:
print() # A new line
logging.info('SSH is ready to connect')
return True
else:
waited = tries * delay
logging.error('SSH is not available after %s seconds.' % waited)
return False
|
Fix incorrect call to logging module
|
Fix incorrect call to logging module
|
Python
|
agpl-3.0
|
andresriancho/nimbostratus-target
|
from __future__ import print_function
import time
import sys
import socket
import logging
def wait_ssh_ready(host, tries=40, delay=3, port=22):
# Wait until the SSH is actually up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
logging.info('Waiting for SSH at %s to be ready to connect' % host, end='')
sys.stdout.flush()
for _ in xrange(tries):
try:
s.connect((host, port))
assert s.recv(3) == 'SSH'
except KeyboardInterrupt:
logging.warn('User stopped the loop.')
break
except socket.error:
time.sleep(delay)
print('.', end='')
sys.stdout.flush()
except AssertionError:
time.sleep(delay)
print('!', end='')
sys.stdout.flush()
else:
print() # A new line
logging.info('SSH is ready to connect')
return True
else:
waited = tries * delay
logging.error('SSH is not available after %s seconds.' % waited)
return FalseFix incorrect call to logging module
|
from __future__ import print_function
import time
import sys
import socket
import logging
def wait_ssh_ready(host, tries=40, delay=3, port=22):
# Wait until the SSH is actually up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Waiting for SSH at %s to be ready to connect' % host, end='')
sys.stdout.flush()
for _ in xrange(tries):
try:
s.connect((host, port))
assert s.recv(3) == 'SSH'
except KeyboardInterrupt:
logging.warn('User stopped the loop.')
break
except socket.error:
time.sleep(delay)
print('.', end='')
sys.stdout.flush()
except AssertionError:
time.sleep(delay)
print('!', end='')
sys.stdout.flush()
else:
print() # A new line
logging.info('SSH is ready to connect')
return True
else:
waited = tries * delay
logging.error('SSH is not available after %s seconds.' % waited)
return False
|
<commit_before>from __future__ import print_function
import time
import sys
import socket
import logging
def wait_ssh_ready(host, tries=40, delay=3, port=22):
# Wait until the SSH is actually up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
logging.info('Waiting for SSH at %s to be ready to connect' % host, end='')
sys.stdout.flush()
for _ in xrange(tries):
try:
s.connect((host, port))
assert s.recv(3) == 'SSH'
except KeyboardInterrupt:
logging.warn('User stopped the loop.')
break
except socket.error:
time.sleep(delay)
print('.', end='')
sys.stdout.flush()
except AssertionError:
time.sleep(delay)
print('!', end='')
sys.stdout.flush()
else:
print() # A new line
logging.info('SSH is ready to connect')
return True
else:
waited = tries * delay
logging.error('SSH is not available after %s seconds.' % waited)
return False<commit_msg>Fix incorrect call to logging module<commit_after>
|
from __future__ import print_function
import time
import sys
import socket
import logging
def wait_ssh_ready(host, tries=40, delay=3, port=22):
# Wait until the SSH is actually up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Waiting for SSH at %s to be ready to connect' % host, end='')
sys.stdout.flush()
for _ in xrange(tries):
try:
s.connect((host, port))
assert s.recv(3) == 'SSH'
except KeyboardInterrupt:
logging.warn('User stopped the loop.')
break
except socket.error:
time.sleep(delay)
print('.', end='')
sys.stdout.flush()
except AssertionError:
time.sleep(delay)
print('!', end='')
sys.stdout.flush()
else:
print() # A new line
logging.info('SSH is ready to connect')
return True
else:
waited = tries * delay
logging.error('SSH is not available after %s seconds.' % waited)
return False
|
from __future__ import print_function
import time
import sys
import socket
import logging
def wait_ssh_ready(host, tries=40, delay=3, port=22):
# Wait until the SSH is actually up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
logging.info('Waiting for SSH at %s to be ready to connect' % host, end='')
sys.stdout.flush()
for _ in xrange(tries):
try:
s.connect((host, port))
assert s.recv(3) == 'SSH'
except KeyboardInterrupt:
logging.warn('User stopped the loop.')
break
except socket.error:
time.sleep(delay)
print('.', end='')
sys.stdout.flush()
except AssertionError:
time.sleep(delay)
print('!', end='')
sys.stdout.flush()
else:
print() # A new line
logging.info('SSH is ready to connect')
return True
else:
waited = tries * delay
logging.error('SSH is not available after %s seconds.' % waited)
return FalseFix incorrect call to logging modulefrom __future__ import print_function
import time
import sys
import socket
import logging
def wait_ssh_ready(host, tries=40, delay=3, port=22):
# Wait until the SSH is actually up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Waiting for SSH at %s to be ready to connect' % host, end='')
sys.stdout.flush()
for _ in xrange(tries):
try:
s.connect((host, port))
assert s.recv(3) == 'SSH'
except KeyboardInterrupt:
logging.warn('User stopped the loop.')
break
except socket.error:
time.sleep(delay)
print('.', end='')
sys.stdout.flush()
except AssertionError:
time.sleep(delay)
print('!', end='')
sys.stdout.flush()
else:
print() # A new line
logging.info('SSH is ready to connect')
return True
else:
waited = tries * delay
logging.error('SSH is not available after %s seconds.' % waited)
return False
|
<commit_before>from __future__ import print_function
import time
import sys
import socket
import logging
def wait_ssh_ready(host, tries=40, delay=3, port=22):
# Wait until the SSH is actually up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
logging.info('Waiting for SSH at %s to be ready to connect' % host, end='')
sys.stdout.flush()
for _ in xrange(tries):
try:
s.connect((host, port))
assert s.recv(3) == 'SSH'
except KeyboardInterrupt:
logging.warn('User stopped the loop.')
break
except socket.error:
time.sleep(delay)
print('.', end='')
sys.stdout.flush()
except AssertionError:
time.sleep(delay)
print('!', end='')
sys.stdout.flush()
else:
print() # A new line
logging.info('SSH is ready to connect')
return True
else:
waited = tries * delay
logging.error('SSH is not available after %s seconds.' % waited)
return False<commit_msg>Fix incorrect call to logging module<commit_after>from __future__ import print_function
import time
import sys
import socket
import logging
def wait_ssh_ready(host, tries=40, delay=3, port=22):
# Wait until the SSH is actually up
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Waiting for SSH at %s to be ready to connect' % host, end='')
sys.stdout.flush()
for _ in xrange(tries):
try:
s.connect((host, port))
assert s.recv(3) == 'SSH'
except KeyboardInterrupt:
logging.warn('User stopped the loop.')
break
except socket.error:
time.sleep(delay)
print('.', end='')
sys.stdout.flush()
except AssertionError:
time.sleep(delay)
print('!', end='')
sys.stdout.flush()
else:
print() # A new line
logging.info('SSH is ready to connect')
return True
else:
waited = tries * delay
logging.error('SSH is not available after %s seconds.' % waited)
return False
|
afbcda104f9903bda2d82e34a6fdc63b6e2b52a9
|
mbio/Application/__init__.py
|
mbio/Application/__init__.py
|
__author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
'''Get _path__ and compile files.'''
from os import path
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
_Startup()
from . import sort
from .sort import *
__all__.extend(sort.__all__)
from . import cluster
# from .cluster import *
# __all__.extend(cluster.__all__)
from . import job_organization
from .job_organization import *
__all__.extend(job_organization.__all__)
from . import math
# from .math import *
# __all__.extend(math.__all__)
|
__author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
'''Get _path__ and compile files.'''
from os import path
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
_Startup()
from . import sort
from .sort import *
__all__.extend(sort.__all__)
# from . import cluster
# from .cluster import *
# __all__.extend(cluster.__all__)
# from . import job_organization
# from .job_organization import *
# __all__.extend(job_organization.__all__)
from . import math
# from .math import *
# __all__.extend(math.__all__)
|
Remove the cluster and job_organization.
|
Remove the cluster and job_organization.
|
Python
|
mit
|
wzmao/mbio,wzmao/mbio,wzmao/mbio
|
__author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
'''Get _path__ and compile files.'''
from os import path
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
_Startup()
from . import sort
from .sort import *
__all__.extend(sort.__all__)
from . import cluster
# from .cluster import *
# __all__.extend(cluster.__all__)
from . import job_organization
from .job_organization import *
__all__.extend(job_organization.__all__)
from . import math
# from .math import *
# __all__.extend(math.__all__)
Remove the cluster and job_organization.
|
__author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
'''Get _path__ and compile files.'''
from os import path
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
_Startup()
from . import sort
from .sort import *
__all__.extend(sort.__all__)
# from . import cluster
# from .cluster import *
# __all__.extend(cluster.__all__)
# from . import job_organization
# from .job_organization import *
# __all__.extend(job_organization.__all__)
from . import math
# from .math import *
# __all__.extend(math.__all__)
|
<commit_before>__author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
'''Get _path__ and compile files.'''
from os import path
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
_Startup()
from . import sort
from .sort import *
__all__.extend(sort.__all__)
from . import cluster
# from .cluster import *
# __all__.extend(cluster.__all__)
from . import job_organization
from .job_organization import *
__all__.extend(job_organization.__all__)
from . import math
# from .math import *
# __all__.extend(math.__all__)
<commit_msg>Remove the cluster and job_organization.<commit_after>
|
__author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
'''Get _path__ and compile files.'''
from os import path
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
_Startup()
from . import sort
from .sort import *
__all__.extend(sort.__all__)
# from . import cluster
# from .cluster import *
# __all__.extend(cluster.__all__)
# from . import job_organization
# from .job_organization import *
# __all__.extend(job_organization.__all__)
from . import math
# from .math import *
# __all__.extend(math.__all__)
|
__author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
'''Get _path__ and compile files.'''
from os import path
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
_Startup()
from . import sort
from .sort import *
__all__.extend(sort.__all__)
from . import cluster
# from .cluster import *
# __all__.extend(cluster.__all__)
from . import job_organization
from .job_organization import *
__all__.extend(job_organization.__all__)
from . import math
# from .math import *
# __all__.extend(math.__all__)
Remove the cluster and job_organization.__author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
'''Get _path__ and compile files.'''
from os import path
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
_Startup()
from . import sort
from .sort import *
__all__.extend(sort.__all__)
# from . import cluster
# from .cluster import *
# __all__.extend(cluster.__all__)
# from . import job_organization
# from .job_organization import *
# __all__.extend(job_organization.__all__)
from . import math
# from .math import *
# __all__.extend(math.__all__)
|
<commit_before>__author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
'''Get _path__ and compile files.'''
from os import path
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
_Startup()
from . import sort
from .sort import *
__all__.extend(sort.__all__)
from . import cluster
# from .cluster import *
# __all__.extend(cluster.__all__)
from . import job_organization
from .job_organization import *
__all__.extend(job_organization.__all__)
from . import math
# from .math import *
# __all__.extend(math.__all__)
<commit_msg>Remove the cluster and job_organization.<commit_after>__author__ = 'Wenzhi Mao'
__all__ = []
def _Startup():
'''Get _path__ and compile files.'''
from os import path
from mbio import _ABSpath
global _path__
_path__ = _ABSpath()
_Startup()
from . import sort
from .sort import *
__all__.extend(sort.__all__)
# from . import cluster
# from .cluster import *
# __all__.extend(cluster.__all__)
# from . import job_organization
# from .job_organization import *
# __all__.extend(job_organization.__all__)
from . import math
# from .math import *
# __all__.extend(math.__all__)
|
020e8db7ed28c3c6e6968d2d107b23e1fa8eb284
|
pcapfile/test/__main__.py
|
pcapfile/test/__main__.py
|
#!/usr/bin/env python
"""
This is the front end to the pcapfile test SUITE.
"""
import unittest
from pcapfile.test.linklayer_test import TestCase as LinklayerTest
from pcapfile.test.savefile_test import TestCase as SavefileTest
from pcapfile.test.protocols_linklayer_ethernet import TestCase as EthernetTest
from pcapfile.test.protocols_linklayer_wifi import TestCase as WifiTest
from pcapfile.test.protocols_network_ip import TestCase as IpTest
from pcapfile.test.protocols_transport_tcp import TestCase as TcpTest
if __name__ == '__main__':
TEST_CLASSES = [SavefileTest, LinklayerTest, EthernetTest, WifiTest, IpTest, TcpTest]
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for test_class in TEST_CLASSES:
SUITE.addTests(LOADER.loadTestsFromTestCase(test_class))
unittest.TextTestRunner(verbosity=2).run(SUITE)
|
#!/usr/bin/env python
"""
This is the front end to the pcapfile test SUITE.
"""
import unittest
import sys
from pcapfile.test.linklayer_test import TestCase as LinklayerTest
from pcapfile.test.savefile_test import TestCase as SavefileTest
from pcapfile.test.protocols_linklayer_ethernet import TestCase as EthernetTest
from pcapfile.test.protocols_linklayer_wifi import TestCase as WifiTest
from pcapfile.test.protocols_network_ip import TestCase as IpTest
from pcapfile.test.protocols_transport_tcp import TestCase as TcpTest
if __name__ == '__main__':
TEST_CLASSES = [SavefileTest, LinklayerTest, EthernetTest, WifiTest, IpTest, TcpTest]
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for test_class in TEST_CLASSES:
SUITE.addTests(LOADER.loadTestsFromTestCase(test_class))
result = unittest.TextTestRunner(verbosity=2).run(SUITE)
if not result.wasSuccessful():
sys.exit(1)
|
Return -1 when tests fail
|
Return -1 when tests fail
|
Python
|
isc
|
kisom/pypcapfile
|
#!/usr/bin/env python
"""
This is the front end to the pcapfile test SUITE.
"""
import unittest
from pcapfile.test.linklayer_test import TestCase as LinklayerTest
from pcapfile.test.savefile_test import TestCase as SavefileTest
from pcapfile.test.protocols_linklayer_ethernet import TestCase as EthernetTest
from pcapfile.test.protocols_linklayer_wifi import TestCase as WifiTest
from pcapfile.test.protocols_network_ip import TestCase as IpTest
from pcapfile.test.protocols_transport_tcp import TestCase as TcpTest
if __name__ == '__main__':
TEST_CLASSES = [SavefileTest, LinklayerTest, EthernetTest, WifiTest, IpTest, TcpTest]
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for test_class in TEST_CLASSES:
SUITE.addTests(LOADER.loadTestsFromTestCase(test_class))
unittest.TextTestRunner(verbosity=2).run(SUITE)
Return -1 when tests fail
|
#!/usr/bin/env python
"""
This is the front end to the pcapfile test SUITE.
"""
import unittest
import sys
from pcapfile.test.linklayer_test import TestCase as LinklayerTest
from pcapfile.test.savefile_test import TestCase as SavefileTest
from pcapfile.test.protocols_linklayer_ethernet import TestCase as EthernetTest
from pcapfile.test.protocols_linklayer_wifi import TestCase as WifiTest
from pcapfile.test.protocols_network_ip import TestCase as IpTest
from pcapfile.test.protocols_transport_tcp import TestCase as TcpTest
if __name__ == '__main__':
TEST_CLASSES = [SavefileTest, LinklayerTest, EthernetTest, WifiTest, IpTest, TcpTest]
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for test_class in TEST_CLASSES:
SUITE.addTests(LOADER.loadTestsFromTestCase(test_class))
result = unittest.TextTestRunner(verbosity=2).run(SUITE)
if not result.wasSuccessful():
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
"""
This is the front end to the pcapfile test SUITE.
"""
import unittest
from pcapfile.test.linklayer_test import TestCase as LinklayerTest
from pcapfile.test.savefile_test import TestCase as SavefileTest
from pcapfile.test.protocols_linklayer_ethernet import TestCase as EthernetTest
from pcapfile.test.protocols_linklayer_wifi import TestCase as WifiTest
from pcapfile.test.protocols_network_ip import TestCase as IpTest
from pcapfile.test.protocols_transport_tcp import TestCase as TcpTest
if __name__ == '__main__':
TEST_CLASSES = [SavefileTest, LinklayerTest, EthernetTest, WifiTest, IpTest, TcpTest]
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for test_class in TEST_CLASSES:
SUITE.addTests(LOADER.loadTestsFromTestCase(test_class))
unittest.TextTestRunner(verbosity=2).run(SUITE)
<commit_msg>Return -1 when tests fail<commit_after>
|
#!/usr/bin/env python
"""
This is the front end to the pcapfile test SUITE.
"""
import unittest
import sys
from pcapfile.test.linklayer_test import TestCase as LinklayerTest
from pcapfile.test.savefile_test import TestCase as SavefileTest
from pcapfile.test.protocols_linklayer_ethernet import TestCase as EthernetTest
from pcapfile.test.protocols_linklayer_wifi import TestCase as WifiTest
from pcapfile.test.protocols_network_ip import TestCase as IpTest
from pcapfile.test.protocols_transport_tcp import TestCase as TcpTest
if __name__ == '__main__':
TEST_CLASSES = [SavefileTest, LinklayerTest, EthernetTest, WifiTest, IpTest, TcpTest]
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for test_class in TEST_CLASSES:
SUITE.addTests(LOADER.loadTestsFromTestCase(test_class))
result = unittest.TextTestRunner(verbosity=2).run(SUITE)
if not result.wasSuccessful():
sys.exit(1)
|
#!/usr/bin/env python
"""
This is the front end to the pcapfile test SUITE.
"""
import unittest
from pcapfile.test.linklayer_test import TestCase as LinklayerTest
from pcapfile.test.savefile_test import TestCase as SavefileTest
from pcapfile.test.protocols_linklayer_ethernet import TestCase as EthernetTest
from pcapfile.test.protocols_linklayer_wifi import TestCase as WifiTest
from pcapfile.test.protocols_network_ip import TestCase as IpTest
from pcapfile.test.protocols_transport_tcp import TestCase as TcpTest
if __name__ == '__main__':
TEST_CLASSES = [SavefileTest, LinklayerTest, EthernetTest, WifiTest, IpTest, TcpTest]
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for test_class in TEST_CLASSES:
SUITE.addTests(LOADER.loadTestsFromTestCase(test_class))
unittest.TextTestRunner(verbosity=2).run(SUITE)
Return -1 when tests fail#!/usr/bin/env python
"""
This is the front end to the pcapfile test SUITE.
"""
import unittest
import sys
from pcapfile.test.linklayer_test import TestCase as LinklayerTest
from pcapfile.test.savefile_test import TestCase as SavefileTest
from pcapfile.test.protocols_linklayer_ethernet import TestCase as EthernetTest
from pcapfile.test.protocols_linklayer_wifi import TestCase as WifiTest
from pcapfile.test.protocols_network_ip import TestCase as IpTest
from pcapfile.test.protocols_transport_tcp import TestCase as TcpTest
if __name__ == '__main__':
TEST_CLASSES = [SavefileTest, LinklayerTest, EthernetTest, WifiTest, IpTest, TcpTest]
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for test_class in TEST_CLASSES:
SUITE.addTests(LOADER.loadTestsFromTestCase(test_class))
result = unittest.TextTestRunner(verbosity=2).run(SUITE)
if not result.wasSuccessful():
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
"""
This is the front end to the pcapfile test SUITE.
"""
import unittest
from pcapfile.test.linklayer_test import TestCase as LinklayerTest
from pcapfile.test.savefile_test import TestCase as SavefileTest
from pcapfile.test.protocols_linklayer_ethernet import TestCase as EthernetTest
from pcapfile.test.protocols_linklayer_wifi import TestCase as WifiTest
from pcapfile.test.protocols_network_ip import TestCase as IpTest
from pcapfile.test.protocols_transport_tcp import TestCase as TcpTest
if __name__ == '__main__':
TEST_CLASSES = [SavefileTest, LinklayerTest, EthernetTest, WifiTest, IpTest, TcpTest]
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for test_class in TEST_CLASSES:
SUITE.addTests(LOADER.loadTestsFromTestCase(test_class))
unittest.TextTestRunner(verbosity=2).run(SUITE)
<commit_msg>Return -1 when tests fail<commit_after>#!/usr/bin/env python
"""
This is the front end to the pcapfile test SUITE.
"""
import unittest
import sys
from pcapfile.test.linklayer_test import TestCase as LinklayerTest
from pcapfile.test.savefile_test import TestCase as SavefileTest
from pcapfile.test.protocols_linklayer_ethernet import TestCase as EthernetTest
from pcapfile.test.protocols_linklayer_wifi import TestCase as WifiTest
from pcapfile.test.protocols_network_ip import TestCase as IpTest
from pcapfile.test.protocols_transport_tcp import TestCase as TcpTest
if __name__ == '__main__':
TEST_CLASSES = [SavefileTest, LinklayerTest, EthernetTest, WifiTest, IpTest, TcpTest]
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for test_class in TEST_CLASSES:
SUITE.addTests(LOADER.loadTestsFromTestCase(test_class))
result = unittest.TextTestRunner(verbosity=2).run(SUITE)
if not result.wasSuccessful():
sys.exit(1)
|
d474edcdbe1d9966ad09609b87d119c60c2a38d4
|
datapusher/main.py
|
datapusher/main.py
|
import os
import six
import ckanserviceprovider.web as web
from . import jobs
# check whether jobs have been imported properly
assert(jobs.push_to_datastore)
def serve():
web.init()
web.app.run(web.app.config.get('HOST'), web.app.config.get('PORT'))
def serve_test():
web.init()
return web.app.test_client()
def main():
import argparse
argparser = argparse.ArgumentParser(
description='Service that allows automatic migration of data to the CKAN DataStore',
epilog='''"He reached out and pressed an invitingly large red button on a nearby panel.
The panel lit up with the words Please do not press this button again."''')
if six.PY3:
argparser.add_argument('config', metavar='CONFIG', type=argparse.FileType('r'),
help='configuration file')
if six.PY2:
argparser.add_argument('config', metavar='CONFIG', type=file,
help='configuration file')
args = argparser.parse_args()
os.environ['JOB_CONFIG'] = os.path.abspath(args.config.name)
serve()
if __name__ == '__main__':
main()
|
import os
import six
import ckanserviceprovider.web as web
from datapusher import jobs
# check whether jobs have been imported properly
assert(jobs.push_to_datastore)
def serve():
web.init()
web.app.run(web.app.config.get('HOST'), web.app.config.get('PORT'))
def serve_test():
web.init()
return web.app.test_client()
def main():
import argparse
argparser = argparse.ArgumentParser(
description='Service that allows automatic migration of data to the CKAN DataStore',
epilog='''"He reached out and pressed an invitingly large red button on a nearby panel.
The panel lit up with the words Please do not press this button again."''')
if six.PY3:
argparser.add_argument('config', metavar='CONFIG', type=argparse.FileType('r'),
help='configuration file')
if six.PY2:
argparser.add_argument('config', metavar='CONFIG', type=file,
help='configuration file')
args = argparser.parse_args()
os.environ['JOB_CONFIG'] = os.path.abspath(args.config.name)
serve()
if __name__ == '__main__':
main()
|
Fix Import Error for relative Import
|
[x]: Fix Import Error for relative Import
|
Python
|
agpl-3.0
|
ckan/datapusher
|
import os
import six
import ckanserviceprovider.web as web
from . import jobs
# check whether jobs have been imported properly
assert(jobs.push_to_datastore)
def serve():
web.init()
web.app.run(web.app.config.get('HOST'), web.app.config.get('PORT'))
def serve_test():
web.init()
return web.app.test_client()
def main():
import argparse
argparser = argparse.ArgumentParser(
description='Service that allows automatic migration of data to the CKAN DataStore',
epilog='''"He reached out and pressed an invitingly large red button on a nearby panel.
The panel lit up with the words Please do not press this button again."''')
if six.PY3:
argparser.add_argument('config', metavar='CONFIG', type=argparse.FileType('r'),
help='configuration file')
if six.PY2:
argparser.add_argument('config', metavar='CONFIG', type=file,
help='configuration file')
args = argparser.parse_args()
os.environ['JOB_CONFIG'] = os.path.abspath(args.config.name)
serve()
if __name__ == '__main__':
main()
[x]: Fix Import Error for relative Import
|
import os
import six
import ckanserviceprovider.web as web
from datapusher import jobs
# check whether jobs have been imported properly
assert(jobs.push_to_datastore)
def serve():
web.init()
web.app.run(web.app.config.get('HOST'), web.app.config.get('PORT'))
def serve_test():
web.init()
return web.app.test_client()
def main():
import argparse
argparser = argparse.ArgumentParser(
description='Service that allows automatic migration of data to the CKAN DataStore',
epilog='''"He reached out and pressed an invitingly large red button on a nearby panel.
The panel lit up with the words Please do not press this button again."''')
if six.PY3:
argparser.add_argument('config', metavar='CONFIG', type=argparse.FileType('r'),
help='configuration file')
if six.PY2:
argparser.add_argument('config', metavar='CONFIG', type=file,
help='configuration file')
args = argparser.parse_args()
os.environ['JOB_CONFIG'] = os.path.abspath(args.config.name)
serve()
if __name__ == '__main__':
main()
|
<commit_before>import os
import six
import ckanserviceprovider.web as web
from . import jobs
# check whether jobs have been imported properly
assert(jobs.push_to_datastore)
def serve():
web.init()
web.app.run(web.app.config.get('HOST'), web.app.config.get('PORT'))
def serve_test():
web.init()
return web.app.test_client()
def main():
import argparse
argparser = argparse.ArgumentParser(
description='Service that allows automatic migration of data to the CKAN DataStore',
epilog='''"He reached out and pressed an invitingly large red button on a nearby panel.
The panel lit up with the words Please do not press this button again."''')
if six.PY3:
argparser.add_argument('config', metavar='CONFIG', type=argparse.FileType('r'),
help='configuration file')
if six.PY2:
argparser.add_argument('config', metavar='CONFIG', type=file,
help='configuration file')
args = argparser.parse_args()
os.environ['JOB_CONFIG'] = os.path.abspath(args.config.name)
serve()
if __name__ == '__main__':
main()
<commit_msg>[x]: Fix Import Error for relative Import<commit_after>
|
import os
import six
import ckanserviceprovider.web as web
from datapusher import jobs
# check whether jobs have been imported properly
assert(jobs.push_to_datastore)
def serve():
web.init()
web.app.run(web.app.config.get('HOST'), web.app.config.get('PORT'))
def serve_test():
web.init()
return web.app.test_client()
def main():
import argparse
argparser = argparse.ArgumentParser(
description='Service that allows automatic migration of data to the CKAN DataStore',
epilog='''"He reached out and pressed an invitingly large red button on a nearby panel.
The panel lit up with the words Please do not press this button again."''')
if six.PY3:
argparser.add_argument('config', metavar='CONFIG', type=argparse.FileType('r'),
help='configuration file')
if six.PY2:
argparser.add_argument('config', metavar='CONFIG', type=file,
help='configuration file')
args = argparser.parse_args()
os.environ['JOB_CONFIG'] = os.path.abspath(args.config.name)
serve()
if __name__ == '__main__':
main()
|
import os
import six
import ckanserviceprovider.web as web
from . import jobs
# check whether jobs have been imported properly
assert(jobs.push_to_datastore)
def serve():
web.init()
web.app.run(web.app.config.get('HOST'), web.app.config.get('PORT'))
def serve_test():
web.init()
return web.app.test_client()
def main():
import argparse
argparser = argparse.ArgumentParser(
description='Service that allows automatic migration of data to the CKAN DataStore',
epilog='''"He reached out and pressed an invitingly large red button on a nearby panel.
The panel lit up with the words Please do not press this button again."''')
if six.PY3:
argparser.add_argument('config', metavar='CONFIG', type=argparse.FileType('r'),
help='configuration file')
if six.PY2:
argparser.add_argument('config', metavar='CONFIG', type=file,
help='configuration file')
args = argparser.parse_args()
os.environ['JOB_CONFIG'] = os.path.abspath(args.config.name)
serve()
if __name__ == '__main__':
main()
[x]: Fix Import Error for relative Importimport os
import six
import ckanserviceprovider.web as web
from datapusher import jobs
# check whether jobs have been imported properly
assert(jobs.push_to_datastore)
def serve():
web.init()
web.app.run(web.app.config.get('HOST'), web.app.config.get('PORT'))
def serve_test():
web.init()
return web.app.test_client()
def main():
import argparse
argparser = argparse.ArgumentParser(
description='Service that allows automatic migration of data to the CKAN DataStore',
epilog='''"He reached out and pressed an invitingly large red button on a nearby panel.
The panel lit up with the words Please do not press this button again."''')
if six.PY3:
argparser.add_argument('config', metavar='CONFIG', type=argparse.FileType('r'),
help='configuration file')
if six.PY2:
argparser.add_argument('config', metavar='CONFIG', type=file,
help='configuration file')
args = argparser.parse_args()
os.environ['JOB_CONFIG'] = os.path.abspath(args.config.name)
serve()
if __name__ == '__main__':
main()
|
<commit_before>import os
import six
import ckanserviceprovider.web as web
from . import jobs
# check whether jobs have been imported properly
assert(jobs.push_to_datastore)
def serve():
web.init()
web.app.run(web.app.config.get('HOST'), web.app.config.get('PORT'))
def serve_test():
web.init()
return web.app.test_client()
def main():
import argparse
argparser = argparse.ArgumentParser(
description='Service that allows automatic migration of data to the CKAN DataStore',
epilog='''"He reached out and pressed an invitingly large red button on a nearby panel.
The panel lit up with the words Please do not press this button again."''')
if six.PY3:
argparser.add_argument('config', metavar='CONFIG', type=argparse.FileType('r'),
help='configuration file')
if six.PY2:
argparser.add_argument('config', metavar='CONFIG', type=file,
help='configuration file')
args = argparser.parse_args()
os.environ['JOB_CONFIG'] = os.path.abspath(args.config.name)
serve()
if __name__ == '__main__':
main()
<commit_msg>[x]: Fix Import Error for relative Import<commit_after>import os
import six
import ckanserviceprovider.web as web
from datapusher import jobs
# check whether jobs have been imported properly
assert(jobs.push_to_datastore)
def serve():
web.init()
web.app.run(web.app.config.get('HOST'), web.app.config.get('PORT'))
def serve_test():
web.init()
return web.app.test_client()
def main():
import argparse
argparser = argparse.ArgumentParser(
description='Service that allows automatic migration of data to the CKAN DataStore',
epilog='''"He reached out and pressed an invitingly large red button on a nearby panel.
The panel lit up with the words Please do not press this button again."''')
if six.PY3:
argparser.add_argument('config', metavar='CONFIG', type=argparse.FileType('r'),
help='configuration file')
if six.PY2:
argparser.add_argument('config', metavar='CONFIG', type=file,
help='configuration file')
args = argparser.parse_args()
os.environ['JOB_CONFIG'] = os.path.abspath(args.config.name)
serve()
if __name__ == '__main__':
main()
|
946a2bcd57ac33cca0f48d29350a8f75b2fee2cf
|
sparqllib/tests/test_formatter.py
|
sparqllib/tests/test_formatter.py
|
import unittest
import sparqllib
class TestBasicFormatter(unittest.TestCase):
def setUp(self):
self.formatter = sparqllib.formatter.BasicFormatter()
def test_newlines(self):
self.assertEqual(self.formatter.format("{}"), "{\n}")
def test_indentation(self):
self.assertEqual(self.formatter.format("{test text}"), "{\n test text\n}")
self.assertEqual(self.formatter.format("{test\ntext}"), "{\n test\n text\n}")
self.assertEqual(self.formatter.format("{{text}}"), "{\n {\n text\n }\n}")
def test_trim_whitespace(self):
self.assertEqual(self.formatter.format("text \n"), "text\n")
def test_remove_duplicate_newlines(self):
self.assertEqual(self.formatter.format("\n\n"), "\n")
self.assertEqual(self.formatter.format("\n"), "\n")
if __name__ == '__main__':
unittest.main()
|
import unittest
import sparqllib
class TestBasicFormatter(unittest.TestCase):
def setUp(self):
self.formatter = sparqllib.formatter.BasicFormatter()
def test_newlines(self):
self.assertEqual(self.formatter.format("{}"), "{\n}")
self.assertEqual(self.formatter.format("{\n}"), "{\n}")
def test_indentation(self):
self.assertEqual(self.formatter.format("{test text}"), "{\n test text\n}")
self.assertEqual(self.formatter.format("{test\ntext}"), "{\n test\n text\n}")
self.assertEqual(self.formatter.format("{{text}}"), "{\n {\n text\n }\n}")
def test_trim_whitespace(self):
self.assertEqual(self.formatter.format("text \n"), "text\n")
def test_remove_duplicate_newlines(self):
self.assertEqual(self.formatter.format("\n\n"), "\n")
self.assertEqual(self.formatter.format("\n"), "\n")
if __name__ == '__main__':
unittest.main()
|
Add test to verify single newline is not stripped
|
Add test to verify single newline is not stripped
|
Python
|
mit
|
ALSchwalm/sparqllib
|
import unittest
import sparqllib
class TestBasicFormatter(unittest.TestCase):
def setUp(self):
self.formatter = sparqllib.formatter.BasicFormatter()
def test_newlines(self):
self.assertEqual(self.formatter.format("{}"), "{\n}")
def test_indentation(self):
self.assertEqual(self.formatter.format("{test text}"), "{\n test text\n}")
self.assertEqual(self.formatter.format("{test\ntext}"), "{\n test\n text\n}")
self.assertEqual(self.formatter.format("{{text}}"), "{\n {\n text\n }\n}")
def test_trim_whitespace(self):
self.assertEqual(self.formatter.format("text \n"), "text\n")
def test_remove_duplicate_newlines(self):
self.assertEqual(self.formatter.format("\n\n"), "\n")
self.assertEqual(self.formatter.format("\n"), "\n")
if __name__ == '__main__':
unittest.main()
Add test to verify single newline is not stripped
|
import unittest
import sparqllib
class TestBasicFormatter(unittest.TestCase):
def setUp(self):
self.formatter = sparqllib.formatter.BasicFormatter()
def test_newlines(self):
self.assertEqual(self.formatter.format("{}"), "{\n}")
self.assertEqual(self.formatter.format("{\n}"), "{\n}")
def test_indentation(self):
self.assertEqual(self.formatter.format("{test text}"), "{\n test text\n}")
self.assertEqual(self.formatter.format("{test\ntext}"), "{\n test\n text\n}")
self.assertEqual(self.formatter.format("{{text}}"), "{\n {\n text\n }\n}")
def test_trim_whitespace(self):
self.assertEqual(self.formatter.format("text \n"), "text\n")
def test_remove_duplicate_newlines(self):
self.assertEqual(self.formatter.format("\n\n"), "\n")
self.assertEqual(self.formatter.format("\n"), "\n")
if __name__ == '__main__':
unittest.main()
|
<commit_before>
import unittest
import sparqllib
class TestBasicFormatter(unittest.TestCase):
def setUp(self):
self.formatter = sparqllib.formatter.BasicFormatter()
def test_newlines(self):
self.assertEqual(self.formatter.format("{}"), "{\n}")
def test_indentation(self):
self.assertEqual(self.formatter.format("{test text}"), "{\n test text\n}")
self.assertEqual(self.formatter.format("{test\ntext}"), "{\n test\n text\n}")
self.assertEqual(self.formatter.format("{{text}}"), "{\n {\n text\n }\n}")
def test_trim_whitespace(self):
self.assertEqual(self.formatter.format("text \n"), "text\n")
def test_remove_duplicate_newlines(self):
self.assertEqual(self.formatter.format("\n\n"), "\n")
self.assertEqual(self.formatter.format("\n"), "\n")
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test to verify single newline is not stripped<commit_after>
|
import unittest
import sparqllib
class TestBasicFormatter(unittest.TestCase):
def setUp(self):
self.formatter = sparqllib.formatter.BasicFormatter()
def test_newlines(self):
self.assertEqual(self.formatter.format("{}"), "{\n}")
self.assertEqual(self.formatter.format("{\n}"), "{\n}")
def test_indentation(self):
self.assertEqual(self.formatter.format("{test text}"), "{\n test text\n}")
self.assertEqual(self.formatter.format("{test\ntext}"), "{\n test\n text\n}")
self.assertEqual(self.formatter.format("{{text}}"), "{\n {\n text\n }\n}")
def test_trim_whitespace(self):
self.assertEqual(self.formatter.format("text \n"), "text\n")
def test_remove_duplicate_newlines(self):
self.assertEqual(self.formatter.format("\n\n"), "\n")
self.assertEqual(self.formatter.format("\n"), "\n")
if __name__ == '__main__':
unittest.main()
|
import unittest
import sparqllib
class TestBasicFormatter(unittest.TestCase):
def setUp(self):
self.formatter = sparqllib.formatter.BasicFormatter()
def test_newlines(self):
self.assertEqual(self.formatter.format("{}"), "{\n}")
def test_indentation(self):
self.assertEqual(self.formatter.format("{test text}"), "{\n test text\n}")
self.assertEqual(self.formatter.format("{test\ntext}"), "{\n test\n text\n}")
self.assertEqual(self.formatter.format("{{text}}"), "{\n {\n text\n }\n}")
def test_trim_whitespace(self):
self.assertEqual(self.formatter.format("text \n"), "text\n")
def test_remove_duplicate_newlines(self):
self.assertEqual(self.formatter.format("\n\n"), "\n")
self.assertEqual(self.formatter.format("\n"), "\n")
if __name__ == '__main__':
unittest.main()
Add test to verify single newline is not stripped
import unittest
import sparqllib
class TestBasicFormatter(unittest.TestCase):
def setUp(self):
self.formatter = sparqllib.formatter.BasicFormatter()
def test_newlines(self):
self.assertEqual(self.formatter.format("{}"), "{\n}")
self.assertEqual(self.formatter.format("{\n}"), "{\n}")
def test_indentation(self):
self.assertEqual(self.formatter.format("{test text}"), "{\n test text\n}")
self.assertEqual(self.formatter.format("{test\ntext}"), "{\n test\n text\n}")
self.assertEqual(self.formatter.format("{{text}}"), "{\n {\n text\n }\n}")
def test_trim_whitespace(self):
self.assertEqual(self.formatter.format("text \n"), "text\n")
def test_remove_duplicate_newlines(self):
self.assertEqual(self.formatter.format("\n\n"), "\n")
self.assertEqual(self.formatter.format("\n"), "\n")
if __name__ == '__main__':
unittest.main()
|
<commit_before>
import unittest
import sparqllib
class TestBasicFormatter(unittest.TestCase):
def setUp(self):
self.formatter = sparqllib.formatter.BasicFormatter()
def test_newlines(self):
self.assertEqual(self.formatter.format("{}"), "{\n}")
def test_indentation(self):
self.assertEqual(self.formatter.format("{test text}"), "{\n test text\n}")
self.assertEqual(self.formatter.format("{test\ntext}"), "{\n test\n text\n}")
self.assertEqual(self.formatter.format("{{text}}"), "{\n {\n text\n }\n}")
def test_trim_whitespace(self):
self.assertEqual(self.formatter.format("text \n"), "text\n")
def test_remove_duplicate_newlines(self):
self.assertEqual(self.formatter.format("\n\n"), "\n")
self.assertEqual(self.formatter.format("\n"), "\n")
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test to verify single newline is not stripped<commit_after>
import unittest
import sparqllib
class TestBasicFormatter(unittest.TestCase):
def setUp(self):
self.formatter = sparqllib.formatter.BasicFormatter()
def test_newlines(self):
self.assertEqual(self.formatter.format("{}"), "{\n}")
self.assertEqual(self.formatter.format("{\n}"), "{\n}")
def test_indentation(self):
self.assertEqual(self.formatter.format("{test text}"), "{\n test text\n}")
self.assertEqual(self.formatter.format("{test\ntext}"), "{\n test\n text\n}")
self.assertEqual(self.formatter.format("{{text}}"), "{\n {\n text\n }\n}")
def test_trim_whitespace(self):
self.assertEqual(self.formatter.format("text \n"), "text\n")
def test_remove_duplicate_newlines(self):
self.assertEqual(self.formatter.format("\n\n"), "\n")
self.assertEqual(self.formatter.format("\n"), "\n")
if __name__ == '__main__':
unittest.main()
|
a7083c3c70142f744ace0055c537d9217ed9cbfe
|
paypal/base.py
|
paypal/base.py
|
import urlparse
from django.db import models
class ResponseModel(models.Model):
# Debug information
raw_request = models.TextField(max_length=512)
raw_response = models.TextField(max_length=512)
response_time = models.FloatField(help_text="Response time in milliseconds")
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
ordering = ('-date_created',)
app_label = 'paypal'
def request(self):
request_params = urlparse.parse_qs(self.raw_request)
return self._as_table(request_params)
request.allow_tags = True
def response(self):
return self._as_table(self.context)
response.allow_tags = True
def _as_table(self, params):
rows = []
for k, v in sorted(params.items()):
rows.append('<tr><th>%s</th><td>%s</td></tr>' % (k, v[0]))
return '<table>%s</table>' % ''.join(rows)
@property
def context(self):
return urlparse.parse_qs(self.raw_response)
def value(self, key):
ctx = self.context
return ctx[key][0] if key in ctx else None
|
import urlparse
from django.db import models
class ResponseModel(models.Model):
# Debug information
raw_request = models.TextField(max_length=512)
raw_response = models.TextField(max_length=512)
response_time = models.FloatField(help_text="Response time in milliseconds")
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
ordering = ('-date_created',)
app_label = 'paypal'
def request(self):
request_params = urlparse.parse_qs(self.raw_request)
return self._as_table(request_params)
request.allow_tags = True
def response(self):
return self._as_table(self.context)
response.allow_tags = True
def _as_table(self, params):
rows = []
for k, v in sorted(params.items()):
rows.append('<tr><th>%s</th><td>%s</td></tr>' % (k, v[0]))
return '<table>%s</table>' % ''.join(rows)
@property
def context(self):
return urlparse.parse_qs(self.raw_response)
def value(self, key):
ctx = self.context
return ctx[key][0].decode('utf8') if key in ctx else None
|
Fix a bad issue when PAYPAL returning utf8 encoded chars
|
Fix a bad issue when PAYPAL returning utf8 encoded chars
|
Python
|
bsd-3-clause
|
bharling/django-oscar-worldpay,embedded1/django-oscar-paypal,FedeDR/django-oscar-paypal,django-oscar/django-oscar-paypal,evonove/django-oscar-paypal,st8st8/django-oscar-paypal,nfletton/django-oscar-paypal,britco/django-oscar-paypal,britco/django-oscar-paypal,st8st8/django-oscar-paypal,ZachGoldberg/django-oscar-paypal,evonove/django-oscar-paypal,ZachGoldberg/django-oscar-paypal,enodyt/django-oscar-paypal,evonove/django-oscar-paypal,embedded1/django-oscar-paypal,vintasoftware/django-oscar-paypal,bharling/django-oscar-worldpay,st8st8/django-oscar-paypal,lpakula/django-oscar-paypal,lpakula/django-oscar-paypal,enodyt/django-oscar-paypal,nfletton/django-oscar-paypal,britco/django-oscar-paypal,FedeDR/django-oscar-paypal,embedded1/django-oscar-paypal,lpakula/django-oscar-paypal,bharling/django-oscar-worldpay,ZachGoldberg/django-oscar-paypal,FedeDR/django-oscar-paypal,django-oscar/django-oscar-paypal,django-oscar/django-oscar-paypal,vintasoftware/django-oscar-paypal,enodyt/django-oscar-paypal,phedoreanu/django-oscar-paypal,phedoreanu/django-oscar-paypal,phedoreanu/django-oscar-paypal,nfletton/django-oscar-paypal,bharling/django-oscar-worldpay,vintasoftware/django-oscar-paypal
|
import urlparse
from django.db import models
class ResponseModel(models.Model):
# Debug information
raw_request = models.TextField(max_length=512)
raw_response = models.TextField(max_length=512)
response_time = models.FloatField(help_text="Response time in milliseconds")
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
ordering = ('-date_created',)
app_label = 'paypal'
def request(self):
request_params = urlparse.parse_qs(self.raw_request)
return self._as_table(request_params)
request.allow_tags = True
def response(self):
return self._as_table(self.context)
response.allow_tags = True
def _as_table(self, params):
rows = []
for k, v in sorted(params.items()):
rows.append('<tr><th>%s</th><td>%s</td></tr>' % (k, v[0]))
return '<table>%s</table>' % ''.join(rows)
@property
def context(self):
return urlparse.parse_qs(self.raw_response)
def value(self, key):
ctx = self.context
return ctx[key][0] if key in ctx else None
Fix a bad issue when PAYPAL returning utf8 encoded chars
|
import urlparse
from django.db import models
class ResponseModel(models.Model):
# Debug information
raw_request = models.TextField(max_length=512)
raw_response = models.TextField(max_length=512)
response_time = models.FloatField(help_text="Response time in milliseconds")
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
ordering = ('-date_created',)
app_label = 'paypal'
def request(self):
request_params = urlparse.parse_qs(self.raw_request)
return self._as_table(request_params)
request.allow_tags = True
def response(self):
return self._as_table(self.context)
response.allow_tags = True
def _as_table(self, params):
rows = []
for k, v in sorted(params.items()):
rows.append('<tr><th>%s</th><td>%s</td></tr>' % (k, v[0]))
return '<table>%s</table>' % ''.join(rows)
@property
def context(self):
return urlparse.parse_qs(self.raw_response)
def value(self, key):
ctx = self.context
return ctx[key][0].decode('utf8') if key in ctx else None
|
<commit_before>import urlparse
from django.db import models
class ResponseModel(models.Model):
# Debug information
raw_request = models.TextField(max_length=512)
raw_response = models.TextField(max_length=512)
response_time = models.FloatField(help_text="Response time in milliseconds")
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
ordering = ('-date_created',)
app_label = 'paypal'
def request(self):
request_params = urlparse.parse_qs(self.raw_request)
return self._as_table(request_params)
request.allow_tags = True
def response(self):
return self._as_table(self.context)
response.allow_tags = True
def _as_table(self, params):
rows = []
for k, v in sorted(params.items()):
rows.append('<tr><th>%s</th><td>%s</td></tr>' % (k, v[0]))
return '<table>%s</table>' % ''.join(rows)
@property
def context(self):
return urlparse.parse_qs(self.raw_response)
def value(self, key):
ctx = self.context
return ctx[key][0] if key in ctx else None
<commit_msg>Fix a bad issue when PAYPAL returning utf8 encoded chars<commit_after>
|
import urlparse
from django.db import models
class ResponseModel(models.Model):
# Debug information
raw_request = models.TextField(max_length=512)
raw_response = models.TextField(max_length=512)
response_time = models.FloatField(help_text="Response time in milliseconds")
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
ordering = ('-date_created',)
app_label = 'paypal'
def request(self):
request_params = urlparse.parse_qs(self.raw_request)
return self._as_table(request_params)
request.allow_tags = True
def response(self):
return self._as_table(self.context)
response.allow_tags = True
def _as_table(self, params):
rows = []
for k, v in sorted(params.items()):
rows.append('<tr><th>%s</th><td>%s</td></tr>' % (k, v[0]))
return '<table>%s</table>' % ''.join(rows)
@property
def context(self):
return urlparse.parse_qs(self.raw_response)
def value(self, key):
ctx = self.context
return ctx[key][0].decode('utf8') if key in ctx else None
|
import urlparse
from django.db import models
class ResponseModel(models.Model):
# Debug information
raw_request = models.TextField(max_length=512)
raw_response = models.TextField(max_length=512)
response_time = models.FloatField(help_text="Response time in milliseconds")
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
ordering = ('-date_created',)
app_label = 'paypal'
def request(self):
request_params = urlparse.parse_qs(self.raw_request)
return self._as_table(request_params)
request.allow_tags = True
def response(self):
return self._as_table(self.context)
response.allow_tags = True
def _as_table(self, params):
rows = []
for k, v in sorted(params.items()):
rows.append('<tr><th>%s</th><td>%s</td></tr>' % (k, v[0]))
return '<table>%s</table>' % ''.join(rows)
@property
def context(self):
return urlparse.parse_qs(self.raw_response)
def value(self, key):
ctx = self.context
return ctx[key][0] if key in ctx else None
Fix a bad issue when PAYPAL returning utf8 encoded charsimport urlparse
from django.db import models
class ResponseModel(models.Model):
# Debug information
raw_request = models.TextField(max_length=512)
raw_response = models.TextField(max_length=512)
response_time = models.FloatField(help_text="Response time in milliseconds")
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
ordering = ('-date_created',)
app_label = 'paypal'
def request(self):
request_params = urlparse.parse_qs(self.raw_request)
return self._as_table(request_params)
request.allow_tags = True
def response(self):
return self._as_table(self.context)
response.allow_tags = True
def _as_table(self, params):
rows = []
for k, v in sorted(params.items()):
rows.append('<tr><th>%s</th><td>%s</td></tr>' % (k, v[0]))
return '<table>%s</table>' % ''.join(rows)
@property
def context(self):
return urlparse.parse_qs(self.raw_response)
def value(self, key):
ctx = self.context
return ctx[key][0].decode('utf8') if key in ctx else None
|
<commit_before>import urlparse
from django.db import models
class ResponseModel(models.Model):
# Debug information
raw_request = models.TextField(max_length=512)
raw_response = models.TextField(max_length=512)
response_time = models.FloatField(help_text="Response time in milliseconds")
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
ordering = ('-date_created',)
app_label = 'paypal'
def request(self):
request_params = urlparse.parse_qs(self.raw_request)
return self._as_table(request_params)
request.allow_tags = True
def response(self):
return self._as_table(self.context)
response.allow_tags = True
def _as_table(self, params):
rows = []
for k, v in sorted(params.items()):
rows.append('<tr><th>%s</th><td>%s</td></tr>' % (k, v[0]))
return '<table>%s</table>' % ''.join(rows)
@property
def context(self):
return urlparse.parse_qs(self.raw_response)
def value(self, key):
ctx = self.context
return ctx[key][0] if key in ctx else None
<commit_msg>Fix a bad issue when PAYPAL returning utf8 encoded chars<commit_after>import urlparse
from django.db import models
class ResponseModel(models.Model):
# Debug information
raw_request = models.TextField(max_length=512)
raw_response = models.TextField(max_length=512)
response_time = models.FloatField(help_text="Response time in milliseconds")
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
ordering = ('-date_created',)
app_label = 'paypal'
def request(self):
request_params = urlparse.parse_qs(self.raw_request)
return self._as_table(request_params)
request.allow_tags = True
def response(self):
return self._as_table(self.context)
response.allow_tags = True
def _as_table(self, params):
rows = []
for k, v in sorted(params.items()):
rows.append('<tr><th>%s</th><td>%s</td></tr>' % (k, v[0]))
return '<table>%s</table>' % ''.join(rows)
@property
def context(self):
return urlparse.parse_qs(self.raw_response)
def value(self, key):
ctx = self.context
return ctx[key][0].decode('utf8') if key in ctx else None
|
233d52247d89bb39ccc9ada3a591296baae9cff5
|
notification/backends/web.py
|
notification/backends/web.py
|
from notification.backends.base import NotificationBackend
class WebBackend(NotificationBackend):
slug = u'web'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def send(self, sender, recipient, notice_type, context, on_site=False,
*args, **kwargs):
"""Always "sends" (i.e. stores to the database), setting on_site
accordingly.
"""
# TODO can't do this at the top or we get circular imports
from notification.models import Notice
Notice.objects.create(recipient=recipient,
message=self.format_message(notice_type.label,
'notice.html', context),
notice_type=notice_type,
on_site=on_site,
sender=sender)
return True
|
from notification.backends.base import NotificationBackend
class WebBackend(NotificationBackend):
slug = u'web'
display_name = u'Web'
formats = ['short.txt', 'full.txt']
def send(self, sender, recipient, notice_type, context, on_site=False,
*args, **kwargs):
"""Always "sends" (i.e. stores to the database), setting on_site
accordingly.
"""
# TODO can't do this at the top or we get circular imports
from notification.models import Notice
Notice.objects.create(recipient=recipient,
message=self.format_message(notice_type.label,
'notice.html', context),
notice_type=notice_type,
on_site=on_site,
sender=sender)
return True
|
Use correct slug for Web backend.
|
Use correct slug for Web backend.
|
Python
|
mit
|
theatlantic/django-notification,theatlantic/django-notification
|
from notification.backends.base import NotificationBackend
class WebBackend(NotificationBackend):
slug = u'web'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def send(self, sender, recipient, notice_type, context, on_site=False,
*args, **kwargs):
"""Always "sends" (i.e. stores to the database), setting on_site
accordingly.
"""
# TODO can't do this at the top or we get circular imports
from notification.models import Notice
Notice.objects.create(recipient=recipient,
message=self.format_message(notice_type.label,
'notice.html', context),
notice_type=notice_type,
on_site=on_site,
sender=sender)
return True
Use correct slug for Web backend.
|
from notification.backends.base import NotificationBackend
class WebBackend(NotificationBackend):
slug = u'web'
display_name = u'Web'
formats = ['short.txt', 'full.txt']
def send(self, sender, recipient, notice_type, context, on_site=False,
*args, **kwargs):
"""Always "sends" (i.e. stores to the database), setting on_site
accordingly.
"""
# TODO can't do this at the top or we get circular imports
from notification.models import Notice
Notice.objects.create(recipient=recipient,
message=self.format_message(notice_type.label,
'notice.html', context),
notice_type=notice_type,
on_site=on_site,
sender=sender)
return True
|
<commit_before>from notification.backends.base import NotificationBackend
class WebBackend(NotificationBackend):
slug = u'web'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def send(self, sender, recipient, notice_type, context, on_site=False,
*args, **kwargs):
"""Always "sends" (i.e. stores to the database), setting on_site
accordingly.
"""
# TODO can't do this at the top or we get circular imports
from notification.models import Notice
Notice.objects.create(recipient=recipient,
message=self.format_message(notice_type.label,
'notice.html', context),
notice_type=notice_type,
on_site=on_site,
sender=sender)
return True
<commit_msg>Use correct slug for Web backend.<commit_after>
|
from notification.backends.base import NotificationBackend
class WebBackend(NotificationBackend):
slug = u'web'
display_name = u'Web'
formats = ['short.txt', 'full.txt']
def send(self, sender, recipient, notice_type, context, on_site=False,
*args, **kwargs):
"""Always "sends" (i.e. stores to the database), setting on_site
accordingly.
"""
# TODO can't do this at the top or we get circular imports
from notification.models import Notice
Notice.objects.create(recipient=recipient,
message=self.format_message(notice_type.label,
'notice.html', context),
notice_type=notice_type,
on_site=on_site,
sender=sender)
return True
|
from notification.backends.base import NotificationBackend
class WebBackend(NotificationBackend):
slug = u'web'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def send(self, sender, recipient, notice_type, context, on_site=False,
*args, **kwargs):
"""Always "sends" (i.e. stores to the database), setting on_site
accordingly.
"""
# TODO can't do this at the top or we get circular imports
from notification.models import Notice
Notice.objects.create(recipient=recipient,
message=self.format_message(notice_type.label,
'notice.html', context),
notice_type=notice_type,
on_site=on_site,
sender=sender)
return True
Use correct slug for Web backend.from notification.backends.base import NotificationBackend
class WebBackend(NotificationBackend):
slug = u'web'
display_name = u'Web'
formats = ['short.txt', 'full.txt']
def send(self, sender, recipient, notice_type, context, on_site=False,
*args, **kwargs):
"""Always "sends" (i.e. stores to the database), setting on_site
accordingly.
"""
# TODO can't do this at the top or we get circular imports
from notification.models import Notice
Notice.objects.create(recipient=recipient,
message=self.format_message(notice_type.label,
'notice.html', context),
notice_type=notice_type,
on_site=on_site,
sender=sender)
return True
|
<commit_before>from notification.backends.base import NotificationBackend
class WebBackend(NotificationBackend):
slug = u'web'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def send(self, sender, recipient, notice_type, context, on_site=False,
*args, **kwargs):
"""Always "sends" (i.e. stores to the database), setting on_site
accordingly.
"""
# TODO can't do this at the top or we get circular imports
from notification.models import Notice
Notice.objects.create(recipient=recipient,
message=self.format_message(notice_type.label,
'notice.html', context),
notice_type=notice_type,
on_site=on_site,
sender=sender)
return True
<commit_msg>Use correct slug for Web backend.<commit_after>from notification.backends.base import NotificationBackend
class WebBackend(NotificationBackend):
slug = u'web'
display_name = u'Web'
formats = ['short.txt', 'full.txt']
def send(self, sender, recipient, notice_type, context, on_site=False,
*args, **kwargs):
"""Always "sends" (i.e. stores to the database), setting on_site
accordingly.
"""
# TODO can't do this at the top or we get circular imports
from notification.models import Notice
Notice.objects.create(recipient=recipient,
message=self.format_message(notice_type.label,
'notice.html', context),
notice_type=notice_type,
on_site=on_site,
sender=sender)
return True
|
39406267d31ca428dc73d721ccc19285ff7599bd
|
lit/Quit/expect_exit_code.py
|
lit/Quit/expect_exit_code.py
|
#!/usr/bin/env python2
import subprocess
import sys
args = sys.argv
expected_exit_code = args[1]
args = args[2:]
print("Running " + (" ".join(args)))
real_exit_code = subprocess.call(args)
if str(real_exit_code) != expected_exit_code:
print("Got exit code %d but expected %s" % (real_exit_code, expected_exit_code))
exit(1)
|
#!/usr/bin/env python
import subprocess
import sys
args = sys.argv
expected_exit_code = args[1]
args = args[2:]
print("Running " + (" ".join(args)))
real_exit_code = subprocess.call(args)
if str(real_exit_code) != expected_exit_code:
print("Got exit code %d but expected %s" % (real_exit_code, expected_exit_code))
exit(1)
|
Update shebang python2 -> python
|
[lldb] Update shebang python2 -> python
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@352259 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb
|
#!/usr/bin/env python2
import subprocess
import sys
args = sys.argv
expected_exit_code = args[1]
args = args[2:]
print("Running " + (" ".join(args)))
real_exit_code = subprocess.call(args)
if str(real_exit_code) != expected_exit_code:
print("Got exit code %d but expected %s" % (real_exit_code, expected_exit_code))
exit(1)
[lldb] Update shebang python2 -> python
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@352259 91177308-0d34-0410-b5e6-96231b3b80d8
|
#!/usr/bin/env python
import subprocess
import sys
args = sys.argv
expected_exit_code = args[1]
args = args[2:]
print("Running " + (" ".join(args)))
real_exit_code = subprocess.call(args)
if str(real_exit_code) != expected_exit_code:
print("Got exit code %d but expected %s" % (real_exit_code, expected_exit_code))
exit(1)
|
<commit_before>#!/usr/bin/env python2
import subprocess
import sys
args = sys.argv
expected_exit_code = args[1]
args = args[2:]
print("Running " + (" ".join(args)))
real_exit_code = subprocess.call(args)
if str(real_exit_code) != expected_exit_code:
print("Got exit code %d but expected %s" % (real_exit_code, expected_exit_code))
exit(1)
<commit_msg>[lldb] Update shebang python2 -> python
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@352259 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
#!/usr/bin/env python
import subprocess
import sys
args = sys.argv
expected_exit_code = args[1]
args = args[2:]
print("Running " + (" ".join(args)))
real_exit_code = subprocess.call(args)
if str(real_exit_code) != expected_exit_code:
print("Got exit code %d but expected %s" % (real_exit_code, expected_exit_code))
exit(1)
|
#!/usr/bin/env python2
import subprocess
import sys
args = sys.argv
expected_exit_code = args[1]
args = args[2:]
print("Running " + (" ".join(args)))
real_exit_code = subprocess.call(args)
if str(real_exit_code) != expected_exit_code:
print("Got exit code %d but expected %s" % (real_exit_code, expected_exit_code))
exit(1)
[lldb] Update shebang python2 -> python
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@352259 91177308-0d34-0410-b5e6-96231b3b80d8#!/usr/bin/env python
import subprocess
import sys
args = sys.argv
expected_exit_code = args[1]
args = args[2:]
print("Running " + (" ".join(args)))
real_exit_code = subprocess.call(args)
if str(real_exit_code) != expected_exit_code:
print("Got exit code %d but expected %s" % (real_exit_code, expected_exit_code))
exit(1)
|
<commit_before>#!/usr/bin/env python2
import subprocess
import sys
args = sys.argv
expected_exit_code = args[1]
args = args[2:]
print("Running " + (" ".join(args)))
real_exit_code = subprocess.call(args)
if str(real_exit_code) != expected_exit_code:
print("Got exit code %d but expected %s" % (real_exit_code, expected_exit_code))
exit(1)
<commit_msg>[lldb] Update shebang python2 -> python
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@352259 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>#!/usr/bin/env python
import subprocess
import sys
args = sys.argv
expected_exit_code = args[1]
args = args[2:]
print("Running " + (" ".join(args)))
real_exit_code = subprocess.call(args)
if str(real_exit_code) != expected_exit_code:
print("Got exit code %d but expected %s" % (real_exit_code, expected_exit_code))
exit(1)
|
7aa84fbcc7a3af57ef62c29008fac4036d2d28af
|
django_afip/migrations/0021_drop_batches.py
|
django_afip/migrations/0021_drop_batches.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-02 23:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('afip', '0020_backfill_receiptvalidation__processed_date'),
]
operations = [
migrations.RemoveField(
model_name='receipt',
name='batch',
),
migrations.RemoveField(
model_name='receiptvalidation',
name='validation',
),
migrations.AlterField(
model_name='receiptvalidation',
name='processed_date',
field=models.DateTimeField(verbose_name='processed date'),
),
migrations.DeleteModel(
name='ReceiptBatch',
),
migrations.DeleteModel(
name='Validation',
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-02 23:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('afip', '0020_backfill_receiptvalidation__processed_date'),
]
operations = [
migrations.RemoveField(
model_name='receipt',
name='batch',
),
migrations.RemoveField(
model_name='receiptvalidation',
name='validation',
),
migrations.AlterField(
model_name='receiptvalidation',
name='processed_date',
field=models.DateTimeField(verbose_name='processed date'),
),
migrations.DeleteModel(
name='Validation',
),
migrations.DeleteModel(
name='ReceiptBatch',
),
]
|
Tweak a migration to run on non-transactional DBs
|
Tweak a migration to run on non-transactional DBs
A single migration failed to run on databases with no support for
transactions because those require explicit ordering of commands that's
generally implicit on modern relational DBs.
Switch the order of those queries to prevent that crash.
Fixes #27
|
Python
|
isc
|
hobarrera/django-afip,hobarrera/django-afip
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-02 23:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('afip', '0020_backfill_receiptvalidation__processed_date'),
]
operations = [
migrations.RemoveField(
model_name='receipt',
name='batch',
),
migrations.RemoveField(
model_name='receiptvalidation',
name='validation',
),
migrations.AlterField(
model_name='receiptvalidation',
name='processed_date',
field=models.DateTimeField(verbose_name='processed date'),
),
migrations.DeleteModel(
name='ReceiptBatch',
),
migrations.DeleteModel(
name='Validation',
),
]
Tweak a migration to run on non-transactional DBs
A single migration failed to run on databases with no support for
transactions because those require explicit ordering of commands that's
generally implicit on modern relational DBs.
Switch the order of those queries to prevent that crash.
Fixes #27
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-02 23:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('afip', '0020_backfill_receiptvalidation__processed_date'),
]
operations = [
migrations.RemoveField(
model_name='receipt',
name='batch',
),
migrations.RemoveField(
model_name='receiptvalidation',
name='validation',
),
migrations.AlterField(
model_name='receiptvalidation',
name='processed_date',
field=models.DateTimeField(verbose_name='processed date'),
),
migrations.DeleteModel(
name='Validation',
),
migrations.DeleteModel(
name='ReceiptBatch',
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-02 23:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('afip', '0020_backfill_receiptvalidation__processed_date'),
]
operations = [
migrations.RemoveField(
model_name='receipt',
name='batch',
),
migrations.RemoveField(
model_name='receiptvalidation',
name='validation',
),
migrations.AlterField(
model_name='receiptvalidation',
name='processed_date',
field=models.DateTimeField(verbose_name='processed date'),
),
migrations.DeleteModel(
name='ReceiptBatch',
),
migrations.DeleteModel(
name='Validation',
),
]
<commit_msg>Tweak a migration to run on non-transactional DBs
A single migration failed to run on databases with no support for
transactions because those require explicit ordering of commands that's
generally implicit on modern relational DBs.
Switch the order of those queries to prevent that crash.
Fixes #27<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-02 23:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('afip', '0020_backfill_receiptvalidation__processed_date'),
]
operations = [
migrations.RemoveField(
model_name='receipt',
name='batch',
),
migrations.RemoveField(
model_name='receiptvalidation',
name='validation',
),
migrations.AlterField(
model_name='receiptvalidation',
name='processed_date',
field=models.DateTimeField(verbose_name='processed date'),
),
migrations.DeleteModel(
name='Validation',
),
migrations.DeleteModel(
name='ReceiptBatch',
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-02 23:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('afip', '0020_backfill_receiptvalidation__processed_date'),
]
operations = [
migrations.RemoveField(
model_name='receipt',
name='batch',
),
migrations.RemoveField(
model_name='receiptvalidation',
name='validation',
),
migrations.AlterField(
model_name='receiptvalidation',
name='processed_date',
field=models.DateTimeField(verbose_name='processed date'),
),
migrations.DeleteModel(
name='ReceiptBatch',
),
migrations.DeleteModel(
name='Validation',
),
]
Tweak a migration to run on non-transactional DBs
A single migration failed to run on databases with no support for
transactions because those require explicit ordering of commands that's
generally implicit on modern relational DBs.
Switch the order of those queries to prevent that crash.
Fixes #27# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-02 23:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('afip', '0020_backfill_receiptvalidation__processed_date'),
]
operations = [
migrations.RemoveField(
model_name='receipt',
name='batch',
),
migrations.RemoveField(
model_name='receiptvalidation',
name='validation',
),
migrations.AlterField(
model_name='receiptvalidation',
name='processed_date',
field=models.DateTimeField(verbose_name='processed date'),
),
migrations.DeleteModel(
name='Validation',
),
migrations.DeleteModel(
name='ReceiptBatch',
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-02 23:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('afip', '0020_backfill_receiptvalidation__processed_date'),
]
operations = [
migrations.RemoveField(
model_name='receipt',
name='batch',
),
migrations.RemoveField(
model_name='receiptvalidation',
name='validation',
),
migrations.AlterField(
model_name='receiptvalidation',
name='processed_date',
field=models.DateTimeField(verbose_name='processed date'),
),
migrations.DeleteModel(
name='ReceiptBatch',
),
migrations.DeleteModel(
name='Validation',
),
]
<commit_msg>Tweak a migration to run on non-transactional DBs
A single migration failed to run on databases with no support for
transactions because those require explicit ordering of commands that's
generally implicit on modern relational DBs.
Switch the order of those queries to prevent that crash.
Fixes #27<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-02 23:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('afip', '0020_backfill_receiptvalidation__processed_date'),
]
operations = [
migrations.RemoveField(
model_name='receipt',
name='batch',
),
migrations.RemoveField(
model_name='receiptvalidation',
name='validation',
),
migrations.AlterField(
model_name='receiptvalidation',
name='processed_date',
field=models.DateTimeField(verbose_name='processed date'),
),
migrations.DeleteModel(
name='Validation',
),
migrations.DeleteModel(
name='ReceiptBatch',
),
]
|
1ff616fe4f6ff0ff295eeeaa4a817851df750e51
|
openslides/utils/validate.py
|
openslides/utils/validate.py
|
import bleach
allowed_tags = [
"a",
"img", # links and images
"br",
"p",
"span",
"blockquote", # text layout
"strike",
"strong",
"u",
"em",
"sup",
"sub",
"pre", # text formatting
"h1",
"h2",
"h3",
"h4",
"h5",
"h6", # headings
"ol",
"ul",
"li", # lists
"table",
"caption",
"thead",
"tbody",
"th",
"tr",
"td", # tables
]
allowed_attributes = {
"*": ["class", "style"],
"img": ["alt", "src", "title"],
"a": ["href", "title"],
"th": ["scope"],
"ol": ["start"],
}
allowed_styles = [
"color",
"background-color",
"height",
"width",
"text-align",
"float",
"padding",
"text-decoration",
]
def validate_html(html: str) -> str:
"""
This method takes a string and escapes all non-whitelisted html entries.
Every field of a model that is loaded trusted in the DOM should be validated.
During copy and paste from Word maybe some tabs are spread over the html. Remove them.
"""
html = html.replace("\t", "")
return bleach.clean(
html, tags=allowed_tags, attributes=allowed_attributes, styles=allowed_styles
)
|
import bleach
allowed_tags = [
"a",
"img", # links and images
"br",
"p",
"span",
"blockquote", # text layout
"strike",
"del",
"ins",
"strong",
"u",
"em",
"sup",
"sub",
"pre", # text formatting
"h1",
"h2",
"h3",
"h4",
"h5",
"h6", # headings
"ol",
"ul",
"li", # lists
"table",
"caption",
"thead",
"tbody",
"th",
"tr",
"td", # tables
]
allowed_attributes = {
"*": ["class", "style"],
"img": ["alt", "src", "title"],
"a": ["href", "title"],
"th": ["scope"],
"ol": ["start"],
}
allowed_styles = [
"color",
"background-color",
"height",
"width",
"text-align",
"float",
"padding",
"text-decoration",
]
def validate_html(html: str) -> str:
"""
This method takes a string and escapes all non-whitelisted html entries.
Every field of a model that is loaded trusted in the DOM should be validated.
During copy and paste from Word maybe some tabs are spread over the html. Remove them.
"""
html = html.replace("\t", "")
return bleach.clean(
html, tags=allowed_tags, attributes=allowed_attributes, styles=allowed_styles
)
|
Allow <del> and <ins> html tags.
|
Allow <del> and <ins> html tags.
|
Python
|
mit
|
tsiegleauq/OpenSlides,ostcar/OpenSlides,FinnStutzenstein/OpenSlides,normanjaeckel/OpenSlides,jwinzer/OpenSlides,OpenSlides/OpenSlides,tsiegleauq/OpenSlides,jwinzer/OpenSlides,FinnStutzenstein/OpenSlides,CatoTH/OpenSlides,CatoTH/OpenSlides,OpenSlides/OpenSlides,tsiegleauq/OpenSlides,CatoTH/OpenSlides,normanjaeckel/OpenSlides,ostcar/OpenSlides,ostcar/OpenSlides,FinnStutzenstein/OpenSlides,normanjaeckel/OpenSlides,jwinzer/OpenSlides,normanjaeckel/OpenSlides,FinnStutzenstein/OpenSlides,jwinzer/OpenSlides,CatoTH/OpenSlides,jwinzer/OpenSlides
|
import bleach
allowed_tags = [
"a",
"img", # links and images
"br",
"p",
"span",
"blockquote", # text layout
"strike",
"strong",
"u",
"em",
"sup",
"sub",
"pre", # text formatting
"h1",
"h2",
"h3",
"h4",
"h5",
"h6", # headings
"ol",
"ul",
"li", # lists
"table",
"caption",
"thead",
"tbody",
"th",
"tr",
"td", # tables
]
allowed_attributes = {
"*": ["class", "style"],
"img": ["alt", "src", "title"],
"a": ["href", "title"],
"th": ["scope"],
"ol": ["start"],
}
allowed_styles = [
"color",
"background-color",
"height",
"width",
"text-align",
"float",
"padding",
"text-decoration",
]
def validate_html(html: str) -> str:
"""
This method takes a string and escapes all non-whitelisted html entries.
Every field of a model that is loaded trusted in the DOM should be validated.
During copy and paste from Word maybe some tabs are spread over the html. Remove them.
"""
html = html.replace("\t", "")
return bleach.clean(
html, tags=allowed_tags, attributes=allowed_attributes, styles=allowed_styles
)
Allow <del> and <ins> html tags.
|
import bleach
allowed_tags = [
"a",
"img", # links and images
"br",
"p",
"span",
"blockquote", # text layout
"strike",
"del",
"ins",
"strong",
"u",
"em",
"sup",
"sub",
"pre", # text formatting
"h1",
"h2",
"h3",
"h4",
"h5",
"h6", # headings
"ol",
"ul",
"li", # lists
"table",
"caption",
"thead",
"tbody",
"th",
"tr",
"td", # tables
]
allowed_attributes = {
"*": ["class", "style"],
"img": ["alt", "src", "title"],
"a": ["href", "title"],
"th": ["scope"],
"ol": ["start"],
}
allowed_styles = [
"color",
"background-color",
"height",
"width",
"text-align",
"float",
"padding",
"text-decoration",
]
def validate_html(html: str) -> str:
"""
This method takes a string and escapes all non-whitelisted html entries.
Every field of a model that is loaded trusted in the DOM should be validated.
During copy and paste from Word maybe some tabs are spread over the html. Remove them.
"""
html = html.replace("\t", "")
return bleach.clean(
html, tags=allowed_tags, attributes=allowed_attributes, styles=allowed_styles
)
|
<commit_before>import bleach
allowed_tags = [
"a",
"img", # links and images
"br",
"p",
"span",
"blockquote", # text layout
"strike",
"strong",
"u",
"em",
"sup",
"sub",
"pre", # text formatting
"h1",
"h2",
"h3",
"h4",
"h5",
"h6", # headings
"ol",
"ul",
"li", # lists
"table",
"caption",
"thead",
"tbody",
"th",
"tr",
"td", # tables
]
allowed_attributes = {
"*": ["class", "style"],
"img": ["alt", "src", "title"],
"a": ["href", "title"],
"th": ["scope"],
"ol": ["start"],
}
allowed_styles = [
"color",
"background-color",
"height",
"width",
"text-align",
"float",
"padding",
"text-decoration",
]
def validate_html(html: str) -> str:
"""
This method takes a string and escapes all non-whitelisted html entries.
Every field of a model that is loaded trusted in the DOM should be validated.
During copy and paste from Word maybe some tabs are spread over the html. Remove them.
"""
html = html.replace("\t", "")
return bleach.clean(
html, tags=allowed_tags, attributes=allowed_attributes, styles=allowed_styles
)
<commit_msg>Allow <del> and <ins> html tags.<commit_after>
|
import bleach
allowed_tags = [
"a",
"img", # links and images
"br",
"p",
"span",
"blockquote", # text layout
"strike",
"del",
"ins",
"strong",
"u",
"em",
"sup",
"sub",
"pre", # text formatting
"h1",
"h2",
"h3",
"h4",
"h5",
"h6", # headings
"ol",
"ul",
"li", # lists
"table",
"caption",
"thead",
"tbody",
"th",
"tr",
"td", # tables
]
allowed_attributes = {
"*": ["class", "style"],
"img": ["alt", "src", "title"],
"a": ["href", "title"],
"th": ["scope"],
"ol": ["start"],
}
allowed_styles = [
"color",
"background-color",
"height",
"width",
"text-align",
"float",
"padding",
"text-decoration",
]
def validate_html(html: str) -> str:
"""
This method takes a string and escapes all non-whitelisted html entries.
Every field of a model that is loaded trusted in the DOM should be validated.
During copy and paste from Word maybe some tabs are spread over the html. Remove them.
"""
html = html.replace("\t", "")
return bleach.clean(
html, tags=allowed_tags, attributes=allowed_attributes, styles=allowed_styles
)
|
import bleach
allowed_tags = [
"a",
"img", # links and images
"br",
"p",
"span",
"blockquote", # text layout
"strike",
"strong",
"u",
"em",
"sup",
"sub",
"pre", # text formatting
"h1",
"h2",
"h3",
"h4",
"h5",
"h6", # headings
"ol",
"ul",
"li", # lists
"table",
"caption",
"thead",
"tbody",
"th",
"tr",
"td", # tables
]
allowed_attributes = {
"*": ["class", "style"],
"img": ["alt", "src", "title"],
"a": ["href", "title"],
"th": ["scope"],
"ol": ["start"],
}
allowed_styles = [
"color",
"background-color",
"height",
"width",
"text-align",
"float",
"padding",
"text-decoration",
]
def validate_html(html: str) -> str:
"""
This method takes a string and escapes all non-whitelisted html entries.
Every field of a model that is loaded trusted in the DOM should be validated.
During copy and paste from Word maybe some tabs are spread over the html. Remove them.
"""
html = html.replace("\t", "")
return bleach.clean(
html, tags=allowed_tags, attributes=allowed_attributes, styles=allowed_styles
)
Allow <del> and <ins> html tags.import bleach
allowed_tags = [
"a",
"img", # links and images
"br",
"p",
"span",
"blockquote", # text layout
"strike",
"del",
"ins",
"strong",
"u",
"em",
"sup",
"sub",
"pre", # text formatting
"h1",
"h2",
"h3",
"h4",
"h5",
"h6", # headings
"ol",
"ul",
"li", # lists
"table",
"caption",
"thead",
"tbody",
"th",
"tr",
"td", # tables
]
allowed_attributes = {
"*": ["class", "style"],
"img": ["alt", "src", "title"],
"a": ["href", "title"],
"th": ["scope"],
"ol": ["start"],
}
allowed_styles = [
"color",
"background-color",
"height",
"width",
"text-align",
"float",
"padding",
"text-decoration",
]
def validate_html(html: str) -> str:
"""
This method takes a string and escapes all non-whitelisted html entries.
Every field of a model that is loaded trusted in the DOM should be validated.
During copy and paste from Word maybe some tabs are spread over the html. Remove them.
"""
html = html.replace("\t", "")
return bleach.clean(
html, tags=allowed_tags, attributes=allowed_attributes, styles=allowed_styles
)
|
<commit_before>import bleach
allowed_tags = [
"a",
"img", # links and images
"br",
"p",
"span",
"blockquote", # text layout
"strike",
"strong",
"u",
"em",
"sup",
"sub",
"pre", # text formatting
"h1",
"h2",
"h3",
"h4",
"h5",
"h6", # headings
"ol",
"ul",
"li", # lists
"table",
"caption",
"thead",
"tbody",
"th",
"tr",
"td", # tables
]
allowed_attributes = {
"*": ["class", "style"],
"img": ["alt", "src", "title"],
"a": ["href", "title"],
"th": ["scope"],
"ol": ["start"],
}
allowed_styles = [
"color",
"background-color",
"height",
"width",
"text-align",
"float",
"padding",
"text-decoration",
]
def validate_html(html: str) -> str:
"""
This method takes a string and escapes all non-whitelisted html entries.
Every field of a model that is loaded trusted in the DOM should be validated.
During copy and paste from Word maybe some tabs are spread over the html. Remove them.
"""
html = html.replace("\t", "")
return bleach.clean(
html, tags=allowed_tags, attributes=allowed_attributes, styles=allowed_styles
)
<commit_msg>Allow <del> and <ins> html tags.<commit_after>import bleach
allowed_tags = [
"a",
"img", # links and images
"br",
"p",
"span",
"blockquote", # text layout
"strike",
"del",
"ins",
"strong",
"u",
"em",
"sup",
"sub",
"pre", # text formatting
"h1",
"h2",
"h3",
"h4",
"h5",
"h6", # headings
"ol",
"ul",
"li", # lists
"table",
"caption",
"thead",
"tbody",
"th",
"tr",
"td", # tables
]
allowed_attributes = {
"*": ["class", "style"],
"img": ["alt", "src", "title"],
"a": ["href", "title"],
"th": ["scope"],
"ol": ["start"],
}
allowed_styles = [
"color",
"background-color",
"height",
"width",
"text-align",
"float",
"padding",
"text-decoration",
]
def validate_html(html: str) -> str:
"""
This method takes a string and escapes all non-whitelisted html entries.
Every field of a model that is loaded trusted in the DOM should be validated.
During copy and paste from Word maybe some tabs are spread over the html. Remove them.
"""
html = html.replace("\t", "")
return bleach.clean(
html, tags=allowed_tags, attributes=allowed_attributes, styles=allowed_styles
)
|
e0ebd4cb41d3ed9168e819f7017dd98c2fbb599a
|
insertion_sort.py
|
insertion_sort.py
|
def insertion_sort(un_list):
for idx in range(1, len(un_list)):
current = un_list[idx]
position = idx
while position > 0 and un_list[position-1] > current:
un_list[position] = un_list[position-1]
position = position - 1
un_list[position] = current
if __name__ == '__main__':
BEST_CASE = range(1000)
WORST_CASE = BEST_CASE[::-1]
from timeit import Timer
best = Timer(
'insertion_sort({})'.format(BEST_CASE),
'from __main__ import BEST_CASE, insertion_sort').timeit(1000)
worst = Timer(
'insertion_sort({})'.format(WORST_CASE),
'from __main__ import WORST_CASE, insertion_sort').timeit(1000)
print("""Best case represented as a list that is already sorted\n
Worst case represented as a list that is absolute reverse of sorted""")
print('Best Case: {}'.format(best))
print('Worst Case: {}'.format(worst))
|
def insertion_sort(un_list):
if type(un_list) is not list:
return "You must pass a valid list as argument. Do it."
for idx in range(1, len(un_list)):
current = un_list[idx]
position = idx
while position > 0 and un_list[position-1] > current:
un_list[position] = un_list[position-1]
position = position - 1
un_list[position] = current
if __name__ == '__main__':
BEST_CASE = range(1000)
WORST_CASE = BEST_CASE[::-1]
from timeit import Timer
best = Timer(
'insertion_sort({})'.format(BEST_CASE),
'from __main__ import BEST_CASE, insertion_sort').timeit(1000)
worst = Timer(
'insertion_sort({})'.format(WORST_CASE),
'from __main__ import WORST_CASE, insertion_sort').timeit(1000)
print("""Best case represented as a list that is already sorted\n
Worst case represented as a list that is absolute reverse of sorted""")
print('Best Case: {}'.format(best))
print('Worst Case: {}'.format(worst))
|
Update insertion sort with list validation
|
Update insertion sort with list validation
|
Python
|
mit
|
jonathanstallings/data-structures
|
def insertion_sort(un_list):
for idx in range(1, len(un_list)):
current = un_list[idx]
position = idx
while position > 0 and un_list[position-1] > current:
un_list[position] = un_list[position-1]
position = position - 1
un_list[position] = current
if __name__ == '__main__':
BEST_CASE = range(1000)
WORST_CASE = BEST_CASE[::-1]
from timeit import Timer
best = Timer(
'insertion_sort({})'.format(BEST_CASE),
'from __main__ import BEST_CASE, insertion_sort').timeit(1000)
worst = Timer(
'insertion_sort({})'.format(WORST_CASE),
'from __main__ import WORST_CASE, insertion_sort').timeit(1000)
print("""Best case represented as a list that is already sorted\n
Worst case represented as a list that is absolute reverse of sorted""")
print('Best Case: {}'.format(best))
print('Worst Case: {}'.format(worst))
Update insertion sort with list validation
|
def insertion_sort(un_list):
if type(un_list) is not list:
return "You must pass a valid list as argument. Do it."
for idx in range(1, len(un_list)):
current = un_list[idx]
position = idx
while position > 0 and un_list[position-1] > current:
un_list[position] = un_list[position-1]
position = position - 1
un_list[position] = current
if __name__ == '__main__':
BEST_CASE = range(1000)
WORST_CASE = BEST_CASE[::-1]
from timeit import Timer
best = Timer(
'insertion_sort({})'.format(BEST_CASE),
'from __main__ import BEST_CASE, insertion_sort').timeit(1000)
worst = Timer(
'insertion_sort({})'.format(WORST_CASE),
'from __main__ import WORST_CASE, insertion_sort').timeit(1000)
print("""Best case represented as a list that is already sorted\n
Worst case represented as a list that is absolute reverse of sorted""")
print('Best Case: {}'.format(best))
print('Worst Case: {}'.format(worst))
|
<commit_before>def insertion_sort(un_list):
for idx in range(1, len(un_list)):
current = un_list[idx]
position = idx
while position > 0 and un_list[position-1] > current:
un_list[position] = un_list[position-1]
position = position - 1
un_list[position] = current
if __name__ == '__main__':
BEST_CASE = range(1000)
WORST_CASE = BEST_CASE[::-1]
from timeit import Timer
best = Timer(
'insertion_sort({})'.format(BEST_CASE),
'from __main__ import BEST_CASE, insertion_sort').timeit(1000)
worst = Timer(
'insertion_sort({})'.format(WORST_CASE),
'from __main__ import WORST_CASE, insertion_sort').timeit(1000)
print("""Best case represented as a list that is already sorted\n
Worst case represented as a list that is absolute reverse of sorted""")
print('Best Case: {}'.format(best))
print('Worst Case: {}'.format(worst))
<commit_msg>Update insertion sort with list validation<commit_after>
|
def insertion_sort(un_list):
if type(un_list) is not list:
return "You must pass a valid list as argument. Do it."
for idx in range(1, len(un_list)):
current = un_list[idx]
position = idx
while position > 0 and un_list[position-1] > current:
un_list[position] = un_list[position-1]
position = position - 1
un_list[position] = current
if __name__ == '__main__':
BEST_CASE = range(1000)
WORST_CASE = BEST_CASE[::-1]
from timeit import Timer
best = Timer(
'insertion_sort({})'.format(BEST_CASE),
'from __main__ import BEST_CASE, insertion_sort').timeit(1000)
worst = Timer(
'insertion_sort({})'.format(WORST_CASE),
'from __main__ import WORST_CASE, insertion_sort').timeit(1000)
print("""Best case represented as a list that is already sorted\n
Worst case represented as a list that is absolute reverse of sorted""")
print('Best Case: {}'.format(best))
print('Worst Case: {}'.format(worst))
|
def insertion_sort(un_list):
for idx in range(1, len(un_list)):
current = un_list[idx]
position = idx
while position > 0 and un_list[position-1] > current:
un_list[position] = un_list[position-1]
position = position - 1
un_list[position] = current
if __name__ == '__main__':
BEST_CASE = range(1000)
WORST_CASE = BEST_CASE[::-1]
from timeit import Timer
best = Timer(
'insertion_sort({})'.format(BEST_CASE),
'from __main__ import BEST_CASE, insertion_sort').timeit(1000)
worst = Timer(
'insertion_sort({})'.format(WORST_CASE),
'from __main__ import WORST_CASE, insertion_sort').timeit(1000)
print("""Best case represented as a list that is already sorted\n
Worst case represented as a list that is absolute reverse of sorted""")
print('Best Case: {}'.format(best))
print('Worst Case: {}'.format(worst))
Update insertion sort with list validationdef insertion_sort(un_list):
if type(un_list) is not list:
return "You must pass a valid list as argument. Do it."
for idx in range(1, len(un_list)):
current = un_list[idx]
position = idx
while position > 0 and un_list[position-1] > current:
un_list[position] = un_list[position-1]
position = position - 1
un_list[position] = current
if __name__ == '__main__':
BEST_CASE = range(1000)
WORST_CASE = BEST_CASE[::-1]
from timeit import Timer
best = Timer(
'insertion_sort({})'.format(BEST_CASE),
'from __main__ import BEST_CASE, insertion_sort').timeit(1000)
worst = Timer(
'insertion_sort({})'.format(WORST_CASE),
'from __main__ import WORST_CASE, insertion_sort').timeit(1000)
print("""Best case represented as a list that is already sorted\n
Worst case represented as a list that is absolute reverse of sorted""")
print('Best Case: {}'.format(best))
print('Worst Case: {}'.format(worst))
|
<commit_before>def insertion_sort(un_list):
for idx in range(1, len(un_list)):
current = un_list[idx]
position = idx
while position > 0 and un_list[position-1] > current:
un_list[position] = un_list[position-1]
position = position - 1
un_list[position] = current
if __name__ == '__main__':
BEST_CASE = range(1000)
WORST_CASE = BEST_CASE[::-1]
from timeit import Timer
best = Timer(
'insertion_sort({})'.format(BEST_CASE),
'from __main__ import BEST_CASE, insertion_sort').timeit(1000)
worst = Timer(
'insertion_sort({})'.format(WORST_CASE),
'from __main__ import WORST_CASE, insertion_sort').timeit(1000)
print("""Best case represented as a list that is already sorted\n
Worst case represented as a list that is absolute reverse of sorted""")
print('Best Case: {}'.format(best))
print('Worst Case: {}'.format(worst))
<commit_msg>Update insertion sort with list validation<commit_after>def insertion_sort(un_list):
if type(un_list) is not list:
return "You must pass a valid list as argument. Do it."
for idx in range(1, len(un_list)):
current = un_list[idx]
position = idx
while position > 0 and un_list[position-1] > current:
un_list[position] = un_list[position-1]
position = position - 1
un_list[position] = current
if __name__ == '__main__':
BEST_CASE = range(1000)
WORST_CASE = BEST_CASE[::-1]
from timeit import Timer
best = Timer(
'insertion_sort({})'.format(BEST_CASE),
'from __main__ import BEST_CASE, insertion_sort').timeit(1000)
worst = Timer(
'insertion_sort({})'.format(WORST_CASE),
'from __main__ import WORST_CASE, insertion_sort').timeit(1000)
print("""Best case represented as a list that is already sorted\n
Worst case represented as a list that is absolute reverse of sorted""")
print('Best Case: {}'.format(best))
print('Worst Case: {}'.format(worst))
|
a5697ddd595b929ef7261d62fd333c2cd2f56dd0
|
plots/views.py
|
plots/views.py
|
# Create your views here.
import json
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
import data
def rawdata(request, plotname):
"""
Based on the ajax request from the template, it calls data.py to fetch the appropriate data.
It returns a JSON dump of the dictionary that is returned from data.py
"""
if request.is_ajax():
try:
data_dict = {}
data_dict = getattr(data, plotname).__call__()
print data_dict
return HttpResponse(simplejson.dumps(data_dict), content_type="application/json")
except AttributeError:
raise 404
raise 404
def draw(request, plotname):
"""
The draw view is responsible for drawing the chart. It renders a template chart.html which sends ajax request for the JSON data.
It also provides the template the name of the plot to draw via the <name_dict> dictionary.
"""
name_dict = {'plotname': plotname}
return render_to_response("flot-chart.html", name_dict, context_instance=RequestContext(request))
|
# Create your views here.
import json
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
import data
def rawdata(request, plotname):
"""
Based on the ajax request from the template, it calls data.py to fetch the appropriate data.
It returns a JSON dump of the dictionary that is returned from data.py
"""
if request.is_ajax():
try:
data_dict = {}
data_dict = getattr(data, plotname).__call__()
print data_dict
return HttpResponse(simplejson.dumps(data_dict), content_type="application/json")
except AttributeError:
raise Http404
raise Http404
def draw(request, plotname):
"""
The draw view is responsible for drawing the chart. It renders a template chart.html which sends ajax request for the JSON data.
It also provides the template the name of the plot to draw via the <name_dict> dictionary.
"""
name_dict = {'plotname': plotname}
return render_to_response("flot-chart.html", name_dict, context_instance=RequestContext(request))
|
Raise Http404 instead of 404
|
Raise Http404 instead of 404
|
Python
|
bsd-2-clause
|
ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark
|
# Create your views here.
import json
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
import data
def rawdata(request, plotname):
"""
Based on the ajax request from the template, it calls data.py to fetch the appropriate data.
It returns a JSON dump of the dictionary that is returned from data.py
"""
if request.is_ajax():
try:
data_dict = {}
data_dict = getattr(data, plotname).__call__()
print data_dict
return HttpResponse(simplejson.dumps(data_dict), content_type="application/json")
except AttributeError:
raise 404
raise 404
def draw(request, plotname):
"""
The draw view is responsible for drawing the chart. It renders a template chart.html which sends ajax request for the JSON data.
It also provides the template the name of the plot to draw via the <name_dict> dictionary.
"""
name_dict = {'plotname': plotname}
return render_to_response("flot-chart.html", name_dict, context_instance=RequestContext(request))
Raise Http404 instead of 404
|
# Create your views here.
import json
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
import data
def rawdata(request, plotname):
"""
Based on the ajax request from the template, it calls data.py to fetch the appropriate data.
It returns a JSON dump of the dictionary that is returned from data.py
"""
if request.is_ajax():
try:
data_dict = {}
data_dict = getattr(data, plotname).__call__()
print data_dict
return HttpResponse(simplejson.dumps(data_dict), content_type="application/json")
except AttributeError:
raise Http404
raise Http404
def draw(request, plotname):
"""
The draw view is responsible for drawing the chart. It renders a template chart.html which sends ajax request for the JSON data.
It also provides the template the name of the plot to draw via the <name_dict> dictionary.
"""
name_dict = {'plotname': plotname}
return render_to_response("flot-chart.html", name_dict, context_instance=RequestContext(request))
|
<commit_before># Create your views here.
import json
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
import data
def rawdata(request, plotname):
"""
Based on the ajax request from the template, it calls data.py to fetch the appropriate data.
It returns a JSON dump of the dictionary that is returned from data.py
"""
if request.is_ajax():
try:
data_dict = {}
data_dict = getattr(data, plotname).__call__()
print data_dict
return HttpResponse(simplejson.dumps(data_dict), content_type="application/json")
except AttributeError:
raise 404
raise 404
def draw(request, plotname):
"""
The draw view is responsible for drawing the chart. It renders a template chart.html which sends ajax request for the JSON data.
It also provides the template the name of the plot to draw via the <name_dict> dictionary.
"""
name_dict = {'plotname': plotname}
return render_to_response("flot-chart.html", name_dict, context_instance=RequestContext(request))
<commit_msg>Raise Http404 instead of 404<commit_after>
|
# Create your views here.
import json
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
import data
def rawdata(request, plotname):
"""
Based on the ajax request from the template, it calls data.py to fetch the appropriate data.
It returns a JSON dump of the dictionary that is returned from data.py
"""
if request.is_ajax():
try:
data_dict = {}
data_dict = getattr(data, plotname).__call__()
print data_dict
return HttpResponse(simplejson.dumps(data_dict), content_type="application/json")
except AttributeError:
raise Http404
raise Http404
def draw(request, plotname):
"""
The draw view is responsible for drawing the chart. It renders a template chart.html which sends ajax request for the JSON data.
It also provides the template the name of the plot to draw via the <name_dict> dictionary.
"""
name_dict = {'plotname': plotname}
return render_to_response("flot-chart.html", name_dict, context_instance=RequestContext(request))
|
# Create your views here.
import json
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
import data
def rawdata(request, plotname):
"""
Based on the ajax request from the template, it calls data.py to fetch the appropriate data.
It returns a JSON dump of the dictionary that is returned from data.py
"""
if request.is_ajax():
try:
data_dict = {}
data_dict = getattr(data, plotname).__call__()
print data_dict
return HttpResponse(simplejson.dumps(data_dict), content_type="application/json")
except AttributeError:
raise 404
raise 404
def draw(request, plotname):
"""
The draw view is responsible for drawing the chart. It renders a template chart.html which sends ajax request for the JSON data.
It also provides the template the name of the plot to draw via the <name_dict> dictionary.
"""
name_dict = {'plotname': plotname}
return render_to_response("flot-chart.html", name_dict, context_instance=RequestContext(request))
Raise Http404 instead of 404# Create your views here.
import json
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
import data
def rawdata(request, plotname):
"""
Based on the ajax request from the template, it calls data.py to fetch the appropriate data.
It returns a JSON dump of the dictionary that is returned from data.py
"""
if request.is_ajax():
try:
data_dict = {}
data_dict = getattr(data, plotname).__call__()
print data_dict
return HttpResponse(simplejson.dumps(data_dict), content_type="application/json")
except AttributeError:
raise Http404
raise Http404
def draw(request, plotname):
"""
The draw view is responsible for drawing the chart. It renders a template chart.html which sends ajax request for the JSON data.
It also provides the template the name of the plot to draw via the <name_dict> dictionary.
"""
name_dict = {'plotname': plotname}
return render_to_response("flot-chart.html", name_dict, context_instance=RequestContext(request))
|
<commit_before># Create your views here.
import json
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
import data
def rawdata(request, plotname):
"""
Based on the ajax request from the template, it calls data.py to fetch the appropriate data.
It returns a JSON dump of the dictionary that is returned from data.py
"""
if request.is_ajax():
try:
data_dict = {}
data_dict = getattr(data, plotname).__call__()
print data_dict
return HttpResponse(simplejson.dumps(data_dict), content_type="application/json")
except AttributeError:
raise 404
raise 404
def draw(request, plotname):
"""
The draw view is responsible for drawing the chart. It renders a template chart.html which sends ajax request for the JSON data.
It also provides the template the name of the plot to draw via the <name_dict> dictionary.
"""
name_dict = {'plotname': plotname}
return render_to_response("flot-chart.html", name_dict, context_instance=RequestContext(request))
<commit_msg>Raise Http404 instead of 404<commit_after># Create your views here.
import json
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
import data
def rawdata(request, plotname):
"""
Based on the ajax request from the template, it calls data.py to fetch the appropriate data.
It returns a JSON dump of the dictionary that is returned from data.py
"""
if request.is_ajax():
try:
data_dict = {}
data_dict = getattr(data, plotname).__call__()
print data_dict
return HttpResponse(simplejson.dumps(data_dict), content_type="application/json")
except AttributeError:
raise Http404
raise Http404
def draw(request, plotname):
"""
The draw view is responsible for drawing the chart. It renders a template chart.html which sends ajax request for the JSON data.
It also provides the template the name of the plot to draw via the <name_dict> dictionary.
"""
name_dict = {'plotname': plotname}
return render_to_response("flot-chart.html", name_dict, context_instance=RequestContext(request))
|
5a2212746bfabcfd64cf27846770b35f767d57a6
|
polls/views.py
|
polls/views.py
|
from django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from singleurlcrud.views import CRUDView
from .models import *
# Create your views here.
class AuthorCRUDView(CRUDView):
model = Author
list_display = ('name',)
class QuestionCRUDView(CRUDView):
model = Question
list_display = ('question_text', 'pub_date', 'author')
related_field_crud_urls = {
'author': reverse_lazy("polls:authors")
}
def get_actions(self):
self.related_field_crud_urls = {
'author': reverse_lazy('polls:authors') +"?o=add",
}
return [
('Delete', self.delete_multiple_items)
]
def delete_multiple_items(self, request, items):
pass
class VoteItemAction(object):
title = 'Vote'
key = 'vote1'
css = 'glyphicon glyphicon-envelope'
def doAction(self, item):
import logging
logging.getLogger('general').info("VoteItemAction invoked!")
pass
def get_item_actions(self):
return [self.VoteItemAction()]
|
from django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from singleurlcrud.views import CRUDView
from .models import *
# Create your views here.
class AuthorCRUDView(CRUDView):
model = Author
list_display = ('name',)
class QuestionCRUDView(CRUDView):
model = Question
list_display = ('question_text', 'pub_date', 'author')
related_field_crud_urls = {
'author': reverse_lazy("polls:authors")
}
def get_actions(self):
self.related_field_crud_urls = {
'author': reverse_lazy('polls:authors') +"?o=add",
}
return [
('Delete', self.delete_multiple_items)
]
def delete_multiple_items(self, request, items):
Question.objects.filter(pk__in=items).delete()
class VoteItemAction(object):
title = 'Vote'
key = 'vote1'
css = 'glyphicon glyphicon-envelope'
def doAction(self, item):
import logging
logging.getLogger('general').info("VoteItemAction invoked!")
pass
def get_item_actions(self):
return [self.VoteItemAction()]
|
Implement 'Delete' action for polls sample app
|
Implement 'Delete' action for polls sample app
|
Python
|
bsd-3-clause
|
harikvpy/crud,harikvpy/crud,harikvpy/crud
|
from django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from singleurlcrud.views import CRUDView
from .models import *
# Create your views here.
class AuthorCRUDView(CRUDView):
model = Author
list_display = ('name',)
class QuestionCRUDView(CRUDView):
model = Question
list_display = ('question_text', 'pub_date', 'author')
related_field_crud_urls = {
'author': reverse_lazy("polls:authors")
}
def get_actions(self):
self.related_field_crud_urls = {
'author': reverse_lazy('polls:authors') +"?o=add",
}
return [
('Delete', self.delete_multiple_items)
]
def delete_multiple_items(self, request, items):
pass
class VoteItemAction(object):
title = 'Vote'
key = 'vote1'
css = 'glyphicon glyphicon-envelope'
def doAction(self, item):
import logging
logging.getLogger('general').info("VoteItemAction invoked!")
pass
def get_item_actions(self):
return [self.VoteItemAction()]
Implement 'Delete' action for polls sample app
|
from django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from singleurlcrud.views import CRUDView
from .models import *
# Create your views here.
class AuthorCRUDView(CRUDView):
model = Author
list_display = ('name',)
class QuestionCRUDView(CRUDView):
model = Question
list_display = ('question_text', 'pub_date', 'author')
related_field_crud_urls = {
'author': reverse_lazy("polls:authors")
}
def get_actions(self):
self.related_field_crud_urls = {
'author': reverse_lazy('polls:authors') +"?o=add",
}
return [
('Delete', self.delete_multiple_items)
]
def delete_multiple_items(self, request, items):
Question.objects.filter(pk__in=items).delete()
class VoteItemAction(object):
title = 'Vote'
key = 'vote1'
css = 'glyphicon glyphicon-envelope'
def doAction(self, item):
import logging
logging.getLogger('general').info("VoteItemAction invoked!")
pass
def get_item_actions(self):
return [self.VoteItemAction()]
|
<commit_before>from django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from singleurlcrud.views import CRUDView
from .models import *
# Create your views here.
class AuthorCRUDView(CRUDView):
model = Author
list_display = ('name',)
class QuestionCRUDView(CRUDView):
model = Question
list_display = ('question_text', 'pub_date', 'author')
related_field_crud_urls = {
'author': reverse_lazy("polls:authors")
}
def get_actions(self):
self.related_field_crud_urls = {
'author': reverse_lazy('polls:authors') +"?o=add",
}
return [
('Delete', self.delete_multiple_items)
]
def delete_multiple_items(self, request, items):
pass
class VoteItemAction(object):
title = 'Vote'
key = 'vote1'
css = 'glyphicon glyphicon-envelope'
def doAction(self, item):
import logging
logging.getLogger('general').info("VoteItemAction invoked!")
pass
def get_item_actions(self):
return [self.VoteItemAction()]
<commit_msg>Implement 'Delete' action for polls sample app<commit_after>
|
from django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from singleurlcrud.views import CRUDView
from .models import *
# Create your views here.
class AuthorCRUDView(CRUDView):
model = Author
list_display = ('name',)
class QuestionCRUDView(CRUDView):
model = Question
list_display = ('question_text', 'pub_date', 'author')
related_field_crud_urls = {
'author': reverse_lazy("polls:authors")
}
def get_actions(self):
self.related_field_crud_urls = {
'author': reverse_lazy('polls:authors') +"?o=add",
}
return [
('Delete', self.delete_multiple_items)
]
def delete_multiple_items(self, request, items):
Question.objects.filter(pk__in=items).delete()
class VoteItemAction(object):
title = 'Vote'
key = 'vote1'
css = 'glyphicon glyphicon-envelope'
def doAction(self, item):
import logging
logging.getLogger('general').info("VoteItemAction invoked!")
pass
def get_item_actions(self):
return [self.VoteItemAction()]
|
from django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from singleurlcrud.views import CRUDView
from .models import *
# Create your views here.
class AuthorCRUDView(CRUDView):
model = Author
list_display = ('name',)
class QuestionCRUDView(CRUDView):
model = Question
list_display = ('question_text', 'pub_date', 'author')
related_field_crud_urls = {
'author': reverse_lazy("polls:authors")
}
def get_actions(self):
self.related_field_crud_urls = {
'author': reverse_lazy('polls:authors') +"?o=add",
}
return [
('Delete', self.delete_multiple_items)
]
def delete_multiple_items(self, request, items):
pass
class VoteItemAction(object):
title = 'Vote'
key = 'vote1'
css = 'glyphicon glyphicon-envelope'
def doAction(self, item):
import logging
logging.getLogger('general').info("VoteItemAction invoked!")
pass
def get_item_actions(self):
return [self.VoteItemAction()]
Implement 'Delete' action for polls sample appfrom django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from singleurlcrud.views import CRUDView
from .models import *
# Create your views here.
class AuthorCRUDView(CRUDView):
model = Author
list_display = ('name',)
class QuestionCRUDView(CRUDView):
model = Question
list_display = ('question_text', 'pub_date', 'author')
related_field_crud_urls = {
'author': reverse_lazy("polls:authors")
}
def get_actions(self):
self.related_field_crud_urls = {
'author': reverse_lazy('polls:authors') +"?o=add",
}
return [
('Delete', self.delete_multiple_items)
]
def delete_multiple_items(self, request, items):
Question.objects.filter(pk__in=items).delete()
class VoteItemAction(object):
title = 'Vote'
key = 'vote1'
css = 'glyphicon glyphicon-envelope'
def doAction(self, item):
import logging
logging.getLogger('general').info("VoteItemAction invoked!")
pass
def get_item_actions(self):
return [self.VoteItemAction()]
|
<commit_before>from django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from singleurlcrud.views import CRUDView
from .models import *
# Create your views here.
class AuthorCRUDView(CRUDView):
model = Author
list_display = ('name',)
class QuestionCRUDView(CRUDView):
model = Question
list_display = ('question_text', 'pub_date', 'author')
related_field_crud_urls = {
'author': reverse_lazy("polls:authors")
}
def get_actions(self):
self.related_field_crud_urls = {
'author': reverse_lazy('polls:authors') +"?o=add",
}
return [
('Delete', self.delete_multiple_items)
]
def delete_multiple_items(self, request, items):
pass
class VoteItemAction(object):
title = 'Vote'
key = 'vote1'
css = 'glyphicon glyphicon-envelope'
def doAction(self, item):
import logging
logging.getLogger('general').info("VoteItemAction invoked!")
pass
def get_item_actions(self):
return [self.VoteItemAction()]
<commit_msg>Implement 'Delete' action for polls sample app<commit_after>from django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from singleurlcrud.views import CRUDView
from .models import *
# Create your views here.
class AuthorCRUDView(CRUDView):
model = Author
list_display = ('name',)
class QuestionCRUDView(CRUDView):
model = Question
list_display = ('question_text', 'pub_date', 'author')
related_field_crud_urls = {
'author': reverse_lazy("polls:authors")
}
def get_actions(self):
self.related_field_crud_urls = {
'author': reverse_lazy('polls:authors') +"?o=add",
}
return [
('Delete', self.delete_multiple_items)
]
def delete_multiple_items(self, request, items):
Question.objects.filter(pk__in=items).delete()
class VoteItemAction(object):
title = 'Vote'
key = 'vote1'
css = 'glyphicon glyphicon-envelope'
def doAction(self, item):
import logging
logging.getLogger('general').info("VoteItemAction invoked!")
pass
def get_item_actions(self):
return [self.VoteItemAction()]
|
14123d5e3544ab9dbee813e26536e43cbfb9f783
|
pycroscopy/__version__.py
|
pycroscopy/__version__.py
|
version = '0.59.8'
time = '2018-04-18 08:12:59'
|
version = '0.60.0rc1'
time = '2018-04-18 08:12:59'
|
Mark as release candidate version
|
Mark as release candidate version
|
Python
|
mit
|
pycroscopy/pycroscopy
|
version = '0.59.8'
time = '2018-04-18 08:12:59'
Mark as release candidate version
|
version = '0.60.0rc1'
time = '2018-04-18 08:12:59'
|
<commit_before>version = '0.59.8'
time = '2018-04-18 08:12:59'
<commit_msg>Mark as release candidate version<commit_after>
|
version = '0.60.0rc1'
time = '2018-04-18 08:12:59'
|
version = '0.59.8'
time = '2018-04-18 08:12:59'
Mark as release candidate versionversion = '0.60.0rc1'
time = '2018-04-18 08:12:59'
|
<commit_before>version = '0.59.8'
time = '2018-04-18 08:12:59'
<commit_msg>Mark as release candidate version<commit_after>version = '0.60.0rc1'
time = '2018-04-18 08:12:59'
|
9851430922f9c14583c9eb17062629f6ea99c258
|
turbustat/tests/test_vcs.py
|
turbustat/tests/test_vcs.py
|
# Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCS
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import VCS, VCS_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testVCS(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_VCS_method(self):
self.tester = VCS(dataset1["cube"]).run()
npt.assert_allclose(self.tester.ps1D, computed_data['vcs_val'])
def test_VCS_distance(self):
self.tester_dist = \
VCS_Distance(dataset1["cube"], dataset2["cube"])
self.tester_dist = self.tester_dist.distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['vcs_distance'])
|
# Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCS
'''
import pytest
import numpy as np
import numpy.testing as npt
from ..statistics import VCS, VCS_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
def test_VCS_method():
tester = VCS(dataset1["cube"]).run()
npt.assert_allclose(tester.ps1D, computed_data['vcs_val'])
npt.assert_allclose(tester.slope, computed_data['vcs_slopes_val'])
def test_VCS_distance():
tester_dist = \
VCS_Distance(dataset1["cube"], dataset2["cube"])
tester_dist = tester_dist.distance_metric()
npt.assert_almost_equal(tester_dist.distance,
computed_distances['vcs_distance'])
# Add tests for: VCS changing the spectral width, pixel and spectral units,
|
Reformat VCS tests; need updated unit test values!
|
Reformat VCS tests; need updated unit test values!
|
Python
|
mit
|
e-koch/TurbuStat,Astroua/TurbuStat
|
# Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCS
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import VCS, VCS_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testVCS(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_VCS_method(self):
self.tester = VCS(dataset1["cube"]).run()
npt.assert_allclose(self.tester.ps1D, computed_data['vcs_val'])
def test_VCS_distance(self):
self.tester_dist = \
VCS_Distance(dataset1["cube"], dataset2["cube"])
self.tester_dist = self.tester_dist.distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['vcs_distance'])
Reformat VCS tests; need updated unit test values!
|
# Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCS
'''
import pytest
import numpy as np
import numpy.testing as npt
from ..statistics import VCS, VCS_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
def test_VCS_method():
tester = VCS(dataset1["cube"]).run()
npt.assert_allclose(tester.ps1D, computed_data['vcs_val'])
npt.assert_allclose(tester.slope, computed_data['vcs_slopes_val'])
def test_VCS_distance():
tester_dist = \
VCS_Distance(dataset1["cube"], dataset2["cube"])
tester_dist = tester_dist.distance_metric()
npt.assert_almost_equal(tester_dist.distance,
computed_distances['vcs_distance'])
# Add tests for: VCS changing the spectral width, pixel and spectral units,
|
<commit_before># Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCS
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import VCS, VCS_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testVCS(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_VCS_method(self):
self.tester = VCS(dataset1["cube"]).run()
npt.assert_allclose(self.tester.ps1D, computed_data['vcs_val'])
def test_VCS_distance(self):
self.tester_dist = \
VCS_Distance(dataset1["cube"], dataset2["cube"])
self.tester_dist = self.tester_dist.distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['vcs_distance'])
<commit_msg>Reformat VCS tests; need updated unit test values!<commit_after>
|
# Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCS
'''
import pytest
import numpy as np
import numpy.testing as npt
from ..statistics import VCS, VCS_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
def test_VCS_method():
tester = VCS(dataset1["cube"]).run()
npt.assert_allclose(tester.ps1D, computed_data['vcs_val'])
npt.assert_allclose(tester.slope, computed_data['vcs_slopes_val'])
def test_VCS_distance():
tester_dist = \
VCS_Distance(dataset1["cube"], dataset2["cube"])
tester_dist = tester_dist.distance_metric()
npt.assert_almost_equal(tester_dist.distance,
computed_distances['vcs_distance'])
# Add tests for: VCS changing the spectral width, pixel and spectral units,
|
# Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCS
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import VCS, VCS_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testVCS(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_VCS_method(self):
self.tester = VCS(dataset1["cube"]).run()
npt.assert_allclose(self.tester.ps1D, computed_data['vcs_val'])
def test_VCS_distance(self):
self.tester_dist = \
VCS_Distance(dataset1["cube"], dataset2["cube"])
self.tester_dist = self.tester_dist.distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['vcs_distance'])
Reformat VCS tests; need updated unit test values!# Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCS
'''
import pytest
import numpy as np
import numpy.testing as npt
from ..statistics import VCS, VCS_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
def test_VCS_method():
tester = VCS(dataset1["cube"]).run()
npt.assert_allclose(tester.ps1D, computed_data['vcs_val'])
npt.assert_allclose(tester.slope, computed_data['vcs_slopes_val'])
def test_VCS_distance():
tester_dist = \
VCS_Distance(dataset1["cube"], dataset2["cube"])
tester_dist = tester_dist.distance_metric()
npt.assert_almost_equal(tester_dist.distance,
computed_distances['vcs_distance'])
# Add tests for: VCS changing the spectral width, pixel and spectral units,
|
<commit_before># Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCS
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import VCS, VCS_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testVCS(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_VCS_method(self):
self.tester = VCS(dataset1["cube"]).run()
npt.assert_allclose(self.tester.ps1D, computed_data['vcs_val'])
def test_VCS_distance(self):
self.tester_dist = \
VCS_Distance(dataset1["cube"], dataset2["cube"])
self.tester_dist = self.tester_dist.distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['vcs_distance'])
<commit_msg>Reformat VCS tests; need updated unit test values!<commit_after># Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCS
'''
import pytest
import numpy as np
import numpy.testing as npt
from ..statistics import VCS, VCS_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
def test_VCS_method():
tester = VCS(dataset1["cube"]).run()
npt.assert_allclose(tester.ps1D, computed_data['vcs_val'])
npt.assert_allclose(tester.slope, computed_data['vcs_slopes_val'])
def test_VCS_distance():
tester_dist = \
VCS_Distance(dataset1["cube"], dataset2["cube"])
tester_dist = tester_dist.distance_metric()
npt.assert_almost_equal(tester_dist.distance,
computed_distances['vcs_distance'])
# Add tests for: VCS changing the spectral width, pixel and spectral units,
|
75b221fa63b0f81b94ffbbe9f5cdc39a0adb848a
|
dmrg101/core/braket.py
|
dmrg101/core/braket.py
|
'''
File: braket.py
Author: Ivan Gonzalez
Description: A function to implement quantum-mechanics brakets
'''
from numpy import inner
from core.exceptions import DMRGException
def braket(bra, ket):
"""Takes a bra and a ket and return their braket
You use this function to calculate the quantum mechanical braket, i.e.
the inner product in the wavefunction Hilbert space of two
wavefunction.
The wavefunction in the bra is hermitian conjugated by the braket
function.
Attributes:
bra: a Wavefunction with the bra part of the braket.
ket: a Wavefunction with the ket part of the braket.
Returns:
a double/complex with value of the braket.
Raises:
DMRGException: if the wavefunction don't belong to the same
Hilbert space, i.e. they have a different number of elements.
"""
# use wf.as_matrix to access the matrix elements of wf
if bra.as_matrix.shape() != ket.as_matrix.shape():
raise DMRGException("Wavefunctions in braket are not in the same
Hilbert space")
hermitian_conjugated_bra=conjugate(bra.as_matrix).transpose()
return inner(hermitian_conjugated_bra, ket.as_matrix)
|
'''
File: braket.py
Author: Ivan Gonzalez
Description: A function to implement quantum-mechanics brakets
'''
from numpy import inner, conjugate
from dmrg_exceptions import DMRGException
def braket(bra, ket):
"""Takes a bra and a ket and return their braket
You use this function to calculate the quantum mechanical braket, i.e.
the inner product in the wavefunction Hilbert space of two
wavefunction.
The wavefunction in the bra is hermitian conjugated by the braket
function.
Parameters
----------
bra: a Wavefunction with the bra part of the braket.
ket: a Wavefunction with the ket part of the braket.
Returns
-------
a double/complex with value of the braket.
Raises
------
DMRGException: if the wavefunction don't belong to the same
Hilbert space, i.e. they have a different number of elements.
"""
# use wf.as_matrix to access the matrix elements of wf
if bra.as_matrix.shape() != ket.as_matrix.shape():
raise DMRGException("Wavefunctions in braket are not in the same
Hilbert space")
hermitian_conjugated_bra=conjugate(bra.as_matrix).transpose()
return inner(hermitian_conjugated_bra, ket.as_matrix)
|
Clean up comments, fixing imports.
|
Clean up comments, fixing imports.
|
Python
|
mit
|
iglpdc/dmrg101
|
'''
File: braket.py
Author: Ivan Gonzalez
Description: A function to implement quantum-mechanics brakets
'''
from numpy import inner
from core.exceptions import DMRGException
def braket(bra, ket):
"""Takes a bra and a ket and return their braket
You use this function to calculate the quantum mechanical braket, i.e.
the inner product in the wavefunction Hilbert space of two
wavefunction.
The wavefunction in the bra is hermitian conjugated by the braket
function.
Attributes:
bra: a Wavefunction with the bra part of the braket.
ket: a Wavefunction with the ket part of the braket.
Returns:
a double/complex with value of the braket.
Raises:
DMRGException: if the wavefunction don't belong to the same
Hilbert space, i.e. they have a different number of elements.
"""
# use wf.as_matrix to access the matrix elements of wf
if bra.as_matrix.shape() != ket.as_matrix.shape():
raise DMRGException("Wavefunctions in braket are not in the same
Hilbert space")
hermitian_conjugated_bra=conjugate(bra.as_matrix).transpose()
return inner(hermitian_conjugated_bra, ket.as_matrix)
Clean up comments, fixing imports.
|
'''
File: braket.py
Author: Ivan Gonzalez
Description: A function to implement quantum-mechanics brakets
'''
from numpy import inner, conjugate
from dmrg_exceptions import DMRGException
def braket(bra, ket):
"""Takes a bra and a ket and return their braket
You use this function to calculate the quantum mechanical braket, i.e.
the inner product in the wavefunction Hilbert space of two
wavefunction.
The wavefunction in the bra is hermitian conjugated by the braket
function.
Parameters
----------
bra: a Wavefunction with the bra part of the braket.
ket: a Wavefunction with the ket part of the braket.
Returns
-------
a double/complex with value of the braket.
Raises
------
DMRGException: if the wavefunction don't belong to the same
Hilbert space, i.e. they have a different number of elements.
"""
# use wf.as_matrix to access the matrix elements of wf
if bra.as_matrix.shape() != ket.as_matrix.shape():
raise DMRGException("Wavefunctions in braket are not in the same
Hilbert space")
hermitian_conjugated_bra=conjugate(bra.as_matrix).transpose()
return inner(hermitian_conjugated_bra, ket.as_matrix)
|
<commit_before>'''
File: braket.py
Author: Ivan Gonzalez
Description: A function to implement quantum-mechanics brakets
'''
from numpy import inner
from core.exceptions import DMRGException
def braket(bra, ket):
"""Takes a bra and a ket and return their braket
You use this function to calculate the quantum mechanical braket, i.e.
the inner product in the wavefunction Hilbert space of two
wavefunction.
The wavefunction in the bra is hermitian conjugated by the braket
function.
Attributes:
bra: a Wavefunction with the bra part of the braket.
ket: a Wavefunction with the ket part of the braket.
Returns:
a double/complex with value of the braket.
Raises:
DMRGException: if the wavefunction don't belong to the same
Hilbert space, i.e. they have a different number of elements.
"""
# use wf.as_matrix to access the matrix elements of wf
if bra.as_matrix.shape() != ket.as_matrix.shape():
raise DMRGException("Wavefunctions in braket are not in the same
Hilbert space")
hermitian_conjugated_bra=conjugate(bra.as_matrix).transpose()
return inner(hermitian_conjugated_bra, ket.as_matrix)
<commit_msg>Clean up comments, fixing imports.<commit_after>
|
'''
File: braket.py
Author: Ivan Gonzalez
Description: A function to implement quantum-mechanics brakets
'''
from numpy import inner, conjugate
from dmrg_exceptions import DMRGException
def braket(bra, ket):
"""Takes a bra and a ket and return their braket
You use this function to calculate the quantum mechanical braket, i.e.
the inner product in the wavefunction Hilbert space of two
wavefunction.
The wavefunction in the bra is hermitian conjugated by the braket
function.
Parameters
----------
bra: a Wavefunction with the bra part of the braket.
ket: a Wavefunction with the ket part of the braket.
Returns
-------
a double/complex with value of the braket.
Raises
------
DMRGException: if the wavefunction don't belong to the same
Hilbert space, i.e. they have a different number of elements.
"""
# use wf.as_matrix to access the matrix elements of wf
if bra.as_matrix.shape() != ket.as_matrix.shape():
raise DMRGException("Wavefunctions in braket are not in the same
Hilbert space")
hermitian_conjugated_bra=conjugate(bra.as_matrix).transpose()
return inner(hermitian_conjugated_bra, ket.as_matrix)
|
'''
File: braket.py
Author: Ivan Gonzalez
Description: A function to implement quantum-mechanics brakets
'''
from numpy import inner
from core.exceptions import DMRGException
def braket(bra, ket):
"""Takes a bra and a ket and return their braket
You use this function to calculate the quantum mechanical braket, i.e.
the inner product in the wavefunction Hilbert space of two
wavefunction.
The wavefunction in the bra is hermitian conjugated by the braket
function.
Attributes:
bra: a Wavefunction with the bra part of the braket.
ket: a Wavefunction with the ket part of the braket.
Returns:
a double/complex with value of the braket.
Raises:
DMRGException: if the wavefunction don't belong to the same
Hilbert space, i.e. they have a different number of elements.
"""
# use wf.as_matrix to access the matrix elements of wf
if bra.as_matrix.shape() != ket.as_matrix.shape():
raise DMRGException("Wavefunctions in braket are not in the same
Hilbert space")
hermitian_conjugated_bra=conjugate(bra.as_matrix).transpose()
return inner(hermitian_conjugated_bra, ket.as_matrix)
Clean up comments, fixing imports.'''
File: braket.py
Author: Ivan Gonzalez
Description: A function to implement quantum-mechanics brakets
'''
from numpy import inner, conjugate
from dmrg_exceptions import DMRGException
def braket(bra, ket):
"""Takes a bra and a ket and return their braket
You use this function to calculate the quantum mechanical braket, i.e.
the inner product in the wavefunction Hilbert space of two
wavefunction.
The wavefunction in the bra is hermitian conjugated by the braket
function.
Parameters
----------
bra: a Wavefunction with the bra part of the braket.
ket: a Wavefunction with the ket part of the braket.
Returns
-------
a double/complex with value of the braket.
Raises
------
DMRGException: if the wavefunction don't belong to the same
Hilbert space, i.e. they have a different number of elements.
"""
# use wf.as_matrix to access the matrix elements of wf
if bra.as_matrix.shape() != ket.as_matrix.shape():
raise DMRGException("Wavefunctions in braket are not in the same
Hilbert space")
hermitian_conjugated_bra=conjugate(bra.as_matrix).transpose()
return inner(hermitian_conjugated_bra, ket.as_matrix)
|
<commit_before>'''
File: braket.py
Author: Ivan Gonzalez
Description: A function to implement quantum-mechanics brakets
'''
from numpy import inner
from core.exceptions import DMRGException
def braket(bra, ket):
"""Takes a bra and a ket and return their braket
You use this function to calculate the quantum mechanical braket, i.e.
the inner product in the wavefunction Hilbert space of two
wavefunction.
The wavefunction in the bra is hermitian conjugated by the braket
function.
Attributes:
bra: a Wavefunction with the bra part of the braket.
ket: a Wavefunction with the ket part of the braket.
Returns:
a double/complex with value of the braket.
Raises:
DMRGException: if the wavefunction don't belong to the same
Hilbert space, i.e. they have a different number of elements.
"""
# use wf.as_matrix to access the matrix elements of wf
if bra.as_matrix.shape() != ket.as_matrix.shape():
raise DMRGException("Wavefunctions in braket are not in the same
Hilbert space")
hermitian_conjugated_bra=conjugate(bra.as_matrix).transpose()
return inner(hermitian_conjugated_bra, ket.as_matrix)
<commit_msg>Clean up comments, fixing imports.<commit_after>'''
File: braket.py
Author: Ivan Gonzalez
Description: A function to implement quantum-mechanics brakets
'''
from numpy import inner, conjugate
from dmrg_exceptions import DMRGException
def braket(bra, ket):
"""Takes a bra and a ket and return their braket
You use this function to calculate the quantum mechanical braket, i.e.
the inner product in the wavefunction Hilbert space of two
wavefunction.
The wavefunction in the bra is hermitian conjugated by the braket
function.
Parameters
----------
bra: a Wavefunction with the bra part of the braket.
ket: a Wavefunction with the ket part of the braket.
Returns
-------
a double/complex with value of the braket.
Raises
------
DMRGException: if the wavefunction don't belong to the same
Hilbert space, i.e. they have a different number of elements.
"""
# use wf.as_matrix to access the matrix elements of wf
if bra.as_matrix.shape() != ket.as_matrix.shape():
raise DMRGException("Wavefunctions in braket are not in the same
Hilbert space")
hermitian_conjugated_bra=conjugate(bra.as_matrix).transpose()
return inner(hermitian_conjugated_bra, ket.as_matrix)
|
b1bb9e86b51bf0d1c57fa10ac9b8297f0bc078db
|
flow_workflow/petri_net/future_nets/base.py
|
flow_workflow/petri_net/future_nets/base.py
|
from flow.petri_net import future
from flow.petri_net import success_failure_net
# XXX Maybe this turns into a historian mixin?
class GenomeNetBase(success_failure_net.SuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
success_failure_net.SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
def historian_action(self, status, **kwargs):
info = {"id": self.operation_id,
"name": self.name,
"status": status,
"parent_net_key": None,
"parent_operation_id": self.parent_operation_id}
# XXX the name 'parallel_index' is suspicious
optional_attrs = ['parent_net_key',
'peer_operation_id', 'parallel_index']
for attr in optional_attrs:
value = getattr(self, attr, None)
if value is not None:
info[attr] = value
args = {"children_info": [info]}
args.update(kwargs)
return future.FutureAction(cls=WorkflowHistorianUpdateAction, args=args)
|
from flow.petri_net import future
from flow.petri_net.success_failure_net import SuccessFailureNet
# XXX Maybe this turns into a historian mixin?
class GenomeNetBase(SuccessFailureNet):
"""
Basically a success-failure net with operation_id and parent_operation_id and
the ability to construct historian_actions
"""
def __init__(self, name, operation_id, parent_operation_id=None):
SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
def historian_action(self, status, **kwargs):
info = {"id": self.operation_id,
"name": self.name,
"status": status,
"parent_net_key": None,
"parent_operation_id": self.parent_operation_id}
# XXX the name 'parallel_index' is suspicious
optional_attrs = ['parent_net_key',
'peer_operation_id', 'parallel_index']
for attr in optional_attrs:
value = getattr(self, attr, None)
if value is not None:
info[attr] = value
args = {"children_info": [info]}
args.update(kwargs)
return future.FutureAction(cls=WorkflowHistorianUpdateAction, args=args)
|
Add comments and clean-up import of GenomeNetBase
|
Add comments and clean-up import of GenomeNetBase
|
Python
|
agpl-3.0
|
genome/flow-workflow,genome/flow-workflow,genome/flow-workflow
|
from flow.petri_net import future
from flow.petri_net import success_failure_net
# XXX Maybe this turns into a historian mixin?
class GenomeNetBase(success_failure_net.SuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
success_failure_net.SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
def historian_action(self, status, **kwargs):
info = {"id": self.operation_id,
"name": self.name,
"status": status,
"parent_net_key": None,
"parent_operation_id": self.parent_operation_id}
# XXX the name 'parallel_index' is suspicious
optional_attrs = ['parent_net_key',
'peer_operation_id', 'parallel_index']
for attr in optional_attrs:
value = getattr(self, attr, None)
if value is not None:
info[attr] = value
args = {"children_info": [info]}
args.update(kwargs)
return future.FutureAction(cls=WorkflowHistorianUpdateAction, args=args)
Add comments and clean-up import of GenomeNetBase
|
from flow.petri_net import future
from flow.petri_net.success_failure_net import SuccessFailureNet
# XXX Maybe this turns into a historian mixin?
class GenomeNetBase(SuccessFailureNet):
"""
Basically a success-failure net with operation_id and parent_operation_id and
the ability to construct historian_actions
"""
def __init__(self, name, operation_id, parent_operation_id=None):
SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
def historian_action(self, status, **kwargs):
info = {"id": self.operation_id,
"name": self.name,
"status": status,
"parent_net_key": None,
"parent_operation_id": self.parent_operation_id}
# XXX the name 'parallel_index' is suspicious
optional_attrs = ['parent_net_key',
'peer_operation_id', 'parallel_index']
for attr in optional_attrs:
value = getattr(self, attr, None)
if value is not None:
info[attr] = value
args = {"children_info": [info]}
args.update(kwargs)
return future.FutureAction(cls=WorkflowHistorianUpdateAction, args=args)
|
<commit_before>from flow.petri_net import future
from flow.petri_net import success_failure_net
# XXX Maybe this turns into a historian mixin?
class GenomeNetBase(success_failure_net.SuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
success_failure_net.SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
def historian_action(self, status, **kwargs):
info = {"id": self.operation_id,
"name": self.name,
"status": status,
"parent_net_key": None,
"parent_operation_id": self.parent_operation_id}
# XXX the name 'parallel_index' is suspicious
optional_attrs = ['parent_net_key',
'peer_operation_id', 'parallel_index']
for attr in optional_attrs:
value = getattr(self, attr, None)
if value is not None:
info[attr] = value
args = {"children_info": [info]}
args.update(kwargs)
return future.FutureAction(cls=WorkflowHistorianUpdateAction, args=args)
<commit_msg>Add comments and clean-up import of GenomeNetBase<commit_after>
|
from flow.petri_net import future
from flow.petri_net.success_failure_net import SuccessFailureNet
# XXX Maybe this turns into a historian mixin?
class GenomeNetBase(SuccessFailureNet):
"""
Basically a success-failure net with operation_id and parent_operation_id and
the ability to construct historian_actions
"""
def __init__(self, name, operation_id, parent_operation_id=None):
SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
def historian_action(self, status, **kwargs):
info = {"id": self.operation_id,
"name": self.name,
"status": status,
"parent_net_key": None,
"parent_operation_id": self.parent_operation_id}
# XXX the name 'parallel_index' is suspicious
optional_attrs = ['parent_net_key',
'peer_operation_id', 'parallel_index']
for attr in optional_attrs:
value = getattr(self, attr, None)
if value is not None:
info[attr] = value
args = {"children_info": [info]}
args.update(kwargs)
return future.FutureAction(cls=WorkflowHistorianUpdateAction, args=args)
|
from flow.petri_net import future
from flow.petri_net import success_failure_net
# XXX Maybe this turns into a historian mixin?
class GenomeNetBase(success_failure_net.SuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
success_failure_net.SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
def historian_action(self, status, **kwargs):
info = {"id": self.operation_id,
"name": self.name,
"status": status,
"parent_net_key": None,
"parent_operation_id": self.parent_operation_id}
# XXX the name 'parallel_index' is suspicious
optional_attrs = ['parent_net_key',
'peer_operation_id', 'parallel_index']
for attr in optional_attrs:
value = getattr(self, attr, None)
if value is not None:
info[attr] = value
args = {"children_info": [info]}
args.update(kwargs)
return future.FutureAction(cls=WorkflowHistorianUpdateAction, args=args)
Add comments and clean-up import of GenomeNetBasefrom flow.petri_net import future
from flow.petri_net.success_failure_net import SuccessFailureNet
# XXX Maybe this turns into a historian mixin?
class GenomeNetBase(SuccessFailureNet):
"""
Basically a success-failure net with operation_id and parent_operation_id and
the ability to construct historian_actions
"""
def __init__(self, name, operation_id, parent_operation_id=None):
SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
def historian_action(self, status, **kwargs):
info = {"id": self.operation_id,
"name": self.name,
"status": status,
"parent_net_key": None,
"parent_operation_id": self.parent_operation_id}
# XXX the name 'parallel_index' is suspicious
optional_attrs = ['parent_net_key',
'peer_operation_id', 'parallel_index']
for attr in optional_attrs:
value = getattr(self, attr, None)
if value is not None:
info[attr] = value
args = {"children_info": [info]}
args.update(kwargs)
return future.FutureAction(cls=WorkflowHistorianUpdateAction, args=args)
|
<commit_before>from flow.petri_net import future
from flow.petri_net import success_failure_net
# XXX Maybe this turns into a historian mixin?
class GenomeNetBase(success_failure_net.SuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
success_failure_net.SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
def historian_action(self, status, **kwargs):
info = {"id": self.operation_id,
"name": self.name,
"status": status,
"parent_net_key": None,
"parent_operation_id": self.parent_operation_id}
# XXX the name 'parallel_index' is suspicious
optional_attrs = ['parent_net_key',
'peer_operation_id', 'parallel_index']
for attr in optional_attrs:
value = getattr(self, attr, None)
if value is not None:
info[attr] = value
args = {"children_info": [info]}
args.update(kwargs)
return future.FutureAction(cls=WorkflowHistorianUpdateAction, args=args)
<commit_msg>Add comments and clean-up import of GenomeNetBase<commit_after>from flow.petri_net import future
from flow.petri_net.success_failure_net import SuccessFailureNet
# XXX Maybe this turns into a historian mixin?
class GenomeNetBase(SuccessFailureNet):
"""
Basically a success-failure net with operation_id and parent_operation_id and
the ability to construct historian_actions
"""
def __init__(self, name, operation_id, parent_operation_id=None):
SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
def historian_action(self, status, **kwargs):
info = {"id": self.operation_id,
"name": self.name,
"status": status,
"parent_net_key": None,
"parent_operation_id": self.parent_operation_id}
# XXX the name 'parallel_index' is suspicious
optional_attrs = ['parent_net_key',
'peer_operation_id', 'parallel_index']
for attr in optional_attrs:
value = getattr(self, attr, None)
if value is not None:
info[attr] = value
args = {"children_info": [info]}
args.update(kwargs)
return future.FutureAction(cls=WorkflowHistorianUpdateAction, args=args)
|
de348d8816151f2674410566f3eaff9d43d9dcde
|
src/markdoc/cli/main.py
|
src/markdoc/cli/main.py
|
# -*- coding: utf-8 -*-
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
import logging
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
if args.quiet:
logging.getLogger('markdoc').setLevel(logging.ERROR)
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
|
Use logging levels to suppress non-error output with --quiet on the CLI.
|
Use logging levels to suppress non-error output with --quiet on the CLI.
|
Python
|
unlicense
|
wlonk/markdoc,lrem/phdoc,lrem/phdoc,zacharyvoase/markdoc,snoozbuster/markdoc,wlonk/markdoc,snoozbuster/markdoc
|
# -*- coding: utf-8 -*-
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
Use logging levels to suppress non-error output with --quiet on the CLI.
|
# -*- coding: utf-8 -*-
import logging
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
if args.quiet:
logging.getLogger('markdoc').setLevel(logging.ERROR)
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
<commit_msg>Use logging levels to suppress non-error output with --quiet on the CLI.<commit_after>
|
# -*- coding: utf-8 -*-
import logging
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
if args.quiet:
logging.getLogger('markdoc').setLevel(logging.ERROR)
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
Use logging levels to suppress non-error output with --quiet on the CLI.# -*- coding: utf-8 -*-
import logging
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
if args.quiet:
logging.getLogger('markdoc').setLevel(logging.ERROR)
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
<commit_msg>Use logging levels to suppress non-error output with --quiet on the CLI.<commit_after># -*- coding: utf-8 -*-
import logging
import os
import argparse
from markdoc.cli import commands
from markdoc.cli.parser import parser
from markdoc.config import Config, ConfigNotFound
def main(cmd_args=None):
"""The main entry point for running the Markdoc CLI."""
if cmd_args is not None:
args = parser.parse_args(cmd_args)
else:
args = parser.parse_args()
if args.command != 'init':
try:
args.config = os.path.abspath(args.config)
if os.path.isdir(args.config):
config = Config.for_directory(args.config)
elif os.path.isfile(args.config):
config = Config.for_file(args.config)
else:
raise ConfigNotFound("Couldn't locate Markdoc config.")
except ConfigNotFound, exc:
parser.error(str(exc))
else:
config = None
if args.quiet:
logging.getLogger('markdoc').setLevel(logging.ERROR)
command = getattr(commands, args.command.replace('-', '_'))
return command(config, args)
if __name__ == '__main__':
main()
|
a9666ecaa7ed904cb9ded38e41ea381eb08d7d65
|
citrination_client/models/design/target.py
|
citrination_client/models/design/target.py
|
from citrination_client.base.errors import CitrinationClientError
class Target(object):
"""
The optimization target for a design run. Consists of
the name of the output column to optimize and the objective
(either "Max" or "Min")
"""
def __init__(self, name, objective):
"""
Constructor.
:param name: The name of the target output column
:type name: str
:param objective: The optimization objective; "Min", "Max", or a scalar value (such as "5.0")
:type objective: str
"""
try:
self._objective = float(objective)
except ValueError:
if objective.lower() not in ["max", "min"]:
raise CitrinationClientError(
"Target objective must either be \"min\" or \"max\""
)
self._objective = objective
self._name = name
def to_dict(self):
return {
"descriptor": self._name,
"objective": self._objective
}
|
from citrination_client.base.errors import CitrinationClientError
class Target(object):
"""
The optimization target for a design run. Consists of
the name of the output column to optimize and the objective
(either "Max" or "Min", or a scalar value (such as "5.0"))
"""
def __init__(self, name, objective):
"""
Constructor.
:param name: The name of the target output column
:type name: str
:param objective: The optimization objective; "Min", "Max", or a scalar value (such as "5.0")
:type objective: str
"""
try:
self._objective = float(objective)
except ValueError:
if objective.lower() not in ["max", "min"]:
raise CitrinationClientError(
"Target objective must either be \"min\" or \"max\""
)
self._objective = objective
self._name = name
def to_dict(self):
return {
"descriptor": self._name,
"objective": self._objective
}
|
Update outdated design Target docstring
|
Update outdated design Target docstring
|
Python
|
apache-2.0
|
CitrineInformatics/python-citrination-client
|
from citrination_client.base.errors import CitrinationClientError
class Target(object):
"""
The optimization target for a design run. Consists of
the name of the output column to optimize and the objective
(either "Max" or "Min")
"""
def __init__(self, name, objective):
"""
Constructor.
:param name: The name of the target output column
:type name: str
:param objective: The optimization objective; "Min", "Max", or a scalar value (such as "5.0")
:type objective: str
"""
try:
self._objective = float(objective)
except ValueError:
if objective.lower() not in ["max", "min"]:
raise CitrinationClientError(
"Target objective must either be \"min\" or \"max\""
)
self._objective = objective
self._name = name
def to_dict(self):
return {
"descriptor": self._name,
"objective": self._objective
}
Update outdated design Target docstring
|
from citrination_client.base.errors import CitrinationClientError
class Target(object):
"""
The optimization target for a design run. Consists of
the name of the output column to optimize and the objective
(either "Max" or "Min", or a scalar value (such as "5.0"))
"""
def __init__(self, name, objective):
"""
Constructor.
:param name: The name of the target output column
:type name: str
:param objective: The optimization objective; "Min", "Max", or a scalar value (such as "5.0")
:type objective: str
"""
try:
self._objective = float(objective)
except ValueError:
if objective.lower() not in ["max", "min"]:
raise CitrinationClientError(
"Target objective must either be \"min\" or \"max\""
)
self._objective = objective
self._name = name
def to_dict(self):
return {
"descriptor": self._name,
"objective": self._objective
}
|
<commit_before>from citrination_client.base.errors import CitrinationClientError
class Target(object):
"""
The optimization target for a design run. Consists of
the name of the output column to optimize and the objective
(either "Max" or "Min")
"""
def __init__(self, name, objective):
"""
Constructor.
:param name: The name of the target output column
:type name: str
:param objective: The optimization objective; "Min", "Max", or a scalar value (such as "5.0")
:type objective: str
"""
try:
self._objective = float(objective)
except ValueError:
if objective.lower() not in ["max", "min"]:
raise CitrinationClientError(
"Target objective must either be \"min\" or \"max\""
)
self._objective = objective
self._name = name
def to_dict(self):
return {
"descriptor": self._name,
"objective": self._objective
}
<commit_msg>Update outdated design Target docstring<commit_after>
|
from citrination_client.base.errors import CitrinationClientError
class Target(object):
"""
The optimization target for a design run. Consists of
the name of the output column to optimize and the objective
(either "Max" or "Min", or a scalar value (such as "5.0"))
"""
def __init__(self, name, objective):
"""
Constructor.
:param name: The name of the target output column
:type name: str
:param objective: The optimization objective; "Min", "Max", or a scalar value (such as "5.0")
:type objective: str
"""
try:
self._objective = float(objective)
except ValueError:
if objective.lower() not in ["max", "min"]:
raise CitrinationClientError(
"Target objective must either be \"min\" or \"max\""
)
self._objective = objective
self._name = name
def to_dict(self):
return {
"descriptor": self._name,
"objective": self._objective
}
|
from citrination_client.base.errors import CitrinationClientError
class Target(object):
"""
The optimization target for a design run. Consists of
the name of the output column to optimize and the objective
(either "Max" or "Min")
"""
def __init__(self, name, objective):
"""
Constructor.
:param name: The name of the target output column
:type name: str
:param objective: The optimization objective; "Min", "Max", or a scalar value (such as "5.0")
:type objective: str
"""
try:
self._objective = float(objective)
except ValueError:
if objective.lower() not in ["max", "min"]:
raise CitrinationClientError(
"Target objective must either be \"min\" or \"max\""
)
self._objective = objective
self._name = name
def to_dict(self):
return {
"descriptor": self._name,
"objective": self._objective
}
Update outdated design Target docstringfrom citrination_client.base.errors import CitrinationClientError
class Target(object):
"""
The optimization target for a design run. Consists of
the name of the output column to optimize and the objective
(either "Max" or "Min", or a scalar value (such as "5.0"))
"""
def __init__(self, name, objective):
"""
Constructor.
:param name: The name of the target output column
:type name: str
:param objective: The optimization objective; "Min", "Max", or a scalar value (such as "5.0")
:type objective: str
"""
try:
self._objective = float(objective)
except ValueError:
if objective.lower() not in ["max", "min"]:
raise CitrinationClientError(
"Target objective must either be \"min\" or \"max\""
)
self._objective = objective
self._name = name
def to_dict(self):
return {
"descriptor": self._name,
"objective": self._objective
}
|
<commit_before>from citrination_client.base.errors import CitrinationClientError
class Target(object):
"""
The optimization target for a design run. Consists of
the name of the output column to optimize and the objective
(either "Max" or "Min")
"""
def __init__(self, name, objective):
"""
Constructor.
:param name: The name of the target output column
:type name: str
:param objective: The optimization objective; "Min", "Max", or a scalar value (such as "5.0")
:type objective: str
"""
try:
self._objective = float(objective)
except ValueError:
if objective.lower() not in ["max", "min"]:
raise CitrinationClientError(
"Target objective must either be \"min\" or \"max\""
)
self._objective = objective
self._name = name
def to_dict(self):
return {
"descriptor": self._name,
"objective": self._objective
}
<commit_msg>Update outdated design Target docstring<commit_after>from citrination_client.base.errors import CitrinationClientError
class Target(object):
"""
The optimization target for a design run. Consists of
the name of the output column to optimize and the objective
(either "Max" or "Min", or a scalar value (such as "5.0"))
"""
def __init__(self, name, objective):
"""
Constructor.
:param name: The name of the target output column
:type name: str
:param objective: The optimization objective; "Min", "Max", or a scalar value (such as "5.0")
:type objective: str
"""
try:
self._objective = float(objective)
except ValueError:
if objective.lower() not in ["max", "min"]:
raise CitrinationClientError(
"Target objective must either be \"min\" or \"max\""
)
self._objective = objective
self._name = name
def to_dict(self):
return {
"descriptor": self._name,
"objective": self._objective
}
|
dc57eb8fa84f10ffa9ba3f8133563b7de3945034
|
whalelinter/commands/common.py
|
whalelinter/commands/common.py
|
#!/usr/bin/env python3
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import Command
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(Command):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(Command):
def __init__(self, **kwargs):
if (
'-rf' in kwargs.get('args') or
'-fr' in kwargs.get('args') or
('-r' in kwargs.get('args') and '-f' in kwargs.get('args'))
) and ('/var/lib/apt/lists' in kwargs.get('args')):
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
#!/usr/bin/env python3
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import Command
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(Command):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(Command):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
Enhance flags detection with regex when trying to remove apt cache
|
Enhance flags detection with regex when trying to remove apt cache
|
Python
|
mit
|
jeromepin/whale-linter
|
#!/usr/bin/env python3
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import Command
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(Command):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(Command):
def __init__(self, **kwargs):
if (
'-rf' in kwargs.get('args') or
'-fr' in kwargs.get('args') or
('-r' in kwargs.get('args') and '-f' in kwargs.get('args'))
) and ('/var/lib/apt/lists' in kwargs.get('args')):
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
Enhance flags detection with regex when trying to remove apt cache
|
#!/usr/bin/env python3
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import Command
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(Command):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(Command):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
<commit_before>#!/usr/bin/env python3
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import Command
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(Command):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(Command):
def __init__(self, **kwargs):
if (
'-rf' in kwargs.get('args') or
'-fr' in kwargs.get('args') or
('-r' in kwargs.get('args') and '-f' in kwargs.get('args'))
) and ('/var/lib/apt/lists' in kwargs.get('args')):
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
<commit_msg>Enhance flags detection with regex when trying to remove apt cache<commit_after>
|
#!/usr/bin/env python3
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import Command
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(Command):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(Command):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
#!/usr/bin/env python3
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import Command
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(Command):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(Command):
def __init__(self, **kwargs):
if (
'-rf' in kwargs.get('args') or
'-fr' in kwargs.get('args') or
('-r' in kwargs.get('args') and '-f' in kwargs.get('args'))
) and ('/var/lib/apt/lists' in kwargs.get('args')):
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
Enhance flags detection with regex when trying to remove apt cache#!/usr/bin/env python3
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import Command
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(Command):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(Command):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
<commit_before>#!/usr/bin/env python3
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import Command
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(Command):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(Command):
def __init__(self, **kwargs):
if (
'-rf' in kwargs.get('args') or
'-fr' in kwargs.get('args') or
('-r' in kwargs.get('args') and '-f' in kwargs.get('args'))
) and ('/var/lib/apt/lists' in kwargs.get('args')):
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
<commit_msg>Enhance flags detection with regex when trying to remove apt cache<commit_after>#!/usr/bin/env python3
import re
from whalelinter.app import App
from whalelinter.dispatcher import Dispatcher
from whalelinter.commands.command import Command
from whalelinter.commands.apt import Apt
@Dispatcher.register(token='run', command='cd')
class Cd(Command):
def __init__(self, **kwargs):
App._collecter.throw(2002, self.line)
return False
@Dispatcher.register(token='run', command='rm')
class Rm(Command):
def __init__(self, **kwargs):
rf_flags_regex = re.compile("(-.*[rRf].+-?[rRf]|-[rR]f|-f[rR])")
rf_flags = True if [i for i in kwargs.get('args') if rf_flags_regex.search(i)] else False
cache_path_regex = re.compile("/var/lib/apt/lists(\/\*?)?")
cache_path = True if [i for i in kwargs.get('args') if cache_path_regex.search(i)] else False
if rf_flags and cache_path:
if (int(Apt._has_been_used) < int(kwargs.get('lineno'))):
Apt._has_been_used = 0
|
bb3ba296038f45c2de6517c1f980843ce2042aa9
|
etcd3/__init__.py
|
etcd3/__init__.py
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
)
|
Remove obsolete 'utils' entry from '__all__
|
Remove obsolete 'utils' entry from '__all__
|
Python
|
apache-2.0
|
kragniz/python-etcd3
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
Remove obsolete 'utils' entry from '__all__
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
)
|
<commit_before>from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
<commit_msg>Remove obsolete 'utils' entry from '__all__<commit_after>
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
)
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
Remove obsolete 'utils' entry from '__all__from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
)
|
<commit_before>from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
'utils',
)
<commit_msg>Remove obsolete 'utils' entry from '__all__<commit_after>from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
)
|
f698dbc8b10aacf6ac8ee2a5d0d63ad01bd73674
|
octopus/image/data.py
|
octopus/image/data.py
|
# System Imports
import StringIO
import urllib
# Twisted Imports
from twisted.python.util import unsignedID
# Package Imports
from ..data.errors import Immutable
class Image (object):
@property
def value (self):
output = StringIO.StringIO()
img = self._image_fn()
img.scale(0.25).getPIL().save(output, format = "PNG")
encoded = "data:image/png;base64," + urllib.quote(output.getvalue().encode('base64'))
return encoded
@property
def type (self):
return "Image"
def serialize (self):
if self.alias is None:
return "[Image]"
else:
return str(self.alias)
def __init__ (self, title, fn):
self.alias = None
self.title = title
self._image_fn = fn
def set (self, value):
raise Immutable
def setLogFile (self, logFile):
pass
def stopLogging (self):
pass
def __str__ (self):
return "Image"
def __repr__ (self):
return "<%s at %s>" % (
self.__class__.__name__,
hex(unsignedID(self))
)
|
# System Imports
import StringIO
import urllib
# Package Imports
from ..data.errors import Immutable
class Image (object):
@property
def value (self):
output = StringIO.StringIO()
img = self._image_fn()
img.scale(0.25).getPIL().save(output, format = "PNG")
encoded = "data:image/png;base64," + urllib.quote(output.getvalue().encode('base64'))
return encoded
@property
def type (self):
return "Image"
def serialize (self):
if self.alias is None:
return "[Image]"
else:
return str(self.alias)
def __init__ (self, title, fn):
self.alias = None
self.title = title
self._image_fn = fn
def set (self, value):
raise Immutable
def setLogFile (self, logFile):
pass
def stopLogging (self):
pass
def __str__ (self):
return "Image"
def __repr__ (self):
return "<%s at %s>" % (
self.__class__.__name__,
hex(id(self))
)
|
Replace another call to unsignedID.
|
Replace another call to unsignedID.
|
Python
|
mit
|
richardingham/octopus,rasata/octopus,rasata/octopus,richardingham/octopus,richardingham/octopus,richardingham/octopus,rasata/octopus
|
# System Imports
import StringIO
import urllib
# Twisted Imports
from twisted.python.util import unsignedID
# Package Imports
from ..data.errors import Immutable
class Image (object):
@property
def value (self):
output = StringIO.StringIO()
img = self._image_fn()
img.scale(0.25).getPIL().save(output, format = "PNG")
encoded = "data:image/png;base64," + urllib.quote(output.getvalue().encode('base64'))
return encoded
@property
def type (self):
return "Image"
def serialize (self):
if self.alias is None:
return "[Image]"
else:
return str(self.alias)
def __init__ (self, title, fn):
self.alias = None
self.title = title
self._image_fn = fn
def set (self, value):
raise Immutable
def setLogFile (self, logFile):
pass
def stopLogging (self):
pass
def __str__ (self):
return "Image"
def __repr__ (self):
return "<%s at %s>" % (
self.__class__.__name__,
hex(unsignedID(self))
)
Replace another call to unsignedID.
|
# System Imports
import StringIO
import urllib
# Package Imports
from ..data.errors import Immutable
class Image (object):
@property
def value (self):
output = StringIO.StringIO()
img = self._image_fn()
img.scale(0.25).getPIL().save(output, format = "PNG")
encoded = "data:image/png;base64," + urllib.quote(output.getvalue().encode('base64'))
return encoded
@property
def type (self):
return "Image"
def serialize (self):
if self.alias is None:
return "[Image]"
else:
return str(self.alias)
def __init__ (self, title, fn):
self.alias = None
self.title = title
self._image_fn = fn
def set (self, value):
raise Immutable
def setLogFile (self, logFile):
pass
def stopLogging (self):
pass
def __str__ (self):
return "Image"
def __repr__ (self):
return "<%s at %s>" % (
self.__class__.__name__,
hex(id(self))
)
|
<commit_before># System Imports
import StringIO
import urllib
# Twisted Imports
from twisted.python.util import unsignedID
# Package Imports
from ..data.errors import Immutable
class Image (object):
@property
def value (self):
output = StringIO.StringIO()
img = self._image_fn()
img.scale(0.25).getPIL().save(output, format = "PNG")
encoded = "data:image/png;base64," + urllib.quote(output.getvalue().encode('base64'))
return encoded
@property
def type (self):
return "Image"
def serialize (self):
if self.alias is None:
return "[Image]"
else:
return str(self.alias)
def __init__ (self, title, fn):
self.alias = None
self.title = title
self._image_fn = fn
def set (self, value):
raise Immutable
def setLogFile (self, logFile):
pass
def stopLogging (self):
pass
def __str__ (self):
return "Image"
def __repr__ (self):
return "<%s at %s>" % (
self.__class__.__name__,
hex(unsignedID(self))
)
<commit_msg>Replace another call to unsignedID.<commit_after>
|
# System Imports
import StringIO
import urllib
# Package Imports
from ..data.errors import Immutable
class Image (object):
@property
def value (self):
output = StringIO.StringIO()
img = self._image_fn()
img.scale(0.25).getPIL().save(output, format = "PNG")
encoded = "data:image/png;base64," + urllib.quote(output.getvalue().encode('base64'))
return encoded
@property
def type (self):
return "Image"
def serialize (self):
if self.alias is None:
return "[Image]"
else:
return str(self.alias)
def __init__ (self, title, fn):
self.alias = None
self.title = title
self._image_fn = fn
def set (self, value):
raise Immutable
def setLogFile (self, logFile):
pass
def stopLogging (self):
pass
def __str__ (self):
return "Image"
def __repr__ (self):
return "<%s at %s>" % (
self.__class__.__name__,
hex(id(self))
)
|
# System Imports
import StringIO
import urllib
# Twisted Imports
from twisted.python.util import unsignedID
# Package Imports
from ..data.errors import Immutable
class Image (object):
@property
def value (self):
output = StringIO.StringIO()
img = self._image_fn()
img.scale(0.25).getPIL().save(output, format = "PNG")
encoded = "data:image/png;base64," + urllib.quote(output.getvalue().encode('base64'))
return encoded
@property
def type (self):
return "Image"
def serialize (self):
if self.alias is None:
return "[Image]"
else:
return str(self.alias)
def __init__ (self, title, fn):
self.alias = None
self.title = title
self._image_fn = fn
def set (self, value):
raise Immutable
def setLogFile (self, logFile):
pass
def stopLogging (self):
pass
def __str__ (self):
return "Image"
def __repr__ (self):
return "<%s at %s>" % (
self.__class__.__name__,
hex(unsignedID(self))
)
Replace another call to unsignedID.# System Imports
import StringIO
import urllib
# Package Imports
from ..data.errors import Immutable
class Image (object):
@property
def value (self):
output = StringIO.StringIO()
img = self._image_fn()
img.scale(0.25).getPIL().save(output, format = "PNG")
encoded = "data:image/png;base64," + urllib.quote(output.getvalue().encode('base64'))
return encoded
@property
def type (self):
return "Image"
def serialize (self):
if self.alias is None:
return "[Image]"
else:
return str(self.alias)
def __init__ (self, title, fn):
self.alias = None
self.title = title
self._image_fn = fn
def set (self, value):
raise Immutable
def setLogFile (self, logFile):
pass
def stopLogging (self):
pass
def __str__ (self):
return "Image"
def __repr__ (self):
return "<%s at %s>" % (
self.__class__.__name__,
hex(id(self))
)
|
<commit_before># System Imports
import StringIO
import urllib
# Twisted Imports
from twisted.python.util import unsignedID
# Package Imports
from ..data.errors import Immutable
class Image (object):
@property
def value (self):
output = StringIO.StringIO()
img = self._image_fn()
img.scale(0.25).getPIL().save(output, format = "PNG")
encoded = "data:image/png;base64," + urllib.quote(output.getvalue().encode('base64'))
return encoded
@property
def type (self):
return "Image"
def serialize (self):
if self.alias is None:
return "[Image]"
else:
return str(self.alias)
def __init__ (self, title, fn):
self.alias = None
self.title = title
self._image_fn = fn
def set (self, value):
raise Immutable
def setLogFile (self, logFile):
pass
def stopLogging (self):
pass
def __str__ (self):
return "Image"
def __repr__ (self):
return "<%s at %s>" % (
self.__class__.__name__,
hex(unsignedID(self))
)
<commit_msg>Replace another call to unsignedID.<commit_after># System Imports
import StringIO
import urllib
# Package Imports
from ..data.errors import Immutable
class Image (object):
@property
def value (self):
output = StringIO.StringIO()
img = self._image_fn()
img.scale(0.25).getPIL().save(output, format = "PNG")
encoded = "data:image/png;base64," + urllib.quote(output.getvalue().encode('base64'))
return encoded
@property
def type (self):
return "Image"
def serialize (self):
if self.alias is None:
return "[Image]"
else:
return str(self.alias)
def __init__ (self, title, fn):
self.alias = None
self.title = title
self._image_fn = fn
def set (self, value):
raise Immutable
def setLogFile (self, logFile):
pass
def stopLogging (self):
pass
def __str__ (self):
return "Image"
def __repr__ (self):
return "<%s at %s>" % (
self.__class__.__name__,
hex(id(self))
)
|
7881e6d06a34eddef5523df88ee601fb5e5d3ba6
|
encryptit/dump_json.py
|
encryptit/dump_json.py
|
import json
from .compat import OrderedDict
from .openpgp_message import OpenPGPMessage
def dump_stream(f, output_stream, indent=4):
message = OpenPGPMessage.from_stream(f)
return json.dump(message, output_stream, indent=indent,
cls=OpenPGPJsonEncoder)
class OpenPGPJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bytearray):
return self.serialize_bytes(obj)
if getattr(obj, 'serialize', None):
return obj.serialize()
return super(OpenPGPJsonEncoder, self).default(obj)
def encode(self, obj):
# If a builtin type provides a `serialize` method, use that instead of
# the default serialisation, eg. namedtuple
if getattr(obj, 'serialize', None):
obj = obj.serialize()
return super(OpenPGPJsonEncoder, self).encode(obj)
@staticmethod
def serialize_bytes(some_bytes):
return OrderedDict([
('octets', ':'.join(['{0:02x}'.format(byte)
for byte in some_bytes])),
('length', len(some_bytes)),
])
|
import json
from .compat import OrderedDict
from .openpgp_message import OpenPGPMessage
def dump_stream(f, output_stream, indent=4):
message = OpenPGPMessage.from_stream(f)
return json.dump(message, output_stream, indent=indent,
cls=OpenPGPJsonEncoder)
class OpenPGPJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bytearray):
return self.serialize_bytes(obj)
if getattr(obj, 'serialize', None):
return obj.serialize()
return super(OpenPGPJsonEncoder, self).default(obj)
@staticmethod
def serialize_bytes(some_bytes):
return OrderedDict([
('octets', ':'.join(['{0:02x}'.format(byte)
for byte in some_bytes])),
('length', len(some_bytes)),
])
|
Revert "Fix JSON encoding of `PacketLocation`"
|
Revert "Fix JSON encoding of `PacketLocation`"
This reverts commit 9e91912c6c1764c88890ec47df9372e6ac41612c.
|
Python
|
agpl-3.0
|
paulfurley/encryptit,paulfurley/encryptit
|
import json
from .compat import OrderedDict
from .openpgp_message import OpenPGPMessage
def dump_stream(f, output_stream, indent=4):
message = OpenPGPMessage.from_stream(f)
return json.dump(message, output_stream, indent=indent,
cls=OpenPGPJsonEncoder)
class OpenPGPJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bytearray):
return self.serialize_bytes(obj)
if getattr(obj, 'serialize', None):
return obj.serialize()
return super(OpenPGPJsonEncoder, self).default(obj)
def encode(self, obj):
# If a builtin type provides a `serialize` method, use that instead of
# the default serialisation, eg. namedtuple
if getattr(obj, 'serialize', None):
obj = obj.serialize()
return super(OpenPGPJsonEncoder, self).encode(obj)
@staticmethod
def serialize_bytes(some_bytes):
return OrderedDict([
('octets', ':'.join(['{0:02x}'.format(byte)
for byte in some_bytes])),
('length', len(some_bytes)),
])
Revert "Fix JSON encoding of `PacketLocation`"
This reverts commit 9e91912c6c1764c88890ec47df9372e6ac41612c.
|
import json
from .compat import OrderedDict
from .openpgp_message import OpenPGPMessage
def dump_stream(f, output_stream, indent=4):
message = OpenPGPMessage.from_stream(f)
return json.dump(message, output_stream, indent=indent,
cls=OpenPGPJsonEncoder)
class OpenPGPJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bytearray):
return self.serialize_bytes(obj)
if getattr(obj, 'serialize', None):
return obj.serialize()
return super(OpenPGPJsonEncoder, self).default(obj)
@staticmethod
def serialize_bytes(some_bytes):
return OrderedDict([
('octets', ':'.join(['{0:02x}'.format(byte)
for byte in some_bytes])),
('length', len(some_bytes)),
])
|
<commit_before>import json
from .compat import OrderedDict
from .openpgp_message import OpenPGPMessage
def dump_stream(f, output_stream, indent=4):
message = OpenPGPMessage.from_stream(f)
return json.dump(message, output_stream, indent=indent,
cls=OpenPGPJsonEncoder)
class OpenPGPJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bytearray):
return self.serialize_bytes(obj)
if getattr(obj, 'serialize', None):
return obj.serialize()
return super(OpenPGPJsonEncoder, self).default(obj)
def encode(self, obj):
# If a builtin type provides a `serialize` method, use that instead of
# the default serialisation, eg. namedtuple
if getattr(obj, 'serialize', None):
obj = obj.serialize()
return super(OpenPGPJsonEncoder, self).encode(obj)
@staticmethod
def serialize_bytes(some_bytes):
return OrderedDict([
('octets', ':'.join(['{0:02x}'.format(byte)
for byte in some_bytes])),
('length', len(some_bytes)),
])
<commit_msg>Revert "Fix JSON encoding of `PacketLocation`"
This reverts commit 9e91912c6c1764c88890ec47df9372e6ac41612c.<commit_after>
|
import json
from .compat import OrderedDict
from .openpgp_message import OpenPGPMessage
def dump_stream(f, output_stream, indent=4):
message = OpenPGPMessage.from_stream(f)
return json.dump(message, output_stream, indent=indent,
cls=OpenPGPJsonEncoder)
class OpenPGPJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bytearray):
return self.serialize_bytes(obj)
if getattr(obj, 'serialize', None):
return obj.serialize()
return super(OpenPGPJsonEncoder, self).default(obj)
@staticmethod
def serialize_bytes(some_bytes):
return OrderedDict([
('octets', ':'.join(['{0:02x}'.format(byte)
for byte in some_bytes])),
('length', len(some_bytes)),
])
|
import json
from .compat import OrderedDict
from .openpgp_message import OpenPGPMessage
def dump_stream(f, output_stream, indent=4):
message = OpenPGPMessage.from_stream(f)
return json.dump(message, output_stream, indent=indent,
cls=OpenPGPJsonEncoder)
class OpenPGPJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bytearray):
return self.serialize_bytes(obj)
if getattr(obj, 'serialize', None):
return obj.serialize()
return super(OpenPGPJsonEncoder, self).default(obj)
def encode(self, obj):
# If a builtin type provides a `serialize` method, use that instead of
# the default serialisation, eg. namedtuple
if getattr(obj, 'serialize', None):
obj = obj.serialize()
return super(OpenPGPJsonEncoder, self).encode(obj)
@staticmethod
def serialize_bytes(some_bytes):
return OrderedDict([
('octets', ':'.join(['{0:02x}'.format(byte)
for byte in some_bytes])),
('length', len(some_bytes)),
])
Revert "Fix JSON encoding of `PacketLocation`"
This reverts commit 9e91912c6c1764c88890ec47df9372e6ac41612c.import json
from .compat import OrderedDict
from .openpgp_message import OpenPGPMessage
def dump_stream(f, output_stream, indent=4):
message = OpenPGPMessage.from_stream(f)
return json.dump(message, output_stream, indent=indent,
cls=OpenPGPJsonEncoder)
class OpenPGPJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bytearray):
return self.serialize_bytes(obj)
if getattr(obj, 'serialize', None):
return obj.serialize()
return super(OpenPGPJsonEncoder, self).default(obj)
@staticmethod
def serialize_bytes(some_bytes):
return OrderedDict([
('octets', ':'.join(['{0:02x}'.format(byte)
for byte in some_bytes])),
('length', len(some_bytes)),
])
|
<commit_before>import json
from .compat import OrderedDict
from .openpgp_message import OpenPGPMessage
def dump_stream(f, output_stream, indent=4):
message = OpenPGPMessage.from_stream(f)
return json.dump(message, output_stream, indent=indent,
cls=OpenPGPJsonEncoder)
class OpenPGPJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bytearray):
return self.serialize_bytes(obj)
if getattr(obj, 'serialize', None):
return obj.serialize()
return super(OpenPGPJsonEncoder, self).default(obj)
def encode(self, obj):
# If a builtin type provides a `serialize` method, use that instead of
# the default serialisation, eg. namedtuple
if getattr(obj, 'serialize', None):
obj = obj.serialize()
return super(OpenPGPJsonEncoder, self).encode(obj)
@staticmethod
def serialize_bytes(some_bytes):
return OrderedDict([
('octets', ':'.join(['{0:02x}'.format(byte)
for byte in some_bytes])),
('length', len(some_bytes)),
])
<commit_msg>Revert "Fix JSON encoding of `PacketLocation`"
This reverts commit 9e91912c6c1764c88890ec47df9372e6ac41612c.<commit_after>import json
from .compat import OrderedDict
from .openpgp_message import OpenPGPMessage
def dump_stream(f, output_stream, indent=4):
message = OpenPGPMessage.from_stream(f)
return json.dump(message, output_stream, indent=indent,
cls=OpenPGPJsonEncoder)
class OpenPGPJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, bytearray):
return self.serialize_bytes(obj)
if getattr(obj, 'serialize', None):
return obj.serialize()
return super(OpenPGPJsonEncoder, self).default(obj)
@staticmethod
def serialize_bytes(some_bytes):
return OrderedDict([
('octets', ':'.join(['{0:02x}'.format(byte)
for byte in some_bytes])),
('length', len(some_bytes)),
])
|
84a0aef34f8ab187de7e0c2b17c2e79d0e8f2330
|
feedback/forms.py
|
feedback/forms.py
|
from django import forms
from feedback.models import Feedback
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
exclude = ("url", "resolved", "publish",)
|
from django import forms
from feedback.models import Feedback
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
exclude = ("user", "url", "resolved", "publish",)
|
Remove the user field from the form to counter-balance making the field editable.
|
Remove the user field from the form to counter-balance making the field editable.
|
Python
|
bsd-3-clause
|
gabrielhurley/django-user-feedback
|
from django import forms
from feedback.models import Feedback
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
exclude = ("url", "resolved", "publish",)Remove the user field from the form to counter-balance making the field editable.
|
from django import forms
from feedback.models import Feedback
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
exclude = ("user", "url", "resolved", "publish",)
|
<commit_before>from django import forms
from feedback.models import Feedback
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
exclude = ("url", "resolved", "publish",)<commit_msg>Remove the user field from the form to counter-balance making the field editable.<commit_after>
|
from django import forms
from feedback.models import Feedback
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
exclude = ("user", "url", "resolved", "publish",)
|
from django import forms
from feedback.models import Feedback
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
exclude = ("url", "resolved", "publish",)Remove the user field from the form to counter-balance making the field editable.from django import forms
from feedback.models import Feedback
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
exclude = ("user", "url", "resolved", "publish",)
|
<commit_before>from django import forms
from feedback.models import Feedback
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
exclude = ("url", "resolved", "publish",)<commit_msg>Remove the user field from the form to counter-balance making the field editable.<commit_after>from django import forms
from feedback.models import Feedback
class FeedbackForm(forms.ModelForm):
class Meta:
model = Feedback
exclude = ("user", "url", "resolved", "publish",)
|
1239128a082757c3a7d53e7b14c189dda06f4171
|
flaws/__init__.py
|
flaws/__init__.py
|
#!/usr/bin/env python
import sys
from funcy import split, map
from .analysis import global_usage, local_usage, FileSet
import sys, ipdb, traceback
def info(type, value, tb):
traceback.print_exception(type, value, tb)
print
ipdb.pm()
sys.excepthook = info
def main():
command = sys.argv[1]
kwargs, args = split(r'^--', sys.argv[2:])
kwargs = dict(map(r'^--(\w+)=(.+)', kwargs))
from .ext import django
django.register(args, kwargs)
files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
if command == 'global':
global_usage(files)
elif command == 'local':
local_usage(files)
else:
print 'Unknown command', command
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import sys
from funcy import split, map
from .analysis import global_usage, local_usage, FileSet
def main():
command = sys.argv[1]
kwargs, args = split(r'^--', sys.argv[2:])
kwargs = dict(map(r'^--(\w+)(?:=(.+))?', kwargs))
# Run ipdb on exception
if 'ipdb' in kwargs:
import ipdb, traceback
def info(type, value, tb):
traceback.print_exception(type, value, tb)
print
ipdb.pm()
sys.excepthook = info
# Register plugins
from .ext import django
django.register(args, kwargs)
# Do the job
files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
if command == 'global':
global_usage(files)
elif command == 'local':
local_usage(files)
else:
print 'Unknown command', command
if __name__ == '__main__':
main()
|
Make ipdb hook turn on only when --ipdb
|
Make ipdb hook turn on only when --ipdb
|
Python
|
bsd-2-clause
|
Suor/flaws
|
#!/usr/bin/env python
import sys
from funcy import split, map
from .analysis import global_usage, local_usage, FileSet
import sys, ipdb, traceback
def info(type, value, tb):
traceback.print_exception(type, value, tb)
print
ipdb.pm()
sys.excepthook = info
def main():
command = sys.argv[1]
kwargs, args = split(r'^--', sys.argv[2:])
kwargs = dict(map(r'^--(\w+)=(.+)', kwargs))
from .ext import django
django.register(args, kwargs)
files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
if command == 'global':
global_usage(files)
elif command == 'local':
local_usage(files)
else:
print 'Unknown command', command
if __name__ == '__main__':
main()
Make ipdb hook turn on only when --ipdb
|
#!/usr/bin/env python
import sys
from funcy import split, map
from .analysis import global_usage, local_usage, FileSet
def main():
command = sys.argv[1]
kwargs, args = split(r'^--', sys.argv[2:])
kwargs = dict(map(r'^--(\w+)(?:=(.+))?', kwargs))
# Run ipdb on exception
if 'ipdb' in kwargs:
import ipdb, traceback
def info(type, value, tb):
traceback.print_exception(type, value, tb)
print
ipdb.pm()
sys.excepthook = info
# Register plugins
from .ext import django
django.register(args, kwargs)
# Do the job
files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
if command == 'global':
global_usage(files)
elif command == 'local':
local_usage(files)
else:
print 'Unknown command', command
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import sys
from funcy import split, map
from .analysis import global_usage, local_usage, FileSet
import sys, ipdb, traceback
def info(type, value, tb):
traceback.print_exception(type, value, tb)
print
ipdb.pm()
sys.excepthook = info
def main():
command = sys.argv[1]
kwargs, args = split(r'^--', sys.argv[2:])
kwargs = dict(map(r'^--(\w+)=(.+)', kwargs))
from .ext import django
django.register(args, kwargs)
files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
if command == 'global':
global_usage(files)
elif command == 'local':
local_usage(files)
else:
print 'Unknown command', command
if __name__ == '__main__':
main()
<commit_msg>Make ipdb hook turn on only when --ipdb<commit_after>
|
#!/usr/bin/env python
import sys
from funcy import split, map
from .analysis import global_usage, local_usage, FileSet
def main():
command = sys.argv[1]
kwargs, args = split(r'^--', sys.argv[2:])
kwargs = dict(map(r'^--(\w+)(?:=(.+))?', kwargs))
# Run ipdb on exception
if 'ipdb' in kwargs:
import ipdb, traceback
def info(type, value, tb):
traceback.print_exception(type, value, tb)
print
ipdb.pm()
sys.excepthook = info
# Register plugins
from .ext import django
django.register(args, kwargs)
# Do the job
files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
if command == 'global':
global_usage(files)
elif command == 'local':
local_usage(files)
else:
print 'Unknown command', command
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import sys
from funcy import split, map
from .analysis import global_usage, local_usage, FileSet
import sys, ipdb, traceback
def info(type, value, tb):
traceback.print_exception(type, value, tb)
print
ipdb.pm()
sys.excepthook = info
def main():
command = sys.argv[1]
kwargs, args = split(r'^--', sys.argv[2:])
kwargs = dict(map(r'^--(\w+)=(.+)', kwargs))
from .ext import django
django.register(args, kwargs)
files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
if command == 'global':
global_usage(files)
elif command == 'local':
local_usage(files)
else:
print 'Unknown command', command
if __name__ == '__main__':
main()
Make ipdb hook turn on only when --ipdb#!/usr/bin/env python
import sys
from funcy import split, map
from .analysis import global_usage, local_usage, FileSet
def main():
command = sys.argv[1]
kwargs, args = split(r'^--', sys.argv[2:])
kwargs = dict(map(r'^--(\w+)(?:=(.+))?', kwargs))
# Run ipdb on exception
if 'ipdb' in kwargs:
import ipdb, traceback
def info(type, value, tb):
traceback.print_exception(type, value, tb)
print
ipdb.pm()
sys.excepthook = info
# Register plugins
from .ext import django
django.register(args, kwargs)
# Do the job
files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
if command == 'global':
global_usage(files)
elif command == 'local':
local_usage(files)
else:
print 'Unknown command', command
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import sys
from funcy import split, map
from .analysis import global_usage, local_usage, FileSet
import sys, ipdb, traceback
def info(type, value, tb):
traceback.print_exception(type, value, tb)
print
ipdb.pm()
sys.excepthook = info
def main():
command = sys.argv[1]
kwargs, args = split(r'^--', sys.argv[2:])
kwargs = dict(map(r'^--(\w+)=(.+)', kwargs))
from .ext import django
django.register(args, kwargs)
files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
if command == 'global':
global_usage(files)
elif command == 'local':
local_usage(files)
else:
print 'Unknown command', command
if __name__ == '__main__':
main()
<commit_msg>Make ipdb hook turn on only when --ipdb<commit_after>#!/usr/bin/env python
import sys
from funcy import split, map
from .analysis import global_usage, local_usage, FileSet
def main():
command = sys.argv[1]
kwargs, args = split(r'^--', sys.argv[2:])
kwargs = dict(map(r'^--(\w+)(?:=(.+))?', kwargs))
# Run ipdb on exception
if 'ipdb' in kwargs:
import ipdb, traceback
def info(type, value, tb):
traceback.print_exception(type, value, tb)
print
ipdb.pm()
sys.excepthook = info
# Register plugins
from .ext import django
django.register(args, kwargs)
# Do the job
files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
if command == 'global':
global_usage(files)
elif command == 'local':
local_usage(files)
else:
print 'Unknown command', command
if __name__ == '__main__':
main()
|
c64682fe6204b56bd5282c46a7c7168a55b46a86
|
spicedham/__init__.py
|
spicedham/__init__.py
|
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
return average_score / total
|
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
if total > 0:
return average_score / total
else:
return 0
|
Allow for the case where no plugin returns a score
|
Allow for the case where no plugin returns a score
|
Python
|
mpl-2.0
|
mozilla/spicedham,mozilla/spicedham
|
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
return average_score / total
Allow for the case where no plugin returns a score
|
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
if total > 0:
return average_score / total
else:
return 0
|
<commit_before>from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
return average_score / total
<commit_msg>Allow for the case where no plugin returns a score<commit_after>
|
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
if total > 0:
return average_score / total
else:
return 0
|
from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
return average_score / total
Allow for the case where no plugin returns a scorefrom pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
if total > 0:
return average_score / total
else:
return 0
|
<commit_before>from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
return average_score / total
<commit_msg>Allow for the case where no plugin returns a score<commit_after>from pkg_resources import iter_entry_points
from config import config
plugins = []
for plugin in iter_entry_points(group='spicedham.classifiers', name=None):
pluginClass = plugin.load()
plugins.append(pluginClass())
def train(training_data, is_spam):
for plugin in plugins:
plugin.train(training_data, is_spam)
def classify(classification_data):
average_score = 0
total = 0
for plugin in plugins:
value = plugin.classify(classification_data)
if value != None:
total += 1
average_score += value
if total > 0:
return average_score / total
else:
return 0
|
0a5e2134fda46269626b6fac367a28218734b256
|
conf_site/accounts/tests/__init__.py
|
conf_site/accounts/tests/__init__.py
|
from factory import fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
class AccountsTestCase(TestCase):
def setUp(self):
super(AccountsTestCase, self).setUp()
self.password = fuzzy.FuzzyText(length=16)
self.new_password = fuzzy.FuzzyText(length=16)
user_model = get_user_model()
self.user = user_model.objects.get_or_create(
username="test",
email="example@example.com",
first_name="Test",
last_name="User",
)[0]
self.user.set_password(self.password)
self.user.save()
def _become_superuser(self):
"""Make this testcase's user a superuser."""
self.user.is_superuser = True
self.user.save()
|
from factory import fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
class AccountsTestCase(TestCase):
def setUp(self):
super(AccountsTestCase, self).setUp()
self.password = fuzzy.FuzzyText(length=16)
self.new_password = fuzzy.FuzzyText(length=16)
user_model = get_user_model()
self.user = user_model.objects.get_or_create(
username="test",
email="example@example.com",
first_name="Test",
last_name="User",
)[0]
self.user.set_password(self.password)
self.user.save()
def _become_staff(self):
"""Make this testcase's user a staff user."""
self.user.is_staff = True
self.user.is_superuser = False
self.user.save()
def _become_superuser(self):
"""Make this testcase's user a superuser."""
self.user.is_superuser = True
self.user.save()
|
Add `_become_staff` method to AccountsTestCase.
|
Add `_become_staff` method to AccountsTestCase.
|
Python
|
mit
|
pydata/conf_site,pydata/conf_site,pydata/conf_site
|
from factory import fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
class AccountsTestCase(TestCase):
def setUp(self):
super(AccountsTestCase, self).setUp()
self.password = fuzzy.FuzzyText(length=16)
self.new_password = fuzzy.FuzzyText(length=16)
user_model = get_user_model()
self.user = user_model.objects.get_or_create(
username="test",
email="example@example.com",
first_name="Test",
last_name="User",
)[0]
self.user.set_password(self.password)
self.user.save()
def _become_superuser(self):
"""Make this testcase's user a superuser."""
self.user.is_superuser = True
self.user.save()
Add `_become_staff` method to AccountsTestCase.
|
from factory import fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
class AccountsTestCase(TestCase):
def setUp(self):
super(AccountsTestCase, self).setUp()
self.password = fuzzy.FuzzyText(length=16)
self.new_password = fuzzy.FuzzyText(length=16)
user_model = get_user_model()
self.user = user_model.objects.get_or_create(
username="test",
email="example@example.com",
first_name="Test",
last_name="User",
)[0]
self.user.set_password(self.password)
self.user.save()
def _become_staff(self):
"""Make this testcase's user a staff user."""
self.user.is_staff = True
self.user.is_superuser = False
self.user.save()
def _become_superuser(self):
"""Make this testcase's user a superuser."""
self.user.is_superuser = True
self.user.save()
|
<commit_before>from factory import fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
class AccountsTestCase(TestCase):
def setUp(self):
super(AccountsTestCase, self).setUp()
self.password = fuzzy.FuzzyText(length=16)
self.new_password = fuzzy.FuzzyText(length=16)
user_model = get_user_model()
self.user = user_model.objects.get_or_create(
username="test",
email="example@example.com",
first_name="Test",
last_name="User",
)[0]
self.user.set_password(self.password)
self.user.save()
def _become_superuser(self):
"""Make this testcase's user a superuser."""
self.user.is_superuser = True
self.user.save()
<commit_msg>Add `_become_staff` method to AccountsTestCase.<commit_after>
|
from factory import fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
class AccountsTestCase(TestCase):
def setUp(self):
super(AccountsTestCase, self).setUp()
self.password = fuzzy.FuzzyText(length=16)
self.new_password = fuzzy.FuzzyText(length=16)
user_model = get_user_model()
self.user = user_model.objects.get_or_create(
username="test",
email="example@example.com",
first_name="Test",
last_name="User",
)[0]
self.user.set_password(self.password)
self.user.save()
def _become_staff(self):
"""Make this testcase's user a staff user."""
self.user.is_staff = True
self.user.is_superuser = False
self.user.save()
def _become_superuser(self):
"""Make this testcase's user a superuser."""
self.user.is_superuser = True
self.user.save()
|
from factory import fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
class AccountsTestCase(TestCase):
def setUp(self):
super(AccountsTestCase, self).setUp()
self.password = fuzzy.FuzzyText(length=16)
self.new_password = fuzzy.FuzzyText(length=16)
user_model = get_user_model()
self.user = user_model.objects.get_or_create(
username="test",
email="example@example.com",
first_name="Test",
last_name="User",
)[0]
self.user.set_password(self.password)
self.user.save()
def _become_superuser(self):
"""Make this testcase's user a superuser."""
self.user.is_superuser = True
self.user.save()
Add `_become_staff` method to AccountsTestCase.from factory import fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
class AccountsTestCase(TestCase):
def setUp(self):
super(AccountsTestCase, self).setUp()
self.password = fuzzy.FuzzyText(length=16)
self.new_password = fuzzy.FuzzyText(length=16)
user_model = get_user_model()
self.user = user_model.objects.get_or_create(
username="test",
email="example@example.com",
first_name="Test",
last_name="User",
)[0]
self.user.set_password(self.password)
self.user.save()
def _become_staff(self):
"""Make this testcase's user a staff user."""
self.user.is_staff = True
self.user.is_superuser = False
self.user.save()
def _become_superuser(self):
"""Make this testcase's user a superuser."""
self.user.is_superuser = True
self.user.save()
|
<commit_before>from factory import fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
class AccountsTestCase(TestCase):
def setUp(self):
super(AccountsTestCase, self).setUp()
self.password = fuzzy.FuzzyText(length=16)
self.new_password = fuzzy.FuzzyText(length=16)
user_model = get_user_model()
self.user = user_model.objects.get_or_create(
username="test",
email="example@example.com",
first_name="Test",
last_name="User",
)[0]
self.user.set_password(self.password)
self.user.save()
def _become_superuser(self):
"""Make this testcase's user a superuser."""
self.user.is_superuser = True
self.user.save()
<commit_msg>Add `_become_staff` method to AccountsTestCase.<commit_after>from factory import fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
class AccountsTestCase(TestCase):
def setUp(self):
super(AccountsTestCase, self).setUp()
self.password = fuzzy.FuzzyText(length=16)
self.new_password = fuzzy.FuzzyText(length=16)
user_model = get_user_model()
self.user = user_model.objects.get_or_create(
username="test",
email="example@example.com",
first_name="Test",
last_name="User",
)[0]
self.user.set_password(self.password)
self.user.save()
def _become_staff(self):
"""Make this testcase's user a staff user."""
self.user.is_staff = True
self.user.is_superuser = False
self.user.save()
def _become_superuser(self):
"""Make this testcase's user a superuser."""
self.user.is_superuser = True
self.user.save()
|
76e436daef154bdf6acd1b0569f6fa2baa61addd
|
pyxform/tests_v1/test_audit.py
|
pyxform/tests_v1/test_audit.py
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_blank_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
Add test for blank audit name.
|
Add test for blank audit name.
|
Python
|
bsd-2-clause
|
XLSForm/pyxform,XLSForm/pyxform
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)Add test for blank audit name.
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
    """Tests that an XLSForm ``audit`` row always binds to ``meta/audit``.

    Regardless of the name given in the sheet (canonical, arbitrary, or
    blank), the generated XForm must contain an ``<audit/>`` element inside
    ``<meta>`` with a binary-typed bind at ``/meta_audit/meta/audit``.
    """

    def test_audit(self):
        # Canonical case: the audit row is explicitly named "audit".
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey |        |        |       |
            |        | type   | name   | label |
            |        | audit  | audit  |       |
            """,
            xml__contains=[
                '<meta>',
                '<audit/>',
                '</meta>',
                '<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
        )

    def test_audit_random_name(self):
        # A user-chosen name ("bobby") must still be normalized to "audit".
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey |        |        |       |
            |        | type   | name   | label |
            |        | audit  | bobby  |       |
            """,
            xml__contains=[
                '<meta>',
                '<audit/>',
                '</meta>',
                '<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
        )

    def test_audit_blank_name(self):
        # A blank name must default to "audit" rather than erroring out.
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey |        |        |       |
            |        | type   | name   | label |
            |        | audit  |        |       |
            """,
            xml__contains=[
                '<meta>',
                '<audit/>',
                '</meta>',
                '<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
        )
|
<commit_before>from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)<commit_msg>Add test for blank audit name.<commit_after>
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_blank_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)Add test for blank audit name.from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_blank_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
<commit_before>from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)<commit_msg>Add test for blank audit name.<commit_after>from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_blank_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
fb25fa04cf553b1084425a1f2af6a9315266ffaf
|
salt/renderers/yaml_jinja.py
|
salt/renderers/yaml_jinja.py
|
'''
The default rendering engine, process yaml with the jinja2 templating engine
This renderer will take a yaml file with the jinja2 template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import thirt party modules
import yaml
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except:
pass
# Import Salt libs
from salt.utils.jinja import get_template
def render(template_file, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template_file):
return {}
passthrough = {}
passthrough['salt'] = __salt__
passthrough['grains'] = __grains__
passthrough['env'] = env
passthrough['sls'] = sls
template = get_template(template_file, __opts__, env)
yaml_data = template.render(**passthrough)
return yaml.safe_load(yaml_data)
|
'''
The default rendering engine, process yaml with the jinja2 templating engine
This renderer will take a yaml file with the jinja2 template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import thirt party modules
import yaml
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except:
pass
# Import Salt libs
from salt.utils.jinja import get_template
def render(template_file, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template_file):
return {}
passthrough = {}
passthrough['salt'] = __salt__
passthrough['grains'] = __grains__
passthrough['pillar'] = __pillar__
passthrough['env'] = env
passthrough['sls'] = sls
template = get_template(template_file, __opts__, env)
yaml_data = template.render(**passthrough)
return yaml.safe_load(yaml_data)
|
Add pillar data to default renderer
|
Add pillar data to default renderer
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
The default rendering engine, process yaml with the jinja2 templating engine
This renderer will take a yaml file with the jinja2 template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import thirt party modules
import yaml
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except:
pass
# Import Salt libs
from salt.utils.jinja import get_template
def render(template_file, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template_file):
return {}
passthrough = {}
passthrough['salt'] = __salt__
passthrough['grains'] = __grains__
passthrough['env'] = env
passthrough['sls'] = sls
template = get_template(template_file, __opts__, env)
yaml_data = template.render(**passthrough)
return yaml.safe_load(yaml_data)
Add pillar data to default renderer
|
'''
The default rendering engine, process yaml with the jinja2 templating engine
This renderer will take a yaml file with the jinja2 template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import thirt party modules
import yaml
try:
    # Prefer the libyaml C implementations for speed when PyYAML was
    # built with them.
    yaml.Loader = yaml.CLoader
    yaml.Dumper = yaml.CDumper
except AttributeError:
    # PyYAML built without libyaml: CLoader/CDumper do not exist; keep
    # the pure-Python Loader/Dumper. A bare `except:` here would also
    # swallow KeyboardInterrupt/SystemExit and unrelated bugs.
    pass
# Import Salt libs
from salt.utils.jinja import get_template
def render(template_file, env='', sls=''):
    '''
    Render ``template_file`` through the Jinja engine and parse the
    result as YAML, returning the high-data structure for salt states.

    Returns an empty dict when the template file does not exist.
    '''
    if not os.path.isfile(template_file):
        return {}
    # Context handed to the Jinja template: salt execution functions,
    # grain and pillar data, plus the environment and sls identifiers.
    context = {
        'salt': __salt__,
        'grains': __grains__,
        'pillar': __pillar__,
        'env': env,
        'sls': sls,
    }
    tmpl = get_template(template_file, __opts__, env)
    return yaml.safe_load(tmpl.render(**context))
|
<commit_before>'''
The default rendering engine, process yaml with the jinja2 templating engine
This renderer will take a yaml file with the jinja2 template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import thirt party modules
import yaml
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except:
pass
# Import Salt libs
from salt.utils.jinja import get_template
def render(template_file, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template_file):
return {}
passthrough = {}
passthrough['salt'] = __salt__
passthrough['grains'] = __grains__
passthrough['env'] = env
passthrough['sls'] = sls
template = get_template(template_file, __opts__, env)
yaml_data = template.render(**passthrough)
return yaml.safe_load(yaml_data)
<commit_msg>Add pillar data to default renderer<commit_after>
|
'''
The default rendering engine, process yaml with the jinja2 templating engine
This renderer will take a yaml file with the jinja2 template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import thirt party modules
import yaml
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except:
pass
# Import Salt libs
from salt.utils.jinja import get_template
def render(template_file, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template_file):
return {}
passthrough = {}
passthrough['salt'] = __salt__
passthrough['grains'] = __grains__
passthrough['pillar'] = __pillar__
passthrough['env'] = env
passthrough['sls'] = sls
template = get_template(template_file, __opts__, env)
yaml_data = template.render(**passthrough)
return yaml.safe_load(yaml_data)
|
'''
The default rendering engine, process yaml with the jinja2 templating engine
This renderer will take a yaml file with the jinja2 template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import thirt party modules
import yaml
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except:
pass
# Import Salt libs
from salt.utils.jinja import get_template
def render(template_file, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template_file):
return {}
passthrough = {}
passthrough['salt'] = __salt__
passthrough['grains'] = __grains__
passthrough['env'] = env
passthrough['sls'] = sls
template = get_template(template_file, __opts__, env)
yaml_data = template.render(**passthrough)
return yaml.safe_load(yaml_data)
Add pillar data to default renderer'''
The default rendering engine, process yaml with the jinja2 templating engine
This renderer will take a yaml file with the jinja2 template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import thirt party modules
import yaml
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except:
pass
# Import Salt libs
from salt.utils.jinja import get_template
def render(template_file, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template_file):
return {}
passthrough = {}
passthrough['salt'] = __salt__
passthrough['grains'] = __grains__
passthrough['pillar'] = __pillar__
passthrough['env'] = env
passthrough['sls'] = sls
template = get_template(template_file, __opts__, env)
yaml_data = template.render(**passthrough)
return yaml.safe_load(yaml_data)
|
<commit_before>'''
The default rendering engine, process yaml with the jinja2 templating engine
This renderer will take a yaml file with the jinja2 template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import thirt party modules
import yaml
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except:
pass
# Import Salt libs
from salt.utils.jinja import get_template
def render(template_file, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template_file):
return {}
passthrough = {}
passthrough['salt'] = __salt__
passthrough['grains'] = __grains__
passthrough['env'] = env
passthrough['sls'] = sls
template = get_template(template_file, __opts__, env)
yaml_data = template.render(**passthrough)
return yaml.safe_load(yaml_data)
<commit_msg>Add pillar data to default renderer<commit_after>'''
The default rendering engine, process yaml with the jinja2 templating engine
This renderer will take a yaml file with the jinja2 template and render it to a
high data format for salt states.
'''
# Import Python Modules
import os
# Import thirt party modules
import yaml
try:
yaml.Loader = yaml.CLoader
yaml.Dumper = yaml.CDumper
except:
pass
# Import Salt libs
from salt.utils.jinja import get_template
def render(template_file, env='', sls=''):
'''
Render the data passing the functions and grains into the rendering system
'''
if not os.path.isfile(template_file):
return {}
passthrough = {}
passthrough['salt'] = __salt__
passthrough['grains'] = __grains__
passthrough['pillar'] = __pillar__
passthrough['env'] = env
passthrough['sls'] = sls
template = get_template(template_file, __opts__, env)
yaml_data = template.render(**passthrough)
return yaml.safe_load(yaml_data)
|
d63e792815b9bfe485e4127bdcb088374d8e983e
|
salvus/scripts/first_boot.py
|
salvus/scripts/first_boot.py
|
#!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
if os.path.exists('/mnt/home/'):
# Compute machine
if not os.path.exists('/mnt/home/aquota.group'):
os.system("quotacheck -cug /mnt/home")
os.system("quotaon -a")
# disable quotas for now, so that students in my class can do Sage development.
os.system('quotaoff -a')
# Restore user accounts
if os.path.exists('/mnt/home/etc/'):
os.system("cp /mnt/home/etc/* /etc/")
else:
os.system("mkdir -p /mnt/home/etc/")
# Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
|
#!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
if os.path.exists('/mnt/home/'):
# Compute machine
if not os.path.exists('/mnt/home/aquota.group'):
os.system("quotacheck -cug /mnt/home")
os.system("quotaon -a")
# disable quotas for now, so that students in my class can do Sage development.
os.system('quotaoff -a')
# Restore user accounts
if os.path.exists('/mnt/home/etc/'):
os.system("cp /mnt/home/etc/* /etc/")
else:
os.system("mkdir -p /mnt/home/etc/")
# Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
# Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).
os.system("rm -rf /mnt/home/salvus/.ssh/id_rsa*")
|
Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).
|
Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).
|
Python
|
agpl-3.0
|
tscholl2/smc,sagemathinc/smc,sagemathinc/smc,sagemathinc/smc,DrXyzzy/smc,tscholl2/smc,tscholl2/smc,DrXyzzy/smc,DrXyzzy/smc,DrXyzzy/smc,tscholl2/smc,tscholl2/smc,sagemathinc/smc
|
#!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
if os.path.exists('/mnt/home/'):
# Compute machine
if not os.path.exists('/mnt/home/aquota.group'):
os.system("quotacheck -cug /mnt/home")
os.system("quotaon -a")
# disable quotas for now, so that students in my class can do Sage development.
os.system('quotaoff -a')
# Restore user accounts
if os.path.exists('/mnt/home/etc/'):
os.system("cp /mnt/home/etc/* /etc/")
else:
os.system("mkdir -p /mnt/home/etc/")
# Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).
|
#!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
if os.path.exists('/mnt/home/'):
# Compute machine
if not os.path.exists('/mnt/home/aquota.group'):
os.system("quotacheck -cug /mnt/home")
os.system("quotaon -a")
# disable quotas for now, so that students in my class can do Sage development.
os.system('quotaoff -a')
# Restore user accounts
if os.path.exists('/mnt/home/etc/'):
os.system("cp /mnt/home/etc/* /etc/")
else:
os.system("mkdir -p /mnt/home/etc/")
# Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
# Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).
os.system("rm -rf /mnt/home/salvus/.ssh/id_rsa*")
|
<commit_before>#!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
if os.path.exists('/mnt/home/'):
# Compute machine
if not os.path.exists('/mnt/home/aquota.group'):
os.system("quotacheck -cug /mnt/home")
os.system("quotaon -a")
# disable quotas for now, so that students in my class can do Sage development.
os.system('quotaoff -a')
# Restore user accounts
if os.path.exists('/mnt/home/etc/'):
os.system("cp /mnt/home/etc/* /etc/")
else:
os.system("mkdir -p /mnt/home/etc/")
# Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
<commit_msg>Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).<commit_after>
|
#!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
if os.path.exists('/mnt/home/'):
    # Compute machine: /mnt/home is the external user-data disk image.
    if not os.path.exists('/mnt/home/aquota.group'):
        # First boot with this disk: build the quota database, then
        # enable quotas system-wide.
        os.system("quotacheck -cug /mnt/home")
        os.system("quotaon -a")

    # disable quotas for now, so that students in my class can do Sage development.
    os.system('quotaoff -a')

    # Restore user accounts
    if os.path.exists('/mnt/home/etc/'):
        # Copy persisted account files (passwd/group/etc.) back into /etc.
        os.system("cp /mnt/home/etc/* /etc/")
    else:
        # Nothing persisted yet; create the directory for later saves.
        os.system("mkdir -p /mnt/home/etc/")

    # Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
    os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
    # Same treatment for /scratch (bind-mounted onto the quota'd disk).
    os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")

    # Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).
    os.system("rm -rf /mnt/home/salvus/.ssh/id_rsa*")
|
#!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
if os.path.exists('/mnt/home/'):
# Compute machine
if not os.path.exists('/mnt/home/aquota.group'):
os.system("quotacheck -cug /mnt/home")
os.system("quotaon -a")
# disable quotas for now, so that students in my class can do Sage development.
os.system('quotaoff -a')
# Restore user accounts
if os.path.exists('/mnt/home/etc/'):
os.system("cp /mnt/home/etc/* /etc/")
else:
os.system("mkdir -p /mnt/home/etc/")
# Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).#!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
if os.path.exists('/mnt/home/'):
# Compute machine
if not os.path.exists('/mnt/home/aquota.group'):
os.system("quotacheck -cug /mnt/home")
os.system("quotaon -a")
# disable quotas for now, so that students in my class can do Sage development.
os.system('quotaoff -a')
# Restore user accounts
if os.path.exists('/mnt/home/etc/'):
os.system("cp /mnt/home/etc/* /etc/")
else:
os.system("mkdir -p /mnt/home/etc/")
# Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
# Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).
os.system("rm -rf /mnt/home/salvus/.ssh/id_rsa*")
|
<commit_before>#!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
if os.path.exists('/mnt/home/'):
# Compute machine
if not os.path.exists('/mnt/home/aquota.group'):
os.system("quotacheck -cug /mnt/home")
os.system("quotaon -a")
# disable quotas for now, so that students in my class can do Sage development.
os.system('quotaoff -a')
# Restore user accounts
if os.path.exists('/mnt/home/etc/'):
os.system("cp /mnt/home/etc/* /etc/")
else:
os.system("mkdir -p /mnt/home/etc/")
# Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
<commit_msg>Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).<commit_after>#!/usr/bin/env python
# This script is run by /etc/rc.local when booting up. It does special configuration
# depending on what images are mounted, etc.
import os
if os.path.exists('/mnt/home/'):
# Compute machine
if not os.path.exists('/mnt/home/aquota.group'):
os.system("quotacheck -cug /mnt/home")
os.system("quotaon -a")
# disable quotas for now, so that students in my class can do Sage development.
os.system('quotaoff -a')
# Restore user accounts
if os.path.exists('/mnt/home/etc/'):
os.system("cp /mnt/home/etc/* /etc/")
else:
os.system("mkdir -p /mnt/home/etc/")
# Setup /tmp so it is on the external disk image (has that quota) and is clean, since this is a fresh boot.
os.system("rm -rf /mnt/home/tmp; mkdir -p /mnt/home/tmp/; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/tmp /tmp; chmod a+rwx /mnt/home/tmp/")
os.system("mkdir -p /mnt/home/scratch; mkdir -p /scratch; chmod +t /mnt/home/tmp; mount -o bind /mnt/home/scratch /scratch; chmod a+rwx /mnt/home/scratch/")
# Remove .ssh keys on compute node from /mnt/home/salvus account, since this is a security risk (in case compute node is r00ted).
os.system("rm -rf /mnt/home/salvus/.ssh/id_rsa*")
|
e645104656fda22f4c0c2b3d9841ed792b1e7103
|
conftest.py
|
conftest.py
|
import sys
pytest_plugins = 'setuptools.tests.fixtures'
def pytest_addoption(parser):
parser.addoption(
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
collect_ignore = [
'tests/manual_test.py',
'setuptools/tests/mod_with_constant.py',
'setuptools/_distutils',
'_distutils_hack',
'setuptools/extern',
'pkg_resources/extern',
'pkg_resources/tests/data',
'setuptools/_vendor',
'pkg_resources/_vendor',
]
if sys.version_info < (3, 6):
collect_ignore.append('docs/conf.py') # uses f-strings
collect_ignore.append('pavement.py')
|
import sys
import pytest
pytest_plugins = 'setuptools.tests.fixtures'
def pytest_addoption(parser):
    """Register project-specific command line options with pytest."""
    parser.addoption(
        "--package_name", action="append", default=[],
        help="list of package_name to pass to test functions",
    )
    # Integration tests are opt-in: passing --integration runs ONLY them
    # (enforced by the _skip_integration autouse fixture below).
    parser.addoption(
        "--integration", action="store_true", default=False,
        help="run integration tests (only)"
    )
def pytest_configure(config):
    """Declare the ``integration`` marker and tune options for that mode."""
    config.addinivalue_line("markers", "integration: indicate integration tests")
    if config.option.integration:
        # Assume unit tests and flake already run
        config.option.flake8 = False
# Paths pytest must not collect: manual-only tests, a fixture module that
# is not itself a test, and vendored/extern packages.
collect_ignore = [
    'tests/manual_test.py',
    'setuptools/tests/mod_with_constant.py',
    'setuptools/_distutils',
    '_distutils_hack',
    'setuptools/extern',
    'pkg_resources/extern',
    'pkg_resources/tests/data',
    'setuptools/_vendor',
    'pkg_resources/_vendor',
]
# These modules use syntax unavailable on older interpreters, so skip
# collecting them there.
if sys.version_info < (3, 6):
    collect_ignore.append('docs/conf.py')  # uses f-strings
    collect_ignore.append('pavement.py')
@pytest.fixture(autouse=True)
def _skip_integration(request):
    """Skip any test whose ``integration`` marker does not match the run mode.

    With ``--integration`` only marked tests run; without it, marked
    tests are skipped. Applied automatically to every test (autouse).
    """
    running_integration_tests = request.config.getoption("--integration")
    is_integration_test = request.node.get_closest_marker("integration")
    if running_integration_tests and not is_integration_test:
        pytest.skip("running integration tests only")
    if not running_integration_tests and is_integration_test:
        pytest.skip("skipping integration tests")
|
Configure pytest to enable/disable integration tests
|
Configure pytest to enable/disable integration tests
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
import sys
pytest_plugins = 'setuptools.tests.fixtures'
def pytest_addoption(parser):
parser.addoption(
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
collect_ignore = [
'tests/manual_test.py',
'setuptools/tests/mod_with_constant.py',
'setuptools/_distutils',
'_distutils_hack',
'setuptools/extern',
'pkg_resources/extern',
'pkg_resources/tests/data',
'setuptools/_vendor',
'pkg_resources/_vendor',
]
if sys.version_info < (3, 6):
collect_ignore.append('docs/conf.py') # uses f-strings
collect_ignore.append('pavement.py')
Configure pytest to enable/disable integration tests
|
import sys
import pytest
pytest_plugins = 'setuptools.tests.fixtures'
def pytest_addoption(parser):
parser.addoption(
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
parser.addoption(
"--integration", action="store_true", default=False,
help="run integration tests (only)"
)
def pytest_configure(config):
config.addinivalue_line("markers", "integration: indicate integration tests")
if config.option.integration:
# Assume unit tests and flake already run
config.option.flake8 = False
collect_ignore = [
'tests/manual_test.py',
'setuptools/tests/mod_with_constant.py',
'setuptools/_distutils',
'_distutils_hack',
'setuptools/extern',
'pkg_resources/extern',
'pkg_resources/tests/data',
'setuptools/_vendor',
'pkg_resources/_vendor',
]
if sys.version_info < (3, 6):
collect_ignore.append('docs/conf.py') # uses f-strings
collect_ignore.append('pavement.py')
@pytest.fixture(autouse=True)
def _skip_integration(request):
running_integration_tests = request.config.getoption("--integration")
is_integration_test = request.node.get_closest_marker("integration")
if running_integration_tests and not is_integration_test:
pytest.skip("running integration tests only")
if not running_integration_tests and is_integration_test:
pytest.skip("skipping integration tests")
|
<commit_before>import sys
pytest_plugins = 'setuptools.tests.fixtures'
def pytest_addoption(parser):
parser.addoption(
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
collect_ignore = [
'tests/manual_test.py',
'setuptools/tests/mod_with_constant.py',
'setuptools/_distutils',
'_distutils_hack',
'setuptools/extern',
'pkg_resources/extern',
'pkg_resources/tests/data',
'setuptools/_vendor',
'pkg_resources/_vendor',
]
if sys.version_info < (3, 6):
collect_ignore.append('docs/conf.py') # uses f-strings
collect_ignore.append('pavement.py')
<commit_msg>Configure pytest to enable/disable integration tests<commit_after>
|
import sys
import pytest
pytest_plugins = 'setuptools.tests.fixtures'
def pytest_addoption(parser):
parser.addoption(
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
parser.addoption(
"--integration", action="store_true", default=False,
help="run integration tests (only)"
)
def pytest_configure(config):
config.addinivalue_line("markers", "integration: indicate integration tests")
if config.option.integration:
# Assume unit tests and flake already run
config.option.flake8 = False
collect_ignore = [
'tests/manual_test.py',
'setuptools/tests/mod_with_constant.py',
'setuptools/_distutils',
'_distutils_hack',
'setuptools/extern',
'pkg_resources/extern',
'pkg_resources/tests/data',
'setuptools/_vendor',
'pkg_resources/_vendor',
]
if sys.version_info < (3, 6):
collect_ignore.append('docs/conf.py') # uses f-strings
collect_ignore.append('pavement.py')
@pytest.fixture(autouse=True)
def _skip_integration(request):
running_integration_tests = request.config.getoption("--integration")
is_integration_test = request.node.get_closest_marker("integration")
if running_integration_tests and not is_integration_test:
pytest.skip("running integration tests only")
if not running_integration_tests and is_integration_test:
pytest.skip("skipping integration tests")
|
import sys
pytest_plugins = 'setuptools.tests.fixtures'
def pytest_addoption(parser):
parser.addoption(
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
collect_ignore = [
'tests/manual_test.py',
'setuptools/tests/mod_with_constant.py',
'setuptools/_distutils',
'_distutils_hack',
'setuptools/extern',
'pkg_resources/extern',
'pkg_resources/tests/data',
'setuptools/_vendor',
'pkg_resources/_vendor',
]
if sys.version_info < (3, 6):
collect_ignore.append('docs/conf.py') # uses f-strings
collect_ignore.append('pavement.py')
Configure pytest to enable/disable integration testsimport sys
import pytest
pytest_plugins = 'setuptools.tests.fixtures'
def pytest_addoption(parser):
parser.addoption(
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
parser.addoption(
"--integration", action="store_true", default=False,
help="run integration tests (only)"
)
def pytest_configure(config):
config.addinivalue_line("markers", "integration: indicate integration tests")
if config.option.integration:
# Assume unit tests and flake already run
config.option.flake8 = False
collect_ignore = [
'tests/manual_test.py',
'setuptools/tests/mod_with_constant.py',
'setuptools/_distutils',
'_distutils_hack',
'setuptools/extern',
'pkg_resources/extern',
'pkg_resources/tests/data',
'setuptools/_vendor',
'pkg_resources/_vendor',
]
if sys.version_info < (3, 6):
collect_ignore.append('docs/conf.py') # uses f-strings
collect_ignore.append('pavement.py')
@pytest.fixture(autouse=True)
def _skip_integration(request):
running_integration_tests = request.config.getoption("--integration")
is_integration_test = request.node.get_closest_marker("integration")
if running_integration_tests and not is_integration_test:
pytest.skip("running integration tests only")
if not running_integration_tests and is_integration_test:
pytest.skip("skipping integration tests")
|
<commit_before>import sys
pytest_plugins = 'setuptools.tests.fixtures'
def pytest_addoption(parser):
parser.addoption(
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
collect_ignore = [
'tests/manual_test.py',
'setuptools/tests/mod_with_constant.py',
'setuptools/_distutils',
'_distutils_hack',
'setuptools/extern',
'pkg_resources/extern',
'pkg_resources/tests/data',
'setuptools/_vendor',
'pkg_resources/_vendor',
]
if sys.version_info < (3, 6):
collect_ignore.append('docs/conf.py') # uses f-strings
collect_ignore.append('pavement.py')
<commit_msg>Configure pytest to enable/disable integration tests<commit_after>import sys
import pytest
pytest_plugins = 'setuptools.tests.fixtures'
def pytest_addoption(parser):
parser.addoption(
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
parser.addoption(
"--integration", action="store_true", default=False,
help="run integration tests (only)"
)
def pytest_configure(config):
config.addinivalue_line("markers", "integration: indicate integration tests")
if config.option.integration:
# Assume unit tests and flake already run
config.option.flake8 = False
collect_ignore = [
'tests/manual_test.py',
'setuptools/tests/mod_with_constant.py',
'setuptools/_distutils',
'_distutils_hack',
'setuptools/extern',
'pkg_resources/extern',
'pkg_resources/tests/data',
'setuptools/_vendor',
'pkg_resources/_vendor',
]
if sys.version_info < (3, 6):
collect_ignore.append('docs/conf.py') # uses f-strings
collect_ignore.append('pavement.py')
@pytest.fixture(autouse=True)
def _skip_integration(request):
running_integration_tests = request.config.getoption("--integration")
is_integration_test = request.node.get_closest_marker("integration")
if running_integration_tests and not is_integration_test:
pytest.skip("running integration tests only")
if not running_integration_tests and is_integration_test:
pytest.skip("skipping integration tests")
|
84d9a421b33660f4ad17432fef8604a55b0e2302
|
calvin/runtime/south/plugins/io/sensors/environmental/platform/raspberry_pi/sensehat_impl/environmental.py
|
calvin/runtime/south/plugins/io/sensors/environmental/platform/raspberry_pi/sensehat_impl/environmental.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sense_hat import SenseHat
from calvin.runtime.south.plugins.io.sensors.environmental import base_environmental
class Environmental(base_environmental.EnvironmentalBase):
"""
Raspberry Pi Sense HAT environmental sensors
"""
def __init__(self):
self.sense = SenseHat()
def get_temperature(self):
return self.sense.get_temperature()
def get_humidity(self):
return self.sense.get_humidity()
def get_pressure(self):
return self.sense.get_pressure()
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sense_hat import SenseHat
from calvin.runtime.south.plugins.io.sensors.environmental import base_environmental
class SenseHatResourceHandler(object):
sense_hat = SenseHat()
def __init__(self):
pass
def get_temperature(self):
return self.sense_hat.get_temperature()
def get_humidity(self):
return self.sense_hat.get_humidity()
def get_pressure(self):
return self.sense_hat.get_pressure()
class Environmental(base_environmental.EnvironmentalBase):
"""
Raspberry Pi Sense HAT environmental sensors
"""
def __init__(self):
self.sense = SenseHatResourceHandler()
def get_temperature(self):
return self.sense.get_temperature()
def get_humidity(self):
return self.sense.get_humidity()
def get_pressure(self):
return self.sense.get_pressure()
|
Allow use of sensors from more than one actor concurrenctly
|
Sensehat: Allow use of sensors from more than one actor concurrenctly
|
Python
|
apache-2.0
|
EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sense_hat import SenseHat
from calvin.runtime.south.plugins.io.sensors.environmental import base_environmental
class Environmental(base_environmental.EnvironmentalBase):
"""
Raspberry Pi Sense HAT environmental sensors
"""
def __init__(self):
self.sense = SenseHat()
def get_temperature(self):
return self.sense.get_temperature()
def get_humidity(self):
return self.sense.get_humidity()
def get_pressure(self):
return self.sense.get_pressure()
Sensehat: Allow use of sensors from more than one actor concurrenctly
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sense_hat import SenseHat
from calvin.runtime.south.plugins.io.sensors.environmental import base_environmental
class SenseHatResourceHandler(object):
sense_hat = SenseHat()
def __init__(self):
pass
def get_temperature(self):
return self.sense_hat.get_temperature()
def get_humidity(self):
return self.sense_hat.get_humidity()
def get_pressure(self):
return self.sense_hat.get_pressure()
class Environmental(base_environmental.EnvironmentalBase):
"""
Raspberry Pi Sense HAT environmental sensors
"""
def __init__(self):
self.sense = SenseHatResourceHandler()
def get_temperature(self):
return self.sense.get_temperature()
def get_humidity(self):
return self.sense.get_humidity()
def get_pressure(self):
return self.sense.get_pressure()
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sense_hat import SenseHat
from calvin.runtime.south.plugins.io.sensors.environmental import base_environmental
class Environmental(base_environmental.EnvironmentalBase):
"""
Raspberry Pi Sense HAT environmental sensors
"""
def __init__(self):
self.sense = SenseHat()
def get_temperature(self):
return self.sense.get_temperature()
def get_humidity(self):
return self.sense.get_humidity()
def get_pressure(self):
return self.sense.get_pressure()
<commit_msg>Sensehat: Allow use of sensors from more than one actor concurrenctly<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sense_hat import SenseHat
from calvin.runtime.south.plugins.io.sensors.environmental import base_environmental
class SenseHatResourceHandler(object):
sense_hat = SenseHat()
def __init__(self):
pass
def get_temperature(self):
return self.sense_hat.get_temperature()
def get_humidity(self):
return self.sense_hat.get_humidity()
def get_pressure(self):
return self.sense_hat.get_pressure()
class Environmental(base_environmental.EnvironmentalBase):
"""
Raspberry Pi Sense HAT environmental sensors
"""
def __init__(self):
self.sense = SenseHatResourceHandler()
def get_temperature(self):
return self.sense.get_temperature()
def get_humidity(self):
return self.sense.get_humidity()
def get_pressure(self):
return self.sense.get_pressure()
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sense_hat import SenseHat
from calvin.runtime.south.plugins.io.sensors.environmental import base_environmental
class Environmental(base_environmental.EnvironmentalBase):
"""
Raspberry Pi Sense HAT environmental sensors
"""
def __init__(self):
self.sense = SenseHat()
def get_temperature(self):
return self.sense.get_temperature()
def get_humidity(self):
return self.sense.get_humidity()
def get_pressure(self):
return self.sense.get_pressure()
Sensehat: Allow use of sensors from more than one actor concurrenctly# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sense_hat import SenseHat
from calvin.runtime.south.plugins.io.sensors.environmental import base_environmental
class SenseHatResourceHandler(object):
sense_hat = SenseHat()
def __init__(self):
pass
def get_temperature(self):
return self.sense_hat.get_temperature()
def get_humidity(self):
return self.sense_hat.get_humidity()
def get_pressure(self):
return self.sense_hat.get_pressure()
class Environmental(base_environmental.EnvironmentalBase):
"""
Raspberry Pi Sense HAT environmental sensors
"""
def __init__(self):
self.sense = SenseHatResourceHandler()
def get_temperature(self):
return self.sense.get_temperature()
def get_humidity(self):
return self.sense.get_humidity()
def get_pressure(self):
return self.sense.get_pressure()
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sense_hat import SenseHat
from calvin.runtime.south.plugins.io.sensors.environmental import base_environmental
class Environmental(base_environmental.EnvironmentalBase):
"""
Raspberry Pi Sense HAT environmental sensors
"""
def __init__(self):
self.sense = SenseHat()
def get_temperature(self):
return self.sense.get_temperature()
def get_humidity(self):
return self.sense.get_humidity()
def get_pressure(self):
return self.sense.get_pressure()
<commit_msg>Sensehat: Allow use of sensors from more than one actor concurrenctly<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sense_hat import SenseHat
from calvin.runtime.south.plugins.io.sensors.environmental import base_environmental
class SenseHatResourceHandler(object):
sense_hat = SenseHat()
def __init__(self):
pass
def get_temperature(self):
return self.sense_hat.get_temperature()
def get_humidity(self):
return self.sense_hat.get_humidity()
def get_pressure(self):
return self.sense_hat.get_pressure()
class Environmental(base_environmental.EnvironmentalBase):
"""
Raspberry Pi Sense HAT environmental sensors
"""
def __init__(self):
self.sense = SenseHatResourceHandler()
def get_temperature(self):
return self.sense.get_temperature()
def get_humidity(self):
return self.sense.get_humidity()
def get_pressure(self):
return self.sense.get_pressure()
|
fa30c15c6bdaa49d3af2e717f559d279da770b46
|
src/streamlink/plugins/arconai.py
|
src/streamlink/plugins/arconai.py
|
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
_url_re = re.compile(r'''https?://(www\.)?arconaitv\.me/stream\.php\?id=\d+''')
_playlist_re = re.compile(r'''source\ssrc=["'](?P<url>[^"']+)["']''')
class ArconaiTv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
headers = {
'User-Agent': useragents.CHROME,
'Referer': self.url
}
res = http.get(self.url, headers=headers)
match = _playlist_re.search(res.text)
if match is None:
return
url = match.group('url')
if url:
self.logger.debug('HLS URL: {0}'.format(url))
yield 'live', HLSStream(self.session, url, headers=headers)
__plugin__ = ArconaiTv
|
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
_url_re = re.compile(r'''https?://(www\.)?arconaitv\.co/stream\.php\?id=\d+''')
_playlist_re = re.compile(r'''source\ssrc=["'](?P<url>[^"']+)["']''')
class ArconaiTv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
headers = {
'User-Agent': useragents.CHROME,
'Referer': self.url
}
res = http.get(self.url, headers=headers)
match = _playlist_re.search(res.text)
if match is None:
return
url = match.group('url')
if url:
self.logger.debug('HLS URL: {0}'.format(url))
yield 'live', HLSStream(self.session, url, headers=headers)
__plugin__ = ArconaiTv
|
Update Arconaitv to new url
|
Update Arconaitv to new url
|
Python
|
bsd-2-clause
|
bastimeyer/streamlink,streamlink/streamlink,back-to/streamlink,melmorabity/streamlink,bastimeyer/streamlink,gravyboat/streamlink,melmorabity/streamlink,javiercantero/streamlink,gravyboat/streamlink,javiercantero/streamlink,wlerin/streamlink,wlerin/streamlink,beardypig/streamlink,chhe/streamlink,streamlink/streamlink,beardypig/streamlink,back-to/streamlink,chhe/streamlink
|
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
_url_re = re.compile(r'''https?://(www\.)?arconaitv\.me/stream\.php\?id=\d+''')
_playlist_re = re.compile(r'''source\ssrc=["'](?P<url>[^"']+)["']''')
class ArconaiTv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
headers = {
'User-Agent': useragents.CHROME,
'Referer': self.url
}
res = http.get(self.url, headers=headers)
match = _playlist_re.search(res.text)
if match is None:
return
url = match.group('url')
if url:
self.logger.debug('HLS URL: {0}'.format(url))
yield 'live', HLSStream(self.session, url, headers=headers)
__plugin__ = ArconaiTv
Update Arconaitv to new url
|
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
_url_re = re.compile(r'''https?://(www\.)?arconaitv\.co/stream\.php\?id=\d+''')
_playlist_re = re.compile(r'''source\ssrc=["'](?P<url>[^"']+)["']''')
class ArconaiTv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
headers = {
'User-Agent': useragents.CHROME,
'Referer': self.url
}
res = http.get(self.url, headers=headers)
match = _playlist_re.search(res.text)
if match is None:
return
url = match.group('url')
if url:
self.logger.debug('HLS URL: {0}'.format(url))
yield 'live', HLSStream(self.session, url, headers=headers)
__plugin__ = ArconaiTv
|
<commit_before>import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
_url_re = re.compile(r'''https?://(www\.)?arconaitv\.me/stream\.php\?id=\d+''')
_playlist_re = re.compile(r'''source\ssrc=["'](?P<url>[^"']+)["']''')
class ArconaiTv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
headers = {
'User-Agent': useragents.CHROME,
'Referer': self.url
}
res = http.get(self.url, headers=headers)
match = _playlist_re.search(res.text)
if match is None:
return
url = match.group('url')
if url:
self.logger.debug('HLS URL: {0}'.format(url))
yield 'live', HLSStream(self.session, url, headers=headers)
__plugin__ = ArconaiTv
<commit_msg>Update Arconaitv to new url<commit_after>
|
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
_url_re = re.compile(r'''https?://(www\.)?arconaitv\.co/stream\.php\?id=\d+''')
_playlist_re = re.compile(r'''source\ssrc=["'](?P<url>[^"']+)["']''')
class ArconaiTv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
headers = {
'User-Agent': useragents.CHROME,
'Referer': self.url
}
res = http.get(self.url, headers=headers)
match = _playlist_re.search(res.text)
if match is None:
return
url = match.group('url')
if url:
self.logger.debug('HLS URL: {0}'.format(url))
yield 'live', HLSStream(self.session, url, headers=headers)
__plugin__ = ArconaiTv
|
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
_url_re = re.compile(r'''https?://(www\.)?arconaitv\.me/stream\.php\?id=\d+''')
_playlist_re = re.compile(r'''source\ssrc=["'](?P<url>[^"']+)["']''')
class ArconaiTv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
headers = {
'User-Agent': useragents.CHROME,
'Referer': self.url
}
res = http.get(self.url, headers=headers)
match = _playlist_re.search(res.text)
if match is None:
return
url = match.group('url')
if url:
self.logger.debug('HLS URL: {0}'.format(url))
yield 'live', HLSStream(self.session, url, headers=headers)
__plugin__ = ArconaiTv
Update Arconaitv to new urlimport re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
_url_re = re.compile(r'''https?://(www\.)?arconaitv\.co/stream\.php\?id=\d+''')
_playlist_re = re.compile(r'''source\ssrc=["'](?P<url>[^"']+)["']''')
class ArconaiTv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
headers = {
'User-Agent': useragents.CHROME,
'Referer': self.url
}
res = http.get(self.url, headers=headers)
match = _playlist_re.search(res.text)
if match is None:
return
url = match.group('url')
if url:
self.logger.debug('HLS URL: {0}'.format(url))
yield 'live', HLSStream(self.session, url, headers=headers)
__plugin__ = ArconaiTv
|
<commit_before>import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
_url_re = re.compile(r'''https?://(www\.)?arconaitv\.me/stream\.php\?id=\d+''')
_playlist_re = re.compile(r'''source\ssrc=["'](?P<url>[^"']+)["']''')
class ArconaiTv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
headers = {
'User-Agent': useragents.CHROME,
'Referer': self.url
}
res = http.get(self.url, headers=headers)
match = _playlist_re.search(res.text)
if match is None:
return
url = match.group('url')
if url:
self.logger.debug('HLS URL: {0}'.format(url))
yield 'live', HLSStream(self.session, url, headers=headers)
__plugin__ = ArconaiTv
<commit_msg>Update Arconaitv to new url<commit_after>import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
_url_re = re.compile(r'''https?://(www\.)?arconaitv\.co/stream\.php\?id=\d+''')
_playlist_re = re.compile(r'''source\ssrc=["'](?P<url>[^"']+)["']''')
class ArconaiTv(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
headers = {
'User-Agent': useragents.CHROME,
'Referer': self.url
}
res = http.get(self.url, headers=headers)
match = _playlist_re.search(res.text)
if match is None:
return
url = match.group('url')
if url:
self.logger.debug('HLS URL: {0}'.format(url))
yield 'live', HLSStream(self.session, url, headers=headers)
__plugin__ = ArconaiTv
|
98aa2b25c63ec5bd6384a9d398a70996799b135e
|
mygpoauth/urls.py
|
mygpoauth/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from mygpoauth import oauth2
urlpatterns = [
# Examples:
# url(r'^$', 'mygpoauth.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='http://mygpo-auth.rtfd.org/'),
name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oauth2/', include('mygpoauth.oauth2.urls', namespace='oauth2')),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from mygpoauth import oauth2
urlpatterns = [
# Examples:
# url(r'^$', 'mygpoauth.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='http://mygpo-auth.rtfd.org/',
permanent=False),
name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oauth2/', include('mygpoauth.oauth2.urls', namespace='oauth2')),
]
|
Make "/" a non-permanent redirect
|
Make "/" a non-permanent redirect
|
Python
|
agpl-3.0
|
gpodder/mygpo-auth,gpodder/mygpo-auth
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from mygpoauth import oauth2
urlpatterns = [
# Examples:
# url(r'^$', 'mygpoauth.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='http://mygpo-auth.rtfd.org/'),
name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oauth2/', include('mygpoauth.oauth2.urls', namespace='oauth2')),
]
Make "/" a non-permanent redirect
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from mygpoauth import oauth2
urlpatterns = [
# Examples:
# url(r'^$', 'mygpoauth.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='http://mygpo-auth.rtfd.org/',
permanent=False),
name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oauth2/', include('mygpoauth.oauth2.urls', namespace='oauth2')),
]
|
<commit_before>from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from mygpoauth import oauth2
urlpatterns = [
# Examples:
# url(r'^$', 'mygpoauth.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='http://mygpo-auth.rtfd.org/'),
name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oauth2/', include('mygpoauth.oauth2.urls', namespace='oauth2')),
]
<commit_msg>Make "/" a non-permanent redirect<commit_after>
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from mygpoauth import oauth2
urlpatterns = [
# Examples:
# url(r'^$', 'mygpoauth.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='http://mygpo-auth.rtfd.org/',
permanent=False),
name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oauth2/', include('mygpoauth.oauth2.urls', namespace='oauth2')),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from mygpoauth import oauth2
urlpatterns = [
# Examples:
# url(r'^$', 'mygpoauth.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='http://mygpo-auth.rtfd.org/'),
name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oauth2/', include('mygpoauth.oauth2.urls', namespace='oauth2')),
]
Make "/" a non-permanent redirectfrom django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from mygpoauth import oauth2
urlpatterns = [
# Examples:
# url(r'^$', 'mygpoauth.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='http://mygpo-auth.rtfd.org/',
permanent=False),
name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oauth2/', include('mygpoauth.oauth2.urls', namespace='oauth2')),
]
|
<commit_before>from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from mygpoauth import oauth2
urlpatterns = [
# Examples:
# url(r'^$', 'mygpoauth.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='http://mygpo-auth.rtfd.org/'),
name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oauth2/', include('mygpoauth.oauth2.urls', namespace='oauth2')),
]
<commit_msg>Make "/" a non-permanent redirect<commit_after>from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from mygpoauth import oauth2
urlpatterns = [
# Examples:
# url(r'^$', 'mygpoauth.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='http://mygpo-auth.rtfd.org/',
permanent=False),
name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oauth2/', include('mygpoauth.oauth2.urls', namespace='oauth2')),
]
|
b706c1a949b10a7dd4b3206c02de8d4abda088a9
|
pytac/mini_project.py
|
pytac/mini_project.py
|
import pytac.load_csv
import pytac.epics
def main():
lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
bpms = lattice.get_elements('BPM')
bpms_n = 0
try:
for bpm in bpms:
bpm.get_pv_name('y')
bpms_n += 1
print 'There exist {0} BPMy elements in the ring.'.format(bpms_n)
except:
print 'Warning! There exists a bpm with no y field.'
if __name__=='__main__':
main()
|
import pytac.load_csv
import pytac.epics
def main():
# First task: print the number of bpm y elements in the ring.
lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
bpms = lattice.get_elements('BPM')
bpms_n = 0
try:
for bpm in bpms:
bpm.get_pv_name('y')
bpms_n += 1
print 'There exist {0} BPMy elements in the ring.'.format(bpms_n)
except:
print 'Warning! There exists a bpm with no y field.'
# Second task: Print each bpmx pv along with the associated value.
for bpm in bpms:
print bpm.get_pv_name('x', 'readback')
print bpm.get_pv_value('x', 'readback')
if __name__=='__main__':
main()
|
Print each bpmx pv name along with the associated value to stdout
|
Print each bpmx pv name along with the associated value to stdout
|
Python
|
apache-2.0
|
razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects
|
import pytac.load_csv
import pytac.epics
def main():
lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
bpms = lattice.get_elements('BPM')
bpms_n = 0
try:
for bpm in bpms:
bpm.get_pv_name('y')
bpms_n += 1
print 'There exist {0} BPMy elements in the ring.'.format(bpms_n)
except:
print 'Warning! There exists a bpm with no y field.'
if __name__=='__main__':
main()
Print each bpmx pv name along with the associated value to stdout
|
import pytac.load_csv
import pytac.epics
def main():
# First task: print the number of bpm y elements in the ring.
lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
bpms = lattice.get_elements('BPM')
bpms_n = 0
try:
for bpm in bpms:
bpm.get_pv_name('y')
bpms_n += 1
print 'There exist {0} BPMy elements in the ring.'.format(bpms_n)
except:
print 'Warning! There exists a bpm with no y field.'
# Second task: Print each bpmx pv along with the associated value.
for bpm in bpms:
print bpm.get_pv_name('x', 'readback')
print bpm.get_pv_value('x', 'readback')
if __name__=='__main__':
main()
|
<commit_before>import pytac.load_csv
import pytac.epics
def main():
lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
bpms = lattice.get_elements('BPM')
bpms_n = 0
try:
for bpm in bpms:
bpm.get_pv_name('y')
bpms_n += 1
print 'There exist {0} BPMy elements in the ring.'.format(bpms_n)
except:
print 'Warning! There exists a bpm with no y field.'
if __name__=='__main__':
main()
<commit_msg>Print each bpmx pv name along with the associated value to stdout<commit_after>
|
import pytac.load_csv
import pytac.epics
def main():
# First task: print the number of bpm y elements in the ring.
lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
bpms = lattice.get_elements('BPM')
bpms_n = 0
try:
for bpm in bpms:
bpm.get_pv_name('y')
bpms_n += 1
print 'There exist {0} BPMy elements in the ring.'.format(bpms_n)
except:
print 'Warning! There exists a bpm with no y field.'
# Second task: Print each bpmx pv along with the associated value.
for bpm in bpms:
print bpm.get_pv_name('x', 'readback')
print bpm.get_pv_value('x', 'readback')
if __name__=='__main__':
main()
|
import pytac.load_csv
import pytac.epics
def main():
lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
bpms = lattice.get_elements('BPM')
bpms_n = 0
try:
for bpm in bpms:
bpm.get_pv_name('y')
bpms_n += 1
print 'There exist {0} BPMy elements in the ring.'.format(bpms_n)
except:
print 'Warning! There exists a bpm with no y field.'
if __name__=='__main__':
main()
Print each bpmx pv name along with the associated value to stdoutimport pytac.load_csv
import pytac.epics
def main():
# First task: print the number of bpm y elements in the ring.
lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
bpms = lattice.get_elements('BPM')
bpms_n = 0
try:
for bpm in bpms:
bpm.get_pv_name('y')
bpms_n += 1
print 'There exist {0} BPMy elements in the ring.'.format(bpms_n)
except:
print 'Warning! There exists a bpm with no y field.'
# Second task: Print each bpmx pv along with the associated value.
for bpm in bpms:
print bpm.get_pv_name('x', 'readback')
print bpm.get_pv_value('x', 'readback')
if __name__=='__main__':
main()
|
<commit_before>import pytac.load_csv
import pytac.epics
def main():
lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
bpms = lattice.get_elements('BPM')
bpms_n = 0
try:
for bpm in bpms:
bpm.get_pv_name('y')
bpms_n += 1
print 'There exist {0} BPMy elements in the ring.'.format(bpms_n)
except:
print 'Warning! There exists a bpm with no y field.'
if __name__=='__main__':
main()
<commit_msg>Print each bpmx pv name along with the associated value to stdout<commit_after>import pytac.load_csv
import pytac.epics
def main():
# First task: print the number of bpm y elements in the ring.
lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
bpms = lattice.get_elements('BPM')
bpms_n = 0
try:
for bpm in bpms:
bpm.get_pv_name('y')
bpms_n += 1
print 'There exist {0} BPMy elements in the ring.'.format(bpms_n)
except:
print 'Warning! There exists a bpm with no y field.'
# Second task: Print each bpmx pv along with the associated value.
for bpm in bpms:
print bpm.get_pv_name('x', 'readback')
print bpm.get_pv_value('x', 'readback')
if __name__=='__main__':
main()
|
0b8b32a044e92f4e996af734d44a2d93d1492684
|
project_code/bulk_fitting.py
|
project_code/bulk_fitting.py
|
'''
Bulk spectral line fitting with SDSS galaxy spectra
'''
import os
from astropy.io import fits
from pandas import concat
# Bring in the package funcs
from specfit import do_specfit
from download_spectra import download_spectra
def bulk_fit(obs_file, output_file, keep_spectra=False):
'''
Downloads files based off of the entries in the given file, performs
spectral line fitting and saves the results to a FITS table.
'''
# Open the file
data_file = fits.open(obs_file)
spectra_data = data_file[1].data
del data_file
num_spectra = spectra_data['Z'].shape[0]
for i in range(num_spectra):
spec_info = spectra_data[i]
# Download the spectrum
spec_name = \
download_spectra(spec_info['PLATEID'], spec_info['FIBREID'],
spec_info['MJD'], spec_info['SURVEY'])
spec_df = do_specfit(spec_name, verbose=False)
if i == 0:
df = spec_df
else:
df = concat([df, spec_df])
if not keep_spectra:
os.system('rm ' + spec_name)
df.write(output_file)
return
|
'''
Bulk spectral line fitting with SDSS galaxy spectra
'''
import os
from astropy.io import fits
from pandas import DataFrame
# Bring in the package funcs
from specfit import do_specfit
from download_spectra import download_spectra
def bulk_fit(obs_file, output_file, keep_spectra=False):
'''
Downloads files based off of the entries in the given file, performs
spectral line fitting and saves the results to a FITS table.
'''
# Open the file
data_file = fits.open(obs_file)
spectra_data = data_file[1].data
del data_file
num_spectra = spectra_data['Z'].shape[0]
for i in range(num_spectra):
spec_info = spectra_data[i]
# Download the spectrum
spec_name = \
download_spectra(spec_info['PLATE'], spec_info['FIBERID'],
spec_info['MJD'], spec_info['SURVEY'])
spec_df = do_specfit(spec_name, verbose=False)
if i == 0:
df = DataFrame(spec_df, columns=[spec_name[:-5]])
else:
df[spec_name[:-5]] = spec_df
if not keep_spectra:
os.system('rm ' + spec_name)
df.to_csv(output_file)
return
|
Correct names, concat dataframes properly
|
Correct names, concat dataframes properly
|
Python
|
mit
|
e-koch/Phys-595
|
'''
Bulk spectral line fitting with SDSS galaxy spectra
'''
import os
from astropy.io import fits
from pandas import concat
# Bring in the package funcs
from specfit import do_specfit
from download_spectra import download_spectra
def bulk_fit(obs_file, output_file, keep_spectra=False):
'''
Downloads files based off of the entries in the given file, performs
spectral line fitting and saves the results to a FITS table.
'''
# Open the file
data_file = fits.open(obs_file)
spectra_data = data_file[1].data
del data_file
num_spectra = spectra_data['Z'].shape[0]
for i in range(num_spectra):
spec_info = spectra_data[i]
# Download the spectrum
spec_name = \
download_spectra(spec_info['PLATEID'], spec_info['FIBREID'],
spec_info['MJD'], spec_info['SURVEY'])
spec_df = do_specfit(spec_name, verbose=False)
if i == 0:
df = spec_df
else:
df = concat([df, spec_df])
if not keep_spectra:
os.system('rm ' + spec_name)
df.write(output_file)
return
Correct names, concat dataframes properly
|
'''
Bulk spectral line fitting with SDSS galaxy spectra
'''
import os
from astropy.io import fits
from pandas import DataFrame
# Bring in the package funcs
from specfit import do_specfit
from download_spectra import download_spectra
def bulk_fit(obs_file, output_file, keep_spectra=False):
'''
Downloads files based off of the entries in the given file, performs
spectral line fitting and saves the results to a FITS table.
'''
# Open the file
data_file = fits.open(obs_file)
spectra_data = data_file[1].data
del data_file
num_spectra = spectra_data['Z'].shape[0]
for i in range(num_spectra):
spec_info = spectra_data[i]
# Download the spectrum
spec_name = \
download_spectra(spec_info['PLATE'], spec_info['FIBERID'],
spec_info['MJD'], spec_info['SURVEY'])
spec_df = do_specfit(spec_name, verbose=False)
if i == 0:
df = DataFrame(spec_df, columns=[spec_name[:-5]])
else:
df[spec_name[:-5]] = spec_df
if not keep_spectra:
os.system('rm ' + spec_name)
df.to_csv(output_file)
return
|
<commit_before>
'''
Bulk spectral line fitting with SDSS galaxy spectra
'''
import os
from astropy.io import fits
from pandas import concat
# Bring in the package funcs
from specfit import do_specfit
from download_spectra import download_spectra
def bulk_fit(obs_file, output_file, keep_spectra=False):
'''
Downloads files based off of the entries in the given file, performs
spectral line fitting and saves the results to a FITS table.
'''
# Open the file
data_file = fits.open(obs_file)
spectra_data = data_file[1].data
del data_file
num_spectra = spectra_data['Z'].shape[0]
for i in range(num_spectra):
spec_info = spectra_data[i]
# Download the spectrum
spec_name = \
download_spectra(spec_info['PLATEID'], spec_info['FIBREID'],
spec_info['MJD'], spec_info['SURVEY'])
spec_df = do_specfit(spec_name, verbose=False)
if i == 0:
df = spec_df
else:
df = concat([df, spec_df])
if not keep_spectra:
os.system('rm ' + spec_name)
df.write(output_file)
return
<commit_msg>Correct names, concat dataframes properly<commit_after>
|
'''
Bulk spectral line fitting with SDSS galaxy spectra
'''
import os
from astropy.io import fits
from pandas import DataFrame
# Bring in the package funcs
from specfit import do_specfit
from download_spectra import download_spectra
def bulk_fit(obs_file, output_file, keep_spectra=False):
'''
Downloads files based off of the entries in the given file, performs
spectral line fitting and saves the results to a FITS table.
'''
# Open the file
data_file = fits.open(obs_file)
spectra_data = data_file[1].data
del data_file
num_spectra = spectra_data['Z'].shape[0]
for i in range(num_spectra):
spec_info = spectra_data[i]
# Download the spectrum
spec_name = \
download_spectra(spec_info['PLATE'], spec_info['FIBERID'],
spec_info['MJD'], spec_info['SURVEY'])
spec_df = do_specfit(spec_name, verbose=False)
if i == 0:
df = DataFrame(spec_df, columns=[spec_name[:-5]])
else:
df[spec_name[:-5]] = spec_df
if not keep_spectra:
os.system('rm ' + spec_name)
df.to_csv(output_file)
return
|
'''
Bulk spectral line fitting with SDSS galaxy spectra
'''
import os
from astropy.io import fits
from pandas import concat
# Bring in the package funcs
from specfit import do_specfit
from download_spectra import download_spectra
def bulk_fit(obs_file, output_file, keep_spectra=False):
'''
Downloads files based off of the entries in the given file, performs
spectral line fitting and saves the results to a FITS table.
'''
# Open the file
data_file = fits.open(obs_file)
spectra_data = data_file[1].data
del data_file
num_spectra = spectra_data['Z'].shape[0]
for i in range(num_spectra):
spec_info = spectra_data[i]
# Download the spectrum
spec_name = \
download_spectra(spec_info['PLATEID'], spec_info['FIBREID'],
spec_info['MJD'], spec_info['SURVEY'])
spec_df = do_specfit(spec_name, verbose=False)
if i == 0:
df = spec_df
else:
df = concat([df, spec_df])
if not keep_spectra:
os.system('rm ' + spec_name)
df.write(output_file)
return
Correct names, concat dataframes properly
'''
Bulk spectral line fitting with SDSS galaxy spectra
'''
import os
from astropy.io import fits
from pandas import DataFrame
# Bring in the package funcs
from specfit import do_specfit
from download_spectra import download_spectra
def bulk_fit(obs_file, output_file, keep_spectra=False):
'''
Downloads files based off of the entries in the given file, performs
spectral line fitting and saves the results to a FITS table.
'''
# Open the file
data_file = fits.open(obs_file)
spectra_data = data_file[1].data
del data_file
num_spectra = spectra_data['Z'].shape[0]
for i in range(num_spectra):
spec_info = spectra_data[i]
# Download the spectrum
spec_name = \
download_spectra(spec_info['PLATE'], spec_info['FIBERID'],
spec_info['MJD'], spec_info['SURVEY'])
spec_df = do_specfit(spec_name, verbose=False)
if i == 0:
df = DataFrame(spec_df, columns=[spec_name[:-5]])
else:
df[spec_name[:-5]] = spec_df
if not keep_spectra:
os.system('rm ' + spec_name)
df.to_csv(output_file)
return
|
<commit_before>
'''
Bulk spectral line fitting with SDSS galaxy spectra
'''
import os
from astropy.io import fits
from pandas import concat
# Bring in the package funcs
from specfit import do_specfit
from download_spectra import download_spectra
def bulk_fit(obs_file, output_file, keep_spectra=False):
'''
Downloads files based off of the entries in the given file, performs
spectral line fitting and saves the results to a FITS table.
'''
# Open the file
data_file = fits.open(obs_file)
spectra_data = data_file[1].data
del data_file
num_spectra = spectra_data['Z'].shape[0]
for i in range(num_spectra):
spec_info = spectra_data[i]
# Download the spectrum
spec_name = \
download_spectra(spec_info['PLATEID'], spec_info['FIBREID'],
spec_info['MJD'], spec_info['SURVEY'])
spec_df = do_specfit(spec_name, verbose=False)
if i == 0:
df = spec_df
else:
df = concat([df, spec_df])
if not keep_spectra:
os.system('rm ' + spec_name)
df.write(output_file)
return
<commit_msg>Correct names, concat dataframes properly<commit_after>
'''
Bulk spectral line fitting with SDSS galaxy spectra
'''
import os
from astropy.io import fits
from pandas import DataFrame
# Bring in the package funcs
from specfit import do_specfit
from download_spectra import download_spectra
def bulk_fit(obs_file, output_file, keep_spectra=False):
'''
Downloads files based off of the entries in the given file, performs
spectral line fitting and saves the results to a FITS table.
'''
# Open the file
data_file = fits.open(obs_file)
spectra_data = data_file[1].data
del data_file
num_spectra = spectra_data['Z'].shape[0]
for i in range(num_spectra):
spec_info = spectra_data[i]
# Download the spectrum
spec_name = \
download_spectra(spec_info['PLATE'], spec_info['FIBERID'],
spec_info['MJD'], spec_info['SURVEY'])
spec_df = do_specfit(spec_name, verbose=False)
if i == 0:
df = DataFrame(spec_df, columns=[spec_name[:-5]])
else:
df[spec_name[:-5]] = spec_df
if not keep_spectra:
os.system('rm ' + spec_name)
df.to_csv(output_file)
return
|
3e41a447076c4aa183923700c1c8203afdf07377
|
bitbots_body_behavior/src/bitbots_body_behavior/decisions/ball_close.py
|
bitbots_body_behavior/src/bitbots_body_behavior/decisions/ball_close.py
|
from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = self.blackboard.config['ball_close_distance']
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
|
from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = parameters.get("distance", self.blackboard.config['ball_close_distance'])
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
|
Add param to ball close
|
Add param to ball close
|
Python
|
bsd-3-clause
|
bit-bots/bitbots_behaviour
|
from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = self.blackboard.config['ball_close_distance']
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
Add param to ball close
|
from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = parameters.get("distance", self.blackboard.config['ball_close_distance'])
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
|
<commit_before>from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = self.blackboard.config['ball_close_distance']
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
<commit_msg>Add param to ball close<commit_after>
|
from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = parameters.get("distance", self.blackboard.config['ball_close_distance'])
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
|
from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = self.blackboard.config['ball_close_distance']
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
Add param to ball closefrom dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = parameters.get("distance", self.blackboard.config['ball_close_distance'])
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
|
<commit_before>from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = self.blackboard.config['ball_close_distance']
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
<commit_msg>Add param to ball close<commit_after>from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = parameters.get("distance", self.blackboard.config['ball_close_distance'])
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
|
ba4953423450c3bf2924aa76f37694b405c8ee85
|
parse-zmmailbox-ids.py
|
parse-zmmailbox-ids.py
|
import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
for an_id in ids:
print an_id
if __name__ == '__main__':
main()
|
import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+(\-?\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
print ','.join(ids)
if __name__ == '__main__':
main()
|
Include '-' in ID, print IDs separated by ','
|
Include '-' in ID, print IDs separated by ','
|
Python
|
apache-2.0
|
hgdeoro/zimbra7-to-zimbra8-password-migrator
|
import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
for an_id in ids:
print an_id
if __name__ == '__main__':
main()
Include '-' in ID, print IDs separated by ','
|
import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+(\-?\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
print ','.join(ids)
if __name__ == '__main__':
main()
|
<commit_before>import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
for an_id in ids:
print an_id
if __name__ == '__main__':
main()
<commit_msg>Include '-' in ID, print IDs separated by ','<commit_after>
|
import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+(\-?\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
print ','.join(ids)
if __name__ == '__main__':
main()
|
import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
for an_id in ids:
print an_id
if __name__ == '__main__':
main()
Include '-' in ID, print IDs separated by ','import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+(\-?\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
print ','.join(ids)
if __name__ == '__main__':
main()
|
<commit_before>import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
for an_id in ids:
print an_id
if __name__ == '__main__':
main()
<commit_msg>Include '-' in ID, print IDs separated by ','<commit_after>import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+(\-?\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
print ','.join(ids)
if __name__ == '__main__':
main()
|
62297b3c937d386b759ec14a078cee36f2550d44
|
src/aiy/_drivers/_alsa.py
|
src/aiy/_drivers/_alsa.py
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for ALSA tools, including aplay and arecord."""
def sample_width_to_string(sample_width):
"""Convert sample width (bytes) to ALSA format string."""
return {1: 's8', 2: 's16', 4: 's32'}[sample_width]
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for ALSA tools, including aplay and arecord."""
def sample_width_to_string(sample_width):
"""Convert sample width (bytes) to ALSA format string."""
return {1: 's8', 2: 's16', 4: 's32'}.get(sample_width, None)
|
Return None with invalid sample_width from sample_width_to_string.
|
Return None with invalid sample_width from sample_width_to_string.
|
Python
|
apache-2.0
|
google/aiyprojects-raspbian,t1m0thyj/aiyprojects-raspbian,google/aiyprojects-raspbian,t1m0thyj/aiyprojects-raspbian,google/aiyprojects-raspbian,google/aiyprojects-raspbian,google/aiyprojects-raspbian
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for ALSA tools, including aplay and arecord."""
def sample_width_to_string(sample_width):
"""Convert sample width (bytes) to ALSA format string."""
return {1: 's8', 2: 's16', 4: 's32'}[sample_width]
Return None with invalid sample_width from sample_width_to_string.
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for ALSA tools, including aplay and arecord."""
def sample_width_to_string(sample_width):
"""Convert sample width (bytes) to ALSA format string."""
return {1: 's8', 2: 's16', 4: 's32'}.get(sample_width, None)
|
<commit_before># Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for ALSA tools, including aplay and arecord."""
def sample_width_to_string(sample_width):
"""Convert sample width (bytes) to ALSA format string."""
return {1: 's8', 2: 's16', 4: 's32'}[sample_width]
<commit_msg>Return None with invalid sample_width from sample_width_to_string.<commit_after>
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for ALSA tools, including aplay and arecord."""
def sample_width_to_string(sample_width):
"""Convert sample width (bytes) to ALSA format string."""
return {1: 's8', 2: 's16', 4: 's32'}.get(sample_width, None)
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for ALSA tools, including aplay and arecord."""
def sample_width_to_string(sample_width):
"""Convert sample width (bytes) to ALSA format string."""
return {1: 's8', 2: 's16', 4: 's32'}[sample_width]
Return None with invalid sample_width from sample_width_to_string.# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for ALSA tools, including aplay and arecord."""
def sample_width_to_string(sample_width):
"""Convert sample width (bytes) to ALSA format string."""
return {1: 's8', 2: 's16', 4: 's32'}.get(sample_width, None)
|
<commit_before># Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for ALSA tools, including aplay and arecord."""
def sample_width_to_string(sample_width):
"""Convert sample width (bytes) to ALSA format string."""
return {1: 's8', 2: 's16', 4: 's32'}[sample_width]
<commit_msg>Return None with invalid sample_width from sample_width_to_string.<commit_after># Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for ALSA tools, including aplay and arecord."""
def sample_width_to_string(sample_width):
"""Convert sample width (bytes) to ALSA format string."""
return {1: 's8', 2: 's16', 4: 's32'}.get(sample_width, None)
|
ec07e139b5585a8ed9bed14426dac987267ebf05
|
sbtsettings.py
|
sbtsettings.py
|
import sublime
class SBTSettings(object):
def __init__(self, window):
self.window = window
self._plugin_settings = sublime.load_settings('SublimeSBT.sublime-settings')
def sbt_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('sbt_command'))
def play_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('play_command'))
def color_scheme(self):
self.get('color_scheme')
def get(self, name):
return self._view_settings().get(name, self._plugin_settings.get(name))
def add_on_change(self, on_change):
self._plugin_settings.add_on_change('SublimeSBT', on_change)
def _view_settings(self):
return self.window.active_view().settings().get('SublimeSBT', {})
|
import sublime
from util import maybe
class SBTSettings(object):
def __init__(self, window):
self.window = window
self._plugin_settings = sublime.load_settings('SublimeSBT.sublime-settings')
def sbt_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('sbt_command'))
def play_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('play_command'))
def color_scheme(self):
self.get('color_scheme')
def get(self, name):
return self._view_settings().get(name, self._plugin_settings.get(name))
def add_on_change(self, on_change):
self._plugin_settings.add_on_change('SublimeSBT', on_change)
def _view_settings(self):
for view in maybe(self.window.active_view()):
return view.settings().get('SublimeSBT', {})
return {}
|
Fix AttributeError getting project settings when no active view
|
Fix AttributeError getting project settings when no active view
|
Python
|
mit
|
jarhart/SublimeSBT
|
import sublime
class SBTSettings(object):
def __init__(self, window):
self.window = window
self._plugin_settings = sublime.load_settings('SublimeSBT.sublime-settings')
def sbt_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('sbt_command'))
def play_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('play_command'))
def color_scheme(self):
self.get('color_scheme')
def get(self, name):
return self._view_settings().get(name, self._plugin_settings.get(name))
def add_on_change(self, on_change):
self._plugin_settings.add_on_change('SublimeSBT', on_change)
def _view_settings(self):
return self.window.active_view().settings().get('SublimeSBT', {})
Fix AttributeError getting project settings when no active view
|
import sublime
from util import maybe
class SBTSettings(object):
def __init__(self, window):
self.window = window
self._plugin_settings = sublime.load_settings('SublimeSBT.sublime-settings')
def sbt_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('sbt_command'))
def play_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('play_command'))
def color_scheme(self):
self.get('color_scheme')
def get(self, name):
return self._view_settings().get(name, self._plugin_settings.get(name))
def add_on_change(self, on_change):
self._plugin_settings.add_on_change('SublimeSBT', on_change)
def _view_settings(self):
for view in maybe(self.window.active_view()):
return view.settings().get('SublimeSBT', {})
return {}
|
<commit_before>import sublime
class SBTSettings(object):
def __init__(self, window):
self.window = window
self._plugin_settings = sublime.load_settings('SublimeSBT.sublime-settings')
def sbt_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('sbt_command'))
def play_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('play_command'))
def color_scheme(self):
self.get('color_scheme')
def get(self, name):
return self._view_settings().get(name, self._plugin_settings.get(name))
def add_on_change(self, on_change):
self._plugin_settings.add_on_change('SublimeSBT', on_change)
def _view_settings(self):
return self.window.active_view().settings().get('SublimeSBT', {})
<commit_msg>Fix AttributeError getting project settings when no active view<commit_after>
|
import sublime
from util import maybe
class SBTSettings(object):
def __init__(self, window):
self.window = window
self._plugin_settings = sublime.load_settings('SublimeSBT.sublime-settings')
def sbt_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('sbt_command'))
def play_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('play_command'))
def color_scheme(self):
self.get('color_scheme')
def get(self, name):
return self._view_settings().get(name, self._plugin_settings.get(name))
def add_on_change(self, on_change):
self._plugin_settings.add_on_change('SublimeSBT', on_change)
def _view_settings(self):
for view in maybe(self.window.active_view()):
return view.settings().get('SublimeSBT', {})
return {}
|
import sublime
class SBTSettings(object):
def __init__(self, window):
self.window = window
self._plugin_settings = sublime.load_settings('SublimeSBT.sublime-settings')
def sbt_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('sbt_command'))
def play_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('play_command'))
def color_scheme(self):
self.get('color_scheme')
def get(self, name):
return self._view_settings().get(name, self._plugin_settings.get(name))
def add_on_change(self, on_change):
self._plugin_settings.add_on_change('SublimeSBT', on_change)
def _view_settings(self):
return self.window.active_view().settings().get('SublimeSBT', {})
Fix AttributeError getting project settings when no active viewimport sublime
from util import maybe
class SBTSettings(object):
def __init__(self, window):
self.window = window
self._plugin_settings = sublime.load_settings('SublimeSBT.sublime-settings')
def sbt_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('sbt_command'))
def play_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('play_command'))
def color_scheme(self):
self.get('color_scheme')
def get(self, name):
return self._view_settings().get(name, self._plugin_settings.get(name))
def add_on_change(self, on_change):
self._plugin_settings.add_on_change('SublimeSBT', on_change)
def _view_settings(self):
for view in maybe(self.window.active_view()):
return view.settings().get('SublimeSBT', {})
return {}
|
<commit_before>import sublime
class SBTSettings(object):
def __init__(self, window):
self.window = window
self._plugin_settings = sublime.load_settings('SublimeSBT.sublime-settings')
def sbt_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('sbt_command'))
def play_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('play_command'))
def color_scheme(self):
self.get('color_scheme')
def get(self, name):
return self._view_settings().get(name, self._plugin_settings.get(name))
def add_on_change(self, on_change):
self._plugin_settings.add_on_change('SublimeSBT', on_change)
def _view_settings(self):
return self.window.active_view().settings().get('SublimeSBT', {})
<commit_msg>Fix AttributeError getting project settings when no active view<commit_after>import sublime
from util import maybe
class SBTSettings(object):
def __init__(self, window):
self.window = window
self._plugin_settings = sublime.load_settings('SublimeSBT.sublime-settings')
def sbt_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('sbt_command'))
def play_command(self):
return self._view_settings().get('sbt_command', self._plugin_settings.get('play_command'))
def color_scheme(self):
self.get('color_scheme')
def get(self, name):
return self._view_settings().get(name, self._plugin_settings.get(name))
def add_on_change(self, on_change):
self._plugin_settings.add_on_change('SublimeSBT', on_change)
def _view_settings(self):
for view in maybe(self.window.active_view()):
return view.settings().get('SublimeSBT', {})
return {}
|
e1b23cdc089b3a05ae4959c9859e16e5e21b5c91
|
apps/careeropportunity/views.py
|
apps/careeropportunity/views.py
|
#-*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from apps.careeropportunity.models import CareerOpportunity
import datetime
def index(request):
opportunities = CareerOpportunity.objects.all()
return render_to_response('careeropportunity/index.html', \
{'opportunities': opportunities}, \
context_instance=RequestContext(request))
def details(request, opportunity_id):
opportunity = get_object_or_404(CareerOpportunity, pk=opportunity_id)
return render_to_response('careeropportunity/details.html', \
{'opportunity': opportunity}, \
context_instance=RequestContext(request))
|
#-*- coding: utf-8 -*-
from datetime import datetime
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from apps.careeropportunity.models import CareerOpportunity
def index(request):
opportunities = CareerOpportunity.objects.filter(start__lte=datetime.now(), end__gte=datetime.now())
return render_to_response('careeropportunity/index.html', \
{'opportunities': opportunities}, \
context_instance=RequestContext(request))
def details(request, opportunity_id):
opportunity = get_object_or_404(CareerOpportunity, pk=opportunity_id)
return render_to_response('careeropportunity/details.html', \
{'opportunity': opportunity}, \
context_instance=RequestContext(request))
|
Make careerop only display active ops
|
Make careerop only display active ops
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
#-*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from apps.careeropportunity.models import CareerOpportunity
import datetime
def index(request):
opportunities = CareerOpportunity.objects.all()
return render_to_response('careeropportunity/index.html', \
{'opportunities': opportunities}, \
context_instance=RequestContext(request))
def details(request, opportunity_id):
opportunity = get_object_or_404(CareerOpportunity, pk=opportunity_id)
return render_to_response('careeropportunity/details.html', \
{'opportunity': opportunity}, \
context_instance=RequestContext(request))
Make careerop only display active ops
|
#-*- coding: utf-8 -*-
from datetime import datetime
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from apps.careeropportunity.models import CareerOpportunity
def index(request):
opportunities = CareerOpportunity.objects.filter(start__lte=datetime.now(), end__gte=datetime.now())
return render_to_response('careeropportunity/index.html', \
{'opportunities': opportunities}, \
context_instance=RequestContext(request))
def details(request, opportunity_id):
opportunity = get_object_or_404(CareerOpportunity, pk=opportunity_id)
return render_to_response('careeropportunity/details.html', \
{'opportunity': opportunity}, \
context_instance=RequestContext(request))
|
<commit_before>#-*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from apps.careeropportunity.models import CareerOpportunity
import datetime
def index(request):
opportunities = CareerOpportunity.objects.all()
return render_to_response('careeropportunity/index.html', \
{'opportunities': opportunities}, \
context_instance=RequestContext(request))
def details(request, opportunity_id):
opportunity = get_object_or_404(CareerOpportunity, pk=opportunity_id)
return render_to_response('careeropportunity/details.html', \
{'opportunity': opportunity}, \
context_instance=RequestContext(request))
<commit_msg>Make careerop only display active ops<commit_after>
|
#-*- coding: utf-8 -*-
from datetime import datetime
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from apps.careeropportunity.models import CareerOpportunity
def index(request):
opportunities = CareerOpportunity.objects.filter(start__lte=datetime.now(), end__gte=datetime.now())
return render_to_response('careeropportunity/index.html', \
{'opportunities': opportunities}, \
context_instance=RequestContext(request))
def details(request, opportunity_id):
opportunity = get_object_or_404(CareerOpportunity, pk=opportunity_id)
return render_to_response('careeropportunity/details.html', \
{'opportunity': opportunity}, \
context_instance=RequestContext(request))
|
#-*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from apps.careeropportunity.models import CareerOpportunity
import datetime
def index(request):
opportunities = CareerOpportunity.objects.all()
return render_to_response('careeropportunity/index.html', \
{'opportunities': opportunities}, \
context_instance=RequestContext(request))
def details(request, opportunity_id):
opportunity = get_object_or_404(CareerOpportunity, pk=opportunity_id)
return render_to_response('careeropportunity/details.html', \
{'opportunity': opportunity}, \
context_instance=RequestContext(request))
Make careerop only display active ops#-*- coding: utf-8 -*-
from datetime import datetime
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from apps.careeropportunity.models import CareerOpportunity
def index(request):
opportunities = CareerOpportunity.objects.filter(start__lte=datetime.now(), end__gte=datetime.now())
return render_to_response('careeropportunity/index.html', \
{'opportunities': opportunities}, \
context_instance=RequestContext(request))
def details(request, opportunity_id):
opportunity = get_object_or_404(CareerOpportunity, pk=opportunity_id)
return render_to_response('careeropportunity/details.html', \
{'opportunity': opportunity}, \
context_instance=RequestContext(request))
|
<commit_before>#-*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from apps.careeropportunity.models import CareerOpportunity
import datetime
def index(request):
opportunities = CareerOpportunity.objects.all()
return render_to_response('careeropportunity/index.html', \
{'opportunities': opportunities}, \
context_instance=RequestContext(request))
def details(request, opportunity_id):
opportunity = get_object_or_404(CareerOpportunity, pk=opportunity_id)
return render_to_response('careeropportunity/details.html', \
{'opportunity': opportunity}, \
context_instance=RequestContext(request))
<commit_msg>Make careerop only display active ops<commit_after>#-*- coding: utf-8 -*-
from datetime import datetime
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from apps.careeropportunity.models import CareerOpportunity
def index(request):
opportunities = CareerOpportunity.objects.filter(start__lte=datetime.now(), end__gte=datetime.now())
return render_to_response('careeropportunity/index.html', \
{'opportunities': opportunities}, \
context_instance=RequestContext(request))
def details(request, opportunity_id):
opportunity = get_object_or_404(CareerOpportunity, pk=opportunity_id)
return render_to_response('careeropportunity/details.html', \
{'opportunity': opportunity}, \
context_instance=RequestContext(request))
|
fe167bfd25c0c86b3c6fb5ef76eb24036ad2b6da
|
tests/ne_np/__init__.py
|
tests/ne_np/__init__.py
|
from __future__ import unicode_literals
import unittest
import re
from faker import Factory
from faker.utils import text
from .. import string_types
class ne_NP_FactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
countries = Provider.countries
country = self.factory.country()
assert country
assert isinstance(country, string_types)
assert country in countries
districts = Provider.districts
district = self.factory.district()
assert district
assert isinstance(district, string_types)
assert district in districts
cities = Provider.cities
city = self.factory.city()
assert city
assert isinstance(city, string_types)
assert city in cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
first_names = Provider.first_names
name = self.factory.name()
first_name, last_name = name.split()
assert first_name
assert isinstance(first_name, string_types)
assert first_name in first_names
last_names = Provider.last_names
assert last_names
assert isinstance(last_name, string_types)
assert last_name in last_names
|
from __future__ import unicode_literals
import unittest
from faker import Factory
from .. import string_types
class NeNPFactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
country = self.factory.country()
assert isinstance(country, string_types)
assert country in Provider.countries
district = self.factory.district()
assert isinstance(district, string_types)
assert district in Provider.districts
city = self.factory.city()
assert isinstance(city, string_types)
assert city in Provider.cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
for _ in range(10000):
name = self.factory.name().split()
assert all(isinstance(n, string_types) for n in name)
# name should always be 2-3 words. If 3, first word
# should be a prefix.
assert name[-2] in Provider.first_names
assert name[-1] in Provider.last_names
prefixes = Provider.prefixes_male + Provider.prefixes_female
if len(name) == 3:
assert name[0] in prefixes
|
Fix incorrect ne_NP locale tests
|
Fix incorrect ne_NP locale tests
This test incorrectly assumes a call to name() will
yield only a first/last name, which isn't always true for this
locale. I suspect it hasn't been uncovered yet because the
tests are seeded the same at the beginning of every run. It only
becomes a problem when you start moving tests around. This change
addresses the incorrect assertions as well as makes the file PEP8
compliant.
|
Python
|
mit
|
trtd/faker,joke2k/faker,joke2k/faker,danhuss/faker
|
from __future__ import unicode_literals
import unittest
import re
from faker import Factory
from faker.utils import text
from .. import string_types
class ne_NP_FactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
countries = Provider.countries
country = self.factory.country()
assert country
assert isinstance(country, string_types)
assert country in countries
districts = Provider.districts
district = self.factory.district()
assert district
assert isinstance(district, string_types)
assert district in districts
cities = Provider.cities
city = self.factory.city()
assert city
assert isinstance(city, string_types)
assert city in cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
first_names = Provider.first_names
name = self.factory.name()
first_name, last_name = name.split()
assert first_name
assert isinstance(first_name, string_types)
assert first_name in first_names
last_names = Provider.last_names
assert last_names
assert isinstance(last_name, string_types)
assert last_name in last_names
Fix incorrect ne_NP locale tests
This test incorrectly assumes a call to name() will
yield only a first/last name, which isn't always true for this
locale. I suspect it hasn't been uncovered yet because the
tests are seeded the same at the beginning of every run. It only
becomes a problem when you start moving tests around. This change
addresses the incorrect assertions as well as makes the file PEP8
compliant.
|
from __future__ import unicode_literals
import unittest
from faker import Factory
from .. import string_types
class NeNPFactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
country = self.factory.country()
assert isinstance(country, string_types)
assert country in Provider.countries
district = self.factory.district()
assert isinstance(district, string_types)
assert district in Provider.districts
city = self.factory.city()
assert isinstance(city, string_types)
assert city in Provider.cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
for _ in range(10000):
name = self.factory.name().split()
assert all(isinstance(n, string_types) for n in name)
# name should always be 2-3 words. If 3, first word
# should be a prefix.
assert name[-2] in Provider.first_names
assert name[-1] in Provider.last_names
prefixes = Provider.prefixes_male + Provider.prefixes_female
if len(name) == 3:
assert name[0] in prefixes
|
<commit_before>from __future__ import unicode_literals
import unittest
import re
from faker import Factory
from faker.utils import text
from .. import string_types
class ne_NP_FactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
countries = Provider.countries
country = self.factory.country()
assert country
assert isinstance(country, string_types)
assert country in countries
districts = Provider.districts
district = self.factory.district()
assert district
assert isinstance(district, string_types)
assert district in districts
cities = Provider.cities
city = self.factory.city()
assert city
assert isinstance(city, string_types)
assert city in cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
first_names = Provider.first_names
name = self.factory.name()
first_name, last_name = name.split()
assert first_name
assert isinstance(first_name, string_types)
assert first_name in first_names
last_names = Provider.last_names
assert last_names
assert isinstance(last_name, string_types)
assert last_name in last_names
<commit_msg>Fix incorrect ne_NP locale tests
This test incorrectly assumes a call to name() will
yield only a first/last name, which isn't always true for this
locale. I suspect it hasn't been uncovered yet because the
tests are seeded the same at the beginning of every run. It only
becomes a problem when you start moving tests around. This change
addresses the incorrect assertions as well as makes the file PEP8
compliant.<commit_after>
|
from __future__ import unicode_literals
import unittest
from faker import Factory
from .. import string_types
class NeNPFactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
country = self.factory.country()
assert isinstance(country, string_types)
assert country in Provider.countries
district = self.factory.district()
assert isinstance(district, string_types)
assert district in Provider.districts
city = self.factory.city()
assert isinstance(city, string_types)
assert city in Provider.cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
for _ in range(10000):
name = self.factory.name().split()
assert all(isinstance(n, string_types) for n in name)
# name should always be 2-3 words. If 3, first word
# should be a prefix.
assert name[-2] in Provider.first_names
assert name[-1] in Provider.last_names
prefixes = Provider.prefixes_male + Provider.prefixes_female
if len(name) == 3:
assert name[0] in prefixes
|
from __future__ import unicode_literals
import unittest
import re
from faker import Factory
from faker.utils import text
from .. import string_types
class ne_NP_FactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
countries = Provider.countries
country = self.factory.country()
assert country
assert isinstance(country, string_types)
assert country in countries
districts = Provider.districts
district = self.factory.district()
assert district
assert isinstance(district, string_types)
assert district in districts
cities = Provider.cities
city = self.factory.city()
assert city
assert isinstance(city, string_types)
assert city in cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
first_names = Provider.first_names
name = self.factory.name()
first_name, last_name = name.split()
assert first_name
assert isinstance(first_name, string_types)
assert first_name in first_names
last_names = Provider.last_names
assert last_names
assert isinstance(last_name, string_types)
assert last_name in last_names
Fix incorrect ne_NP locale tests
This test incorrectly assumes a call to name() will
yield only a first/last name, which isn't always true for this
locale. I suspect it hasn't been uncovered yet because the
tests are seeded the same at the beginning of every run. It only
becomes a problem when you start moving tests around. This change
addresses the incorrect assertions as well as makes the file PEP8
compliant.from __future__ import unicode_literals
import unittest
from faker import Factory
from .. import string_types
class NeNPFactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
country = self.factory.country()
assert isinstance(country, string_types)
assert country in Provider.countries
district = self.factory.district()
assert isinstance(district, string_types)
assert district in Provider.districts
city = self.factory.city()
assert isinstance(city, string_types)
assert city in Provider.cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
for _ in range(10000):
name = self.factory.name().split()
assert all(isinstance(n, string_types) for n in name)
# name should always be 2-3 words. If 3, first word
# should be a prefix.
assert name[-2] in Provider.first_names
assert name[-1] in Provider.last_names
prefixes = Provider.prefixes_male + Provider.prefixes_female
if len(name) == 3:
assert name[0] in prefixes
|
<commit_before>from __future__ import unicode_literals
import unittest
import re
from faker import Factory
from faker.utils import text
from .. import string_types
class ne_NP_FactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
countries = Provider.countries
country = self.factory.country()
assert country
assert isinstance(country, string_types)
assert country in countries
districts = Provider.districts
district = self.factory.district()
assert district
assert isinstance(district, string_types)
assert district in districts
cities = Provider.cities
city = self.factory.city()
assert city
assert isinstance(city, string_types)
assert city in cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
first_names = Provider.first_names
name = self.factory.name()
first_name, last_name = name.split()
assert first_name
assert isinstance(first_name, string_types)
assert first_name in first_names
last_names = Provider.last_names
assert last_names
assert isinstance(last_name, string_types)
assert last_name in last_names
<commit_msg>Fix incorrect ne_NP locale tests
This test incorrectly assumes a call to name() will
yield only a first/last name, which isn't always true for this
locale. I suspect it hasn't been uncovered yet because the
tests are seeded the same at the beginning of every run. It only
becomes a problem when you start moving tests around. This change
addresses the incorrect assertions as well as makes the file PEP8
compliant.<commit_after>from __future__ import unicode_literals
import unittest
from faker import Factory
from .. import string_types
class NeNPFactoryTestCase(unittest.TestCase):
def setUp(self):
self.factory = Factory.create('ne_NP')
def test_address(self):
from faker.providers.address.ne_NP import Provider
country = self.factory.country()
assert isinstance(country, string_types)
assert country in Provider.countries
district = self.factory.district()
assert isinstance(district, string_types)
assert district in Provider.districts
city = self.factory.city()
assert isinstance(city, string_types)
assert city in Provider.cities
def test_names(self):
from faker.providers.person.ne_NP import Provider
for _ in range(10000):
name = self.factory.name().split()
assert all(isinstance(n, string_types) for n in name)
# name should always be 2-3 words. If 3, first word
# should be a prefix.
assert name[-2] in Provider.first_names
assert name[-1] in Provider.last_names
prefixes = Provider.prefixes_male + Provider.prefixes_female
if len(name) == 3:
assert name[0] in prefixes
|
6b9d9c33b4d68a008bb992b9a11ab2f02a4d5cbd
|
shelltest/tests/test_runner.py
|
shelltest/tests/test_runner.py
|
import tempfile
import StringIO
import pytest
from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner
@pytest.fixture
def tests():
return [ShellTest('echo hello', 'hello\n', ShellTestSource('', 0)),
ShellTest('echo $?', '0\n', ShellTestSource('', 2))]
def test_run(tests):
r = ShellTestRunner(tests)
results = r.run()
assert len(results) == 2
assert results[0].success
assert results[0].ret_code == 0
assert results[0].test == tests[0]
assert results[0].actual_output == tests[0].expected_output
assert results[1].success
assert results[1].ret_code == 0
assert results[1].test == tests[1]
assert results[1].actual_output == tests[1].expected_output
|
import tempfile
import StringIO
import pytest
from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner
def runner(tests):
tests = [ShellTest(cmd, output, ShellTestSource('', 0)) for cmd, output in tests]
return ShellTestRunner(tests)
@pytest.mark.parametrize("cmd,output,ret_code,success", (
('echo hello', 'hello\n', 0, True),
('echo $?', '0\n', 0, True),
('exit 42', '', 42, True)))
def test_echo(cmd, output, ret_code, success):
r = runner([(cmd, output)])
res = r.run()[0]
assert res.success == success
assert res.ret_code == ret_code
assert res.test == r.tests[0]
assert res.actual_output == output
|
Update runner tests to use parameters
|
Update runner tests to use parameters
|
Python
|
mit
|
jthacker/shelltest,jthacker/shelltest
|
import tempfile
import StringIO
import pytest
from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner
@pytest.fixture
def tests():
return [ShellTest('echo hello', 'hello\n', ShellTestSource('', 0)),
ShellTest('echo $?', '0\n', ShellTestSource('', 2))]
def test_run(tests):
r = ShellTestRunner(tests)
results = r.run()
assert len(results) == 2
assert results[0].success
assert results[0].ret_code == 0
assert results[0].test == tests[0]
assert results[0].actual_output == tests[0].expected_output
assert results[1].success
assert results[1].ret_code == 0
assert results[1].test == tests[1]
assert results[1].actual_output == tests[1].expected_output
Update runner tests to use parameters
|
import tempfile
import StringIO
import pytest
from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner
def runner(tests):
tests = [ShellTest(cmd, output, ShellTestSource('', 0)) for cmd, output in tests]
return ShellTestRunner(tests)
@pytest.mark.parametrize("cmd,output,ret_code,success", (
('echo hello', 'hello\n', 0, True),
('echo $?', '0\n', 0, True),
('exit 42', '', 42, True)))
def test_echo(cmd, output, ret_code, success):
r = runner([(cmd, output)])
res = r.run()[0]
assert res.success == success
assert res.ret_code == ret_code
assert res.test == r.tests[0]
assert res.actual_output == output
|
<commit_before>import tempfile
import StringIO
import pytest
from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner
@pytest.fixture
def tests():
return [ShellTest('echo hello', 'hello\n', ShellTestSource('', 0)),
ShellTest('echo $?', '0\n', ShellTestSource('', 2))]
def test_run(tests):
r = ShellTestRunner(tests)
results = r.run()
assert len(results) == 2
assert results[0].success
assert results[0].ret_code == 0
assert results[0].test == tests[0]
assert results[0].actual_output == tests[0].expected_output
assert results[1].success
assert results[1].ret_code == 0
assert results[1].test == tests[1]
assert results[1].actual_output == tests[1].expected_output
<commit_msg>Update runner tests to use parameters<commit_after>
|
import tempfile
import StringIO
import pytest
from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner
def runner(tests):
tests = [ShellTest(cmd, output, ShellTestSource('', 0)) for cmd, output in tests]
return ShellTestRunner(tests)
@pytest.mark.parametrize("cmd,output,ret_code,success", (
('echo hello', 'hello\n', 0, True),
('echo $?', '0\n', 0, True),
('exit 42', '', 42, True)))
def test_echo(cmd, output, ret_code, success):
r = runner([(cmd, output)])
res = r.run()[0]
assert res.success == success
assert res.ret_code == ret_code
assert res.test == r.tests[0]
assert res.actual_output == output
|
import tempfile
import StringIO
import pytest
from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner
@pytest.fixture
def tests():
return [ShellTest('echo hello', 'hello\n', ShellTestSource('', 0)),
ShellTest('echo $?', '0\n', ShellTestSource('', 2))]
def test_run(tests):
r = ShellTestRunner(tests)
results = r.run()
assert len(results) == 2
assert results[0].success
assert results[0].ret_code == 0
assert results[0].test == tests[0]
assert results[0].actual_output == tests[0].expected_output
assert results[1].success
assert results[1].ret_code == 0
assert results[1].test == tests[1]
assert results[1].actual_output == tests[1].expected_output
Update runner tests to use parametersimport tempfile
import StringIO
import pytest
from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner
def runner(tests):
tests = [ShellTest(cmd, output, ShellTestSource('', 0)) for cmd, output in tests]
return ShellTestRunner(tests)
@pytest.mark.parametrize("cmd,output,ret_code,success", (
('echo hello', 'hello\n', 0, True),
('echo $?', '0\n', 0, True),
('exit 42', '', 42, True)))
def test_echo(cmd, output, ret_code, success):
r = runner([(cmd, output)])
res = r.run()[0]
assert res.success == success
assert res.ret_code == ret_code
assert res.test == r.tests[0]
assert res.actual_output == output
|
<commit_before>import tempfile
import StringIO
import pytest
from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner
@pytest.fixture
def tests():
return [ShellTest('echo hello', 'hello\n', ShellTestSource('', 0)),
ShellTest('echo $?', '0\n', ShellTestSource('', 2))]
def test_run(tests):
r = ShellTestRunner(tests)
results = r.run()
assert len(results) == 2
assert results[0].success
assert results[0].ret_code == 0
assert results[0].test == tests[0]
assert results[0].actual_output == tests[0].expected_output
assert results[1].success
assert results[1].ret_code == 0
assert results[1].test == tests[1]
assert results[1].actual_output == tests[1].expected_output
<commit_msg>Update runner tests to use parameters<commit_after>import tempfile
import StringIO
import pytest
from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner
def runner(tests):
tests = [ShellTest(cmd, output, ShellTestSource('', 0)) for cmd, output in tests]
return ShellTestRunner(tests)
@pytest.mark.parametrize("cmd,output,ret_code,success", (
('echo hello', 'hello\n', 0, True),
('echo $?', '0\n', 0, True),
('exit 42', '', 42, True)))
def test_echo(cmd, output, ret_code, success):
r = runner([(cmd, output)])
res = r.run()[0]
assert res.success == success
assert res.ret_code == ret_code
assert res.test == r.tests[0]
assert res.actual_output == output
|
82f8861df01d67335499682743f69b1763cc3c35
|
uberlogs/handlers/kill_process.py
|
uberlogs/handlers/kill_process.py
|
import sys
import os
from logging import Handler as LoggingHandler
class KillProcessHandler(LoggingHandler):
def emit(self, record):
if record.levelno != self.level:
return
try:
# flush text before exiting
for fd in [sys.stdout, sys.stderr]:
fd.flush()
finally:
# Twisted writes unhandled errors in different calls
# If we exit on the first call, we'd lose the actual error
for log_to_ignore in ["Unhandled error in Deferred"]:
if log_to_ignore.lower() in record.getMessage().lower():
return
os._exit(1)
|
import sys
import os
from logging import Handler as LoggingHandler
class KillProcessHandler(LoggingHandler):
def emit(self, record):
if record.levelno != self.level:
return
# flush text before exiting
for fd in [sys.stdout, sys.stderr]:
fd.flush()
# Twisted writes unhandled errors in different calls
# If we exit on the first call, we'd lose the actual error
for log_to_ignore in ["Unhandled error in Deferred"]:
if log_to_ignore.lower() in record.getMessage().lower():
return
os._exit(1)
|
Remove redundant try/catch block in kill process handler
|
Remove redundant try/catch block in kill process handler
|
Python
|
mit
|
odedlaz/uberlogs,odedlaz/uberlogs
|
import sys
import os
from logging import Handler as LoggingHandler
class KillProcessHandler(LoggingHandler):
def emit(self, record):
if record.levelno != self.level:
return
try:
# flush text before exiting
for fd in [sys.stdout, sys.stderr]:
fd.flush()
finally:
# Twisted writes unhandled errors in different calls
# If we exit on the first call, we'd lose the actual error
for log_to_ignore in ["Unhandled error in Deferred"]:
if log_to_ignore.lower() in record.getMessage().lower():
return
os._exit(1)
Remove redundant try/catch block in kill process handler
|
import sys
import os
from logging import Handler as LoggingHandler
class KillProcessHandler(LoggingHandler):
def emit(self, record):
if record.levelno != self.level:
return
# flush text before exiting
for fd in [sys.stdout, sys.stderr]:
fd.flush()
# Twisted writes unhandled errors in different calls
# If we exit on the first call, we'd lose the actual error
for log_to_ignore in ["Unhandled error in Deferred"]:
if log_to_ignore.lower() in record.getMessage().lower():
return
os._exit(1)
|
<commit_before>import sys
import os
from logging import Handler as LoggingHandler
class KillProcessHandler(LoggingHandler):
def emit(self, record):
if record.levelno != self.level:
return
try:
# flush text before exiting
for fd in [sys.stdout, sys.stderr]:
fd.flush()
finally:
# Twisted writes unhandled errors in different calls
# If we exit on the first call, we'd lose the actual error
for log_to_ignore in ["Unhandled error in Deferred"]:
if log_to_ignore.lower() in record.getMessage().lower():
return
os._exit(1)
<commit_msg>Remove redundant try/catch block in kill process handler<commit_after>
|
import sys
import os
from logging import Handler as LoggingHandler
class KillProcessHandler(LoggingHandler):
def emit(self, record):
if record.levelno != self.level:
return
# flush text before exiting
for fd in [sys.stdout, sys.stderr]:
fd.flush()
# Twisted writes unhandled errors in different calls
# If we exit on the first call, we'd lose the actual error
for log_to_ignore in ["Unhandled error in Deferred"]:
if log_to_ignore.lower() in record.getMessage().lower():
return
os._exit(1)
|
import sys
import os
from logging import Handler as LoggingHandler
class KillProcessHandler(LoggingHandler):
def emit(self, record):
if record.levelno != self.level:
return
try:
# flush text before exiting
for fd in [sys.stdout, sys.stderr]:
fd.flush()
finally:
# Twisted writes unhandled errors in different calls
# If we exit on the first call, we'd lose the actual error
for log_to_ignore in ["Unhandled error in Deferred"]:
if log_to_ignore.lower() in record.getMessage().lower():
return
os._exit(1)
Remove redundant try/catch block in kill process handlerimport sys
import os
from logging import Handler as LoggingHandler
class KillProcessHandler(LoggingHandler):
def emit(self, record):
if record.levelno != self.level:
return
# flush text before exiting
for fd in [sys.stdout, sys.stderr]:
fd.flush()
# Twisted writes unhandled errors in different calls
# If we exit on the first call, we'd lose the actual error
for log_to_ignore in ["Unhandled error in Deferred"]:
if log_to_ignore.lower() in record.getMessage().lower():
return
os._exit(1)
|
<commit_before>import sys
import os
from logging import Handler as LoggingHandler
class KillProcessHandler(LoggingHandler):
def emit(self, record):
if record.levelno != self.level:
return
try:
# flush text before exiting
for fd in [sys.stdout, sys.stderr]:
fd.flush()
finally:
# Twisted writes unhandled errors in different calls
# If we exit on the first call, we'd lose the actual error
for log_to_ignore in ["Unhandled error in Deferred"]:
if log_to_ignore.lower() in record.getMessage().lower():
return
os._exit(1)
<commit_msg>Remove redundant try/catch block in kill process handler<commit_after>import sys
import os
from logging import Handler as LoggingHandler
class KillProcessHandler(LoggingHandler):
def emit(self, record):
if record.levelno != self.level:
return
# flush text before exiting
for fd in [sys.stdout, sys.stderr]:
fd.flush()
# Twisted writes unhandled errors in different calls
# If we exit on the first call, we'd lose the actual error
for log_to_ignore in ["Unhandled error in Deferred"]:
if log_to_ignore.lower() in record.getMessage().lower():
return
os._exit(1)
|
6a531ebe5e097d277a7b07e142e98009d622253f
|
tests/registryd/test_root_accessible.py
|
tests/registryd/test_root_accessible.py
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
def test_registry_root_has_null_parent(registry_root, session_manager):
assert get_property(registry_root, ACCESSIBLE_IFACE, 'Parent') == ('', '/org/a11y/atspi/null')
def test_empty_registry_has_zero_children(registry_root, session_manager):
assert get_property(registry_root, ACCESSIBLE_IFACE, 'ChildCount') == 0
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
('Parent', ('', '/org/a11y/atspi/null')),
('ChildCount', 0),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
|
Put all the Accessibility property tests in a single function
|
Put all the Accessibility property tests in a single function
We already had machinery for that, anyway.
|
Python
|
lgpl-2.1
|
GNOME/at-spi2-core,GNOME/at-spi2-core,GNOME/at-spi2-core
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
def test_registry_root_has_null_parent(registry_root, session_manager):
assert get_property(registry_root, ACCESSIBLE_IFACE, 'Parent') == ('', '/org/a11y/atspi/null')
def test_empty_registry_has_zero_children(registry_root, session_manager):
assert get_property(registry_root, ACCESSIBLE_IFACE, 'ChildCount') == 0
Put all the Accessibility property tests in a single function
We already had machinery for that, anyway.
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
('Parent', ('', '/org/a11y/atspi/null')),
('ChildCount', 0),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
|
<commit_before># Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
def test_registry_root_has_null_parent(registry_root, session_manager):
assert get_property(registry_root, ACCESSIBLE_IFACE, 'Parent') == ('', '/org/a11y/atspi/null')
def test_empty_registry_has_zero_children(registry_root, session_manager):
assert get_property(registry_root, ACCESSIBLE_IFACE, 'ChildCount') == 0
<commit_msg>Put all the Accessibility property tests in a single function
We already had machinery for that, anyway.<commit_after>
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
('Parent', ('', '/org/a11y/atspi/null')),
('ChildCount', 0),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
def test_registry_root_has_null_parent(registry_root, session_manager):
assert get_property(registry_root, ACCESSIBLE_IFACE, 'Parent') == ('', '/org/a11y/atspi/null')
def test_empty_registry_has_zero_children(registry_root, session_manager):
assert get_property(registry_root, ACCESSIBLE_IFACE, 'ChildCount') == 0
Put all the Accessibility property tests in a single function
We already had machinery for that, anyway.# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
('Parent', ('', '/org/a11y/atspi/null')),
('ChildCount', 0),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
|
<commit_before># Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
def test_registry_root_has_null_parent(registry_root, session_manager):
assert get_property(registry_root, ACCESSIBLE_IFACE, 'Parent') == ('', '/org/a11y/atspi/null')
def test_empty_registry_has_zero_children(registry_root, session_manager):
assert get_property(registry_root, ACCESSIBLE_IFACE, 'ChildCount') == 0
<commit_msg>Put all the Accessibility property tests in a single function
We already had machinery for that, anyway.<commit_after># Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
('Parent', ('', '/org/a11y/atspi/null')),
('ChildCount', 0),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
|
b0e3ed09d401389680db14c6892e84f016423c97
|
simplesqlite/error.py
|
simplesqlite/error.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sqlite3
from typing import Optional
from tabledata import NameValidationError # noqa: W0611
class DatabaseError(sqlite3.DatabaseError):
    """
    Exception raised for errors that are related to the database.

    .. seealso::
        - `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__
    """
class NullDatabaseConnectionError(DatabaseError):
    """
    Exception raised when executing an operation on a
    :py:class:`~simplesqlite.SimpleSQLite` instance without a connection to
    a SQLite database file.
    """
class TableNotFoundError(DatabaseError):
    """
    Exception raised when accessing a table that does not exist in the database.
    """
class AttributeNotFoundError(DatabaseError):
    """
    Exception raised when accessing an attribute that does not exist in the table.
    """
class SqlSyntaxError(Exception):
    """
    Exception raised when a SQLite query's syntax is invalid.
    """
class OperationalError(sqlite3.OperationalError):
    """
    Exception raised when failed to execute a query.

    Positional arguments are forwarded to ``sqlite3.OperationalError``;
    an optional ``message`` keyword argument carries extra context.
    """

    @property
    def message(self) -> Optional[str]:
        # Extra context supplied via the ``message`` keyword, if any.
        return self.__message

    def __init__(self, *args, **kwargs) -> None:
        self.__message = kwargs.pop("message", None)
        # Forward positional args to the base class so str(exc) and exc.args
        # are populated; previously they were silently dropped.
        super().__init__(*args)
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sqlite3
from typing import Optional
from tabledata import NameValidationError # noqa: W0611
class DatabaseError(sqlite3.DatabaseError):
"""
Exception raised for errors that are related to the database.
.. seealso::
- `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__
"""
class NullDatabaseConnectionError(DatabaseError):
"""
Exception raised when executing an operation of
:py:class:`~simplesqlite.SimpleSQLite` instance without connection to
a SQLite database file.
"""
class TableNotFoundError(DatabaseError):
"""
Exception raised when accessed the table that not exists in the database.
"""
class AttributeNotFoundError(DatabaseError):
"""
Exception raised when accessed the attribute that not exists in the table.
"""
class SqlSyntaxError(Exception):
"""
Exception raised when a SQLite query syntax is invalid.
"""
class OperationalError(sqlite3.OperationalError):
"""
Exception raised when failed to execute a query.
"""
@property
def message(self) -> Optional[str]:
return self.__message
def __init__(self, *args, **kwargs) -> None:
self.__message = kwargs.pop("message", None)
super().__init__(*args)
|
Modify to pass args to the base class constructor
|
Modify to pass args to the base class constructor
|
Python
|
mit
|
thombashi/SimpleSQLite,thombashi/SimpleSQLite
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sqlite3
from typing import Optional
from tabledata import NameValidationError # noqa: W0611
class DatabaseError(sqlite3.DatabaseError):
"""
Exception raised for errors that are related to the database.
.. seealso::
- `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__
"""
class NullDatabaseConnectionError(DatabaseError):
"""
Exception raised when executing an operation of
:py:class:`~simplesqlite.SimpleSQLite` instance without connection to
a SQLite database file.
"""
class TableNotFoundError(DatabaseError):
"""
Exception raised when accessed the table that not exists in the database.
"""
class AttributeNotFoundError(DatabaseError):
"""
Exception raised when accessed the attribute that not exists in the table.
"""
class SqlSyntaxError(Exception):
"""
Exception raised when a SQLite query syntax is invalid.
"""
class OperationalError(sqlite3.OperationalError):
"""
Exception raised when failed to execute a query.
"""
@property
def message(self) -> Optional[str]:
return self.__message
def __init__(self, *args, **kwargs) -> None:
self.__message = kwargs.pop("message", None)
super().__init__()
Modify to pass args to the base class constructor
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sqlite3
from typing import Optional
from tabledata import NameValidationError # noqa: W0611
class DatabaseError(sqlite3.DatabaseError):
"""
Exception raised for errors that are related to the database.
.. seealso::
- `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__
"""
class NullDatabaseConnectionError(DatabaseError):
"""
Exception raised when executing an operation of
:py:class:`~simplesqlite.SimpleSQLite` instance without connection to
a SQLite database file.
"""
class TableNotFoundError(DatabaseError):
"""
Exception raised when accessed the table that not exists in the database.
"""
class AttributeNotFoundError(DatabaseError):
"""
Exception raised when accessed the attribute that not exists in the table.
"""
class SqlSyntaxError(Exception):
"""
Exception raised when a SQLite query syntax is invalid.
"""
class OperationalError(sqlite3.OperationalError):
"""
Exception raised when failed to execute a query.
"""
@property
def message(self) -> Optional[str]:
return self.__message
def __init__(self, *args, **kwargs) -> None:
self.__message = kwargs.pop("message", None)
super().__init__(*args)
|
<commit_before>"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sqlite3
from typing import Optional
from tabledata import NameValidationError # noqa: W0611
class DatabaseError(sqlite3.DatabaseError):
"""
Exception raised for errors that are related to the database.
.. seealso::
- `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__
"""
class NullDatabaseConnectionError(DatabaseError):
"""
Exception raised when executing an operation of
:py:class:`~simplesqlite.SimpleSQLite` instance without connection to
a SQLite database file.
"""
class TableNotFoundError(DatabaseError):
"""
Exception raised when accessed the table that not exists in the database.
"""
class AttributeNotFoundError(DatabaseError):
"""
Exception raised when accessed the attribute that not exists in the table.
"""
class SqlSyntaxError(Exception):
"""
Exception raised when a SQLite query syntax is invalid.
"""
class OperationalError(sqlite3.OperationalError):
"""
Exception raised when failed to execute a query.
"""
@property
def message(self) -> Optional[str]:
return self.__message
def __init__(self, *args, **kwargs) -> None:
self.__message = kwargs.pop("message", None)
super().__init__()
<commit_msg>Modify to pass args to the base class constructor<commit_after>
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sqlite3
from typing import Optional
from tabledata import NameValidationError # noqa: W0611
class DatabaseError(sqlite3.DatabaseError):
"""
Exception raised for errors that are related to the database.
.. seealso::
- `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__
"""
class NullDatabaseConnectionError(DatabaseError):
"""
Exception raised when executing an operation of
:py:class:`~simplesqlite.SimpleSQLite` instance without connection to
a SQLite database file.
"""
class TableNotFoundError(DatabaseError):
"""
Exception raised when accessed the table that not exists in the database.
"""
class AttributeNotFoundError(DatabaseError):
"""
Exception raised when accessed the attribute that not exists in the table.
"""
class SqlSyntaxError(Exception):
"""
Exception raised when a SQLite query syntax is invalid.
"""
class OperationalError(sqlite3.OperationalError):
"""
Exception raised when failed to execute a query.
"""
@property
def message(self) -> Optional[str]:
return self.__message
def __init__(self, *args, **kwargs) -> None:
self.__message = kwargs.pop("message", None)
super().__init__(*args)
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sqlite3
from typing import Optional
from tabledata import NameValidationError # noqa: W0611
class DatabaseError(sqlite3.DatabaseError):
"""
Exception raised for errors that are related to the database.
.. seealso::
- `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__
"""
class NullDatabaseConnectionError(DatabaseError):
"""
Exception raised when executing an operation of
:py:class:`~simplesqlite.SimpleSQLite` instance without connection to
a SQLite database file.
"""
class TableNotFoundError(DatabaseError):
"""
Exception raised when accessed the table that not exists in the database.
"""
class AttributeNotFoundError(DatabaseError):
"""
Exception raised when accessed the attribute that not exists in the table.
"""
class SqlSyntaxError(Exception):
"""
Exception raised when a SQLite query syntax is invalid.
"""
class OperationalError(sqlite3.OperationalError):
"""
Exception raised when failed to execute a query.
"""
@property
def message(self) -> Optional[str]:
return self.__message
def __init__(self, *args, **kwargs) -> None:
self.__message = kwargs.pop("message", None)
super().__init__()
Modify to pass args to the base class constructor"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sqlite3
from typing import Optional
from tabledata import NameValidationError # noqa: W0611
class DatabaseError(sqlite3.DatabaseError):
"""
Exception raised for errors that are related to the database.
.. seealso::
- `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__
"""
class NullDatabaseConnectionError(DatabaseError):
"""
Exception raised when executing an operation of
:py:class:`~simplesqlite.SimpleSQLite` instance without connection to
a SQLite database file.
"""
class TableNotFoundError(DatabaseError):
"""
Exception raised when accessed the table that not exists in the database.
"""
class AttributeNotFoundError(DatabaseError):
"""
Exception raised when accessed the attribute that not exists in the table.
"""
class SqlSyntaxError(Exception):
"""
Exception raised when a SQLite query syntax is invalid.
"""
class OperationalError(sqlite3.OperationalError):
"""
Exception raised when failed to execute a query.
"""
@property
def message(self) -> Optional[str]:
return self.__message
def __init__(self, *args, **kwargs) -> None:
self.__message = kwargs.pop("message", None)
super().__init__(*args)
|
<commit_before>"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sqlite3
from typing import Optional
from tabledata import NameValidationError # noqa: W0611
class DatabaseError(sqlite3.DatabaseError):
"""
Exception raised for errors that are related to the database.
.. seealso::
- `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__
"""
class NullDatabaseConnectionError(DatabaseError):
"""
Exception raised when executing an operation of
:py:class:`~simplesqlite.SimpleSQLite` instance without connection to
a SQLite database file.
"""
class TableNotFoundError(DatabaseError):
"""
Exception raised when accessed the table that not exists in the database.
"""
class AttributeNotFoundError(DatabaseError):
"""
Exception raised when accessed the attribute that not exists in the table.
"""
class SqlSyntaxError(Exception):
"""
Exception raised when a SQLite query syntax is invalid.
"""
class OperationalError(sqlite3.OperationalError):
"""
Exception raised when failed to execute a query.
"""
@property
def message(self) -> Optional[str]:
return self.__message
def __init__(self, *args, **kwargs) -> None:
self.__message = kwargs.pop("message", None)
super().__init__()
<commit_msg>Modify to pass args to the base class constructor<commit_after>"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import sqlite3
from typing import Optional
from tabledata import NameValidationError # noqa: W0611
class DatabaseError(sqlite3.DatabaseError):
"""
Exception raised for errors that are related to the database.
.. seealso::
- `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__
"""
class NullDatabaseConnectionError(DatabaseError):
"""
Exception raised when executing an operation of
:py:class:`~simplesqlite.SimpleSQLite` instance without connection to
a SQLite database file.
"""
class TableNotFoundError(DatabaseError):
"""
Exception raised when accessed the table that not exists in the database.
"""
class AttributeNotFoundError(DatabaseError):
"""
Exception raised when accessed the attribute that not exists in the table.
"""
class SqlSyntaxError(Exception):
"""
Exception raised when a SQLite query syntax is invalid.
"""
class OperationalError(sqlite3.OperationalError):
"""
Exception raised when failed to execute a query.
"""
@property
def message(self) -> Optional[str]:
return self.__message
def __init__(self, *args, **kwargs) -> None:
self.__message = kwargs.pop("message", None)
super().__init__(*args)
|
0d0b470e470ee913cb8983f932323921d405607b
|
refabric/context_managers.py
|
refabric/context_managers.py
|
# coding=utf-8
from contextlib import contextmanager
from fabric.context_managers import settings, hide, prefix
from fabric.state import env
__all__ = ['get_sudo_context', 'sudo', 'only_messages', 'prefix']
@contextmanager
def sudo(user=None):
    """Run the enclosed fabric commands with sudo as *user* (falls back to env.sudo_user, then env.user)."""
    with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
        yield
silent = lambda: settings(hide('commands'), warn_only=True)  # hide command output and don't abort on failures
hide_prefix = lambda: settings(output_prefix=False)  # suppress fabric's per-line host prefix
abort_on_error = lambda: settings(warn_only=False)  # make failing commands abort again (warn_only=False)
@contextmanager
def shell_env(**env_vars):
    """Temporarily prepend ``KEY=value`` assignments to fabric's shell command.

    The original ``env['shell']`` is restored on exit, even if the wrapped
    block raises (previously an exception left the modified shell in place).
    """
    orig_shell = env['shell']
    env_vars_str = ' '.join('{0}={1}'.format(key, value)
                            for key, value in env_vars.items())
    env['shell'] = '{0} {1}'.format(env_vars_str, orig_shell)
    try:
        yield
    finally:
        env['shell'] = orig_shell
|
# coding=utf-8
from contextlib import contextmanager
from fabric.context_managers import settings, hide, prefix
from fabric.state import env
__all__ = ['get_sudo_context', 'sudo', 'only_messages', 'prefix']
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda *h: settings(hide('commands', *h), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
@contextmanager
def shell_env(**env_vars):
orig_shell = env['shell']
env_vars_str = ' '.join('{0}={1}'.format(key, value)
for key, value in env_vars.items())
env['shell'] = '{0} {1}'.format(env_vars_str, orig_shell)
yield
env['shell'] = orig_shell
|
Add fine tuning to silent helper
|
Add fine tuning to silent helper
|
Python
|
mit
|
5monkeys/refabric
|
# coding=utf-8
from contextlib import contextmanager
from fabric.context_managers import settings, hide, prefix
from fabric.state import env
__all__ = ['get_sudo_context', 'sudo', 'only_messages', 'prefix']
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda: settings(hide('commands'), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
@contextmanager
def shell_env(**env_vars):
orig_shell = env['shell']
env_vars_str = ' '.join('{0}={1}'.format(key, value)
for key, value in env_vars.items())
env['shell'] = '{0} {1}'.format(env_vars_str, orig_shell)
yield
env['shell'] = orig_shell
Add fine tuning to silent helper
|
# coding=utf-8
from contextlib import contextmanager
from fabric.context_managers import settings, hide, prefix
from fabric.state import env
__all__ = ['get_sudo_context', 'sudo', 'only_messages', 'prefix']
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda *h: settings(hide('commands', *h), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
@contextmanager
def shell_env(**env_vars):
orig_shell = env['shell']
env_vars_str = ' '.join('{0}={1}'.format(key, value)
for key, value in env_vars.items())
env['shell'] = '{0} {1}'.format(env_vars_str, orig_shell)
yield
env['shell'] = orig_shell
|
<commit_before># coding=utf-8
from contextlib import contextmanager
from fabric.context_managers import settings, hide, prefix
from fabric.state import env
__all__ = ['get_sudo_context', 'sudo', 'only_messages', 'prefix']
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda: settings(hide('commands'), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
@contextmanager
def shell_env(**env_vars):
orig_shell = env['shell']
env_vars_str = ' '.join('{0}={1}'.format(key, value)
for key, value in env_vars.items())
env['shell'] = '{0} {1}'.format(env_vars_str, orig_shell)
yield
env['shell'] = orig_shell
<commit_msg>Add fine tuning to silent helper<commit_after>
|
# coding=utf-8
from contextlib import contextmanager
from fabric.context_managers import settings, hide, prefix
from fabric.state import env
__all__ = ['get_sudo_context', 'sudo', 'only_messages', 'prefix']
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda *h: settings(hide('commands', *h), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
@contextmanager
def shell_env(**env_vars):
orig_shell = env['shell']
env_vars_str = ' '.join('{0}={1}'.format(key, value)
for key, value in env_vars.items())
env['shell'] = '{0} {1}'.format(env_vars_str, orig_shell)
yield
env['shell'] = orig_shell
|
# coding=utf-8
from contextlib import contextmanager
from fabric.context_managers import settings, hide, prefix
from fabric.state import env
__all__ = ['get_sudo_context', 'sudo', 'only_messages', 'prefix']
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda: settings(hide('commands'), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
@contextmanager
def shell_env(**env_vars):
orig_shell = env['shell']
env_vars_str = ' '.join('{0}={1}'.format(key, value)
for key, value in env_vars.items())
env['shell'] = '{0} {1}'.format(env_vars_str, orig_shell)
yield
env['shell'] = orig_shell
Add fine tuning to silent helper# coding=utf-8
from contextlib import contextmanager
from fabric.context_managers import settings, hide, prefix
from fabric.state import env
__all__ = ['get_sudo_context', 'sudo', 'only_messages', 'prefix']
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda *h: settings(hide('commands', *h), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
@contextmanager
def shell_env(**env_vars):
orig_shell = env['shell']
env_vars_str = ' '.join('{0}={1}'.format(key, value)
for key, value in env_vars.items())
env['shell'] = '{0} {1}'.format(env_vars_str, orig_shell)
yield
env['shell'] = orig_shell
|
<commit_before># coding=utf-8
from contextlib import contextmanager
from fabric.context_managers import settings, hide, prefix
from fabric.state import env
__all__ = ['get_sudo_context', 'sudo', 'only_messages', 'prefix']
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda: settings(hide('commands'), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
@contextmanager
def shell_env(**env_vars):
orig_shell = env['shell']
env_vars_str = ' '.join('{0}={1}'.format(key, value)
for key, value in env_vars.items())
env['shell'] = '{0} {1}'.format(env_vars_str, orig_shell)
yield
env['shell'] = orig_shell
<commit_msg>Add fine tuning to silent helper<commit_after># coding=utf-8
from contextlib import contextmanager
from fabric.context_managers import settings, hide, prefix
from fabric.state import env
__all__ = ['get_sudo_context', 'sudo', 'only_messages', 'prefix']
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda *h: settings(hide('commands', *h), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
@contextmanager
def shell_env(**env_vars):
orig_shell = env['shell']
env_vars_str = ' '.join('{0}={1}'.format(key, value)
for key, value in env_vars.items())
env['shell'] = '{0} {1}'.format(env_vars_str, orig_shell)
yield
env['shell'] = orig_shell
|
4aeb85126cf5f75d89cc466c3f7fea2f53702a13
|
bluebottle/votes/serializers.py
|
bluebottle/votes/serializers.py
|
from bluebottle.votes.models import Vote
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
from rest_framework import serializers
class VoteSerializer(serializers.ModelSerializer):
    """Serializer for Vote objects: a read-only preview of the voter plus the project's slug."""
    voter = UserPreviewSerializer(read_only=True)
    # Represent the related project by its slug instead of its primary key.
    project = serializers.SlugRelatedField(source='project', slug_field='slug')
    class Meta:
        model = Vote
        fields = ('id', 'voter', 'project')
|
from bluebottle.votes.models import Vote
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
from rest_framework import serializers
class VoteSerializer(serializers.ModelSerializer):
voter = UserPreviewSerializer(read_only=True)
project = serializers.SlugRelatedField(source='project', slug_field='slug')
class Meta:
model = Vote
fields = ('id', 'voter', 'project', 'created')
|
Add created to votes api serializer
|
Add created to votes api serializer
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle
|
from bluebottle.votes.models import Vote
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
from rest_framework import serializers
class VoteSerializer(serializers.ModelSerializer):
voter = UserPreviewSerializer(read_only=True)
project = serializers.SlugRelatedField(source='project', slug_field='slug')
class Meta:
model = Vote
fields = ('id', 'voter', 'project')
Add created to votes api serializer
|
from bluebottle.votes.models import Vote
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
from rest_framework import serializers
class VoteSerializer(serializers.ModelSerializer):
voter = UserPreviewSerializer(read_only=True)
project = serializers.SlugRelatedField(source='project', slug_field='slug')
class Meta:
model = Vote
fields = ('id', 'voter', 'project', 'created')
|
<commit_before>from bluebottle.votes.models import Vote
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
from rest_framework import serializers
class VoteSerializer(serializers.ModelSerializer):
voter = UserPreviewSerializer(read_only=True)
project = serializers.SlugRelatedField(source='project', slug_field='slug')
class Meta:
model = Vote
fields = ('id', 'voter', 'project')
<commit_msg>Add created to votes api serializer<commit_after>
|
from bluebottle.votes.models import Vote
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
from rest_framework import serializers
class VoteSerializer(serializers.ModelSerializer):
voter = UserPreviewSerializer(read_only=True)
project = serializers.SlugRelatedField(source='project', slug_field='slug')
class Meta:
model = Vote
fields = ('id', 'voter', 'project', 'created')
|
from bluebottle.votes.models import Vote
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
from rest_framework import serializers
class VoteSerializer(serializers.ModelSerializer):
voter = UserPreviewSerializer(read_only=True)
project = serializers.SlugRelatedField(source='project', slug_field='slug')
class Meta:
model = Vote
fields = ('id', 'voter', 'project')
Add created to votes api serializerfrom bluebottle.votes.models import Vote
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
from rest_framework import serializers
class VoteSerializer(serializers.ModelSerializer):
voter = UserPreviewSerializer(read_only=True)
project = serializers.SlugRelatedField(source='project', slug_field='slug')
class Meta:
model = Vote
fields = ('id', 'voter', 'project', 'created')
|
<commit_before>from bluebottle.votes.models import Vote
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
from rest_framework import serializers
class VoteSerializer(serializers.ModelSerializer):
voter = UserPreviewSerializer(read_only=True)
project = serializers.SlugRelatedField(source='project', slug_field='slug')
class Meta:
model = Vote
fields = ('id', 'voter', 'project')
<commit_msg>Add created to votes api serializer<commit_after>from bluebottle.votes.models import Vote
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
from rest_framework import serializers
class VoteSerializer(serializers.ModelSerializer):
voter = UserPreviewSerializer(read_only=True)
project = serializers.SlugRelatedField(source='project', slug_field='slug')
class Meta:
model = Vote
fields = ('id', 'voter', 'project', 'created')
|
8425efaf60b642418786c523d142a370dae3c3a0
|
quilt_server/config.py
|
quilt_server/config.py
|
# Copyright (c) 2017 Quilt Data, Inc. All rights reserved.
"""
Default config values for the Flask app.

Shared between dev, stage, and production.
See `app.config.from_object('...')` in __init__.py.
"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False  # Turn it on for debugging.
PACKAGE_URL_EXPIRATION = 60*60*12  # 12 hours, in seconds
JSON_USE_ENCODE_METHODS = True  # Support the __json__ method in Node
# 100MB max for request body.
MAX_CONTENT_LENGTH = 100 * 1024 * 1024
|
# Copyright (c) 2017 Quilt Data, Inc. All rights reserved.
"""
Default config values the Flask app.
Shared between dev, stage, and production.
See `app.config.from_object('...')` in __init__.py.
"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False # Turn it on for debugging.
PACKAGE_URL_EXPIRATION = 60*60*24 # 24 hours
JSON_USE_ENCODE_METHODS = True # Support the __json__ method in Node
# 100MB max for request body.
MAX_CONTENT_LENGTH = 100 * 1024 * 1024
|
Change the S3 URL expiration time from 12 to 24 hours
|
Change the S3 URL expiration time from 12 to 24 hours
|
Python
|
apache-2.0
|
quiltdata/quilt-compiler,quiltdata/quilt-compiler,quiltdata/quilt,quiltdata/quilt-compiler,quiltdata/quilt-compiler,quiltdata/quilt,quiltdata/quilt,quiltdata/quilt,quiltdata/quilt
|
# Copyright (c) 2017 Quilt Data, Inc. All rights reserved.
"""
Default config values the Flask app.
Shared between dev, stage, and production.
See `app.config.from_object('...')` in __init__.py.
"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False # Turn it on for debugging.
PACKAGE_URL_EXPIRATION = 60*60*12 # 12 Hours
JSON_USE_ENCODE_METHODS = True # Support the __json__ method in Node
# 100MB max for request body.
MAX_CONTENT_LENGTH = 100 * 1024 * 1024
Change the S3 URL expiration time from 12 to 24 hours
|
# Copyright (c) 2017 Quilt Data, Inc. All rights reserved.
"""
Default config values the Flask app.
Shared between dev, stage, and production.
See `app.config.from_object('...')` in __init__.py.
"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False # Turn it on for debugging.
PACKAGE_URL_EXPIRATION = 60*60*24 # 24 hours
JSON_USE_ENCODE_METHODS = True # Support the __json__ method in Node
# 100MB max for request body.
MAX_CONTENT_LENGTH = 100 * 1024 * 1024
|
<commit_before># Copyright (c) 2017 Quilt Data, Inc. All rights reserved.
"""
Default config values the Flask app.
Shared between dev, stage, and production.
See `app.config.from_object('...')` in __init__.py.
"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False # Turn it on for debugging.
PACKAGE_URL_EXPIRATION = 60*60*12 # 12 Hours
JSON_USE_ENCODE_METHODS = True # Support the __json__ method in Node
# 100MB max for request body.
MAX_CONTENT_LENGTH = 100 * 1024 * 1024
<commit_msg>Change the S3 URL expiration time from 12 to 24 hours<commit_after>
|
# Copyright (c) 2017 Quilt Data, Inc. All rights reserved.
"""
Default config values the Flask app.
Shared between dev, stage, and production.
See `app.config.from_object('...')` in __init__.py.
"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False # Turn it on for debugging.
PACKAGE_URL_EXPIRATION = 60*60*24 # 24 hours
JSON_USE_ENCODE_METHODS = True # Support the __json__ method in Node
# 100MB max for request body.
MAX_CONTENT_LENGTH = 100 * 1024 * 1024
|
# Copyright (c) 2017 Quilt Data, Inc. All rights reserved.
"""
Default config values the Flask app.
Shared between dev, stage, and production.
See `app.config.from_object('...')` in __init__.py.
"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False # Turn it on for debugging.
PACKAGE_URL_EXPIRATION = 60*60*12 # 12 Hours
JSON_USE_ENCODE_METHODS = True # Support the __json__ method in Node
# 100MB max for request body.
MAX_CONTENT_LENGTH = 100 * 1024 * 1024
Change the S3 URL expiration time from 12 to 24 hours# Copyright (c) 2017 Quilt Data, Inc. All rights reserved.
"""
Default config values the Flask app.
Shared between dev, stage, and production.
See `app.config.from_object('...')` in __init__.py.
"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False # Turn it on for debugging.
PACKAGE_URL_EXPIRATION = 60*60*24 # 24 hours
JSON_USE_ENCODE_METHODS = True # Support the __json__ method in Node
# 100MB max for request body.
MAX_CONTENT_LENGTH = 100 * 1024 * 1024
|
<commit_before># Copyright (c) 2017 Quilt Data, Inc. All rights reserved.
"""
Default config values the Flask app.
Shared between dev, stage, and production.
See `app.config.from_object('...')` in __init__.py.
"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False # Turn it on for debugging.
PACKAGE_URL_EXPIRATION = 60*60*12 # 12 Hours
JSON_USE_ENCODE_METHODS = True # Support the __json__ method in Node
# 100MB max for request body.
MAX_CONTENT_LENGTH = 100 * 1024 * 1024
<commit_msg>Change the S3 URL expiration time from 12 to 24 hours<commit_after># Copyright (c) 2017 Quilt Data, Inc. All rights reserved.
"""
Default config values the Flask app.
Shared between dev, stage, and production.
See `app.config.from_object('...')` in __init__.py.
"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False # Turn it on for debugging.
PACKAGE_URL_EXPIRATION = 60*60*24 # 24 hours
JSON_USE_ENCODE_METHODS = True # Support the __json__ method in Node
# 100MB max for request body.
MAX_CONTENT_LENGTH = 100 * 1024 * 1024
|
83036bf711dd5047ef87a56ea9d8def604923882
|
ts3observer/features.py
|
ts3observer/features.py
|
'''
Created on Nov 10, 2014
@author: fechnert
'''
import logging
class Feature(object):
    ''' Abstract base class every concrete feature derives from '''

    def __init__(self, config, clients, channels):
        ''' Stash the shared configuration and server state on the instance '''
        self.config, self.clients, self.channels = config, clients, channels

    def run(self):
        ''' Feature entry point; concrete subclasses must implement it '''
        raise NotImplementedError
class UsernameBlacklist(Feature):
    # Placeholder: feature logic not implemented yet.
    pass
class AutoMove(Feature):
    # Placeholder: feature logic not implemented yet.
    pass
class MusicbotDetect(Feature):
    # Placeholder: feature logic not implemented yet.
    pass
|
'''
Created on Nov 10, 2014
@author: fechnert
'''
import logging
class Feature(object):
''' Represents a abstract Feature '''
def __init__(self, config, clients, channels):
''' Initialize the Object '''
self.config = config
self.clients = clients
self.channels = channels
def run(self):
raise NotImplementedError
class UsernameBlacklist(Feature):
pass
class OnAway(Feature):
pass
class OnIdle(Feature):
pass
class OnMute(Feature):
pass
class OnDeaf(Feature):
pass
|
Change Feature classes to match the new config
|
Change Feature classes to match the new config
|
Python
|
mit
|
HWDexperte/ts3observer
|
'''
Created on Nov 10, 2014
@author: fechnert
'''
import logging
class Feature(object):
''' Represents a abstract Feature '''
def __init__(self, config, clients, channels):
''' Initialize the Object '''
self.config = config
self.clients = clients
self.channels = channels
def run(self):
raise NotImplementedError
class UsernameBlacklist(Feature):
pass
class AutoMove(Feature):
pass
class MusicbotDetect(Feature):
pass
Change Feature classes to match the new config
|
'''
Created on Nov 10, 2014
@author: fechnert
'''
import logging
class Feature(object):
''' Represents a abstract Feature '''
def __init__(self, config, clients, channels):
''' Initialize the Object '''
self.config = config
self.clients = clients
self.channels = channels
def run(self):
raise NotImplementedError
class UsernameBlacklist(Feature):
pass
class OnAway(Feature):
pass
class OnIdle(Feature):
pass
class OnMute(Feature):
pass
class OnDeaf(Feature):
pass
|
<commit_before>'''
Created on Nov 10, 2014
@author: fechnert
'''
import logging
class Feature(object):
''' Represents a abstract Feature '''
def __init__(self, config, clients, channels):
''' Initialize the Object '''
self.config = config
self.clients = clients
self.channels = channels
def run(self):
raise NotImplementedError
class UsernameBlacklist(Feature):
pass
class AutoMove(Feature):
pass
class MusicbotDetect(Feature):
pass
<commit_msg>Change Feature classes to match the new config<commit_after>
|
'''
Created on Nov 10, 2014
@author: fechnert
'''
import logging
class Feature(object):
''' Represents a abstract Feature '''
def __init__(self, config, clients, channels):
''' Initialize the Object '''
self.config = config
self.clients = clients
self.channels = channels
def run(self):
raise NotImplementedError
class UsernameBlacklist(Feature):
pass
class OnAway(Feature):
pass
class OnIdle(Feature):
pass
class OnMute(Feature):
pass
class OnDeaf(Feature):
pass
|
'''
Created on Nov 10, 2014
@author: fechnert
'''
import logging
class Feature(object):
''' Represents a abstract Feature '''
def __init__(self, config, clients, channels):
''' Initialize the Object '''
self.config = config
self.clients = clients
self.channels = channels
def run(self):
raise NotImplementedError
class UsernameBlacklist(Feature):
pass
class AutoMove(Feature):
pass
class MusicbotDetect(Feature):
pass
Change Feature classes to match the new config'''
Created on Nov 10, 2014
@author: fechnert
'''
import logging
class Feature(object):
''' Represents a abstract Feature '''
def __init__(self, config, clients, channels):
''' Initialize the Object '''
self.config = config
self.clients = clients
self.channels = channels
def run(self):
raise NotImplementedError
class UsernameBlacklist(Feature):
pass
class OnAway(Feature):
pass
class OnIdle(Feature):
pass
class OnMute(Feature):
pass
class OnDeaf(Feature):
pass
|
<commit_before>'''
Created on Nov 10, 2014
@author: fechnert
'''
import logging
class Feature(object):
''' Represents a abstract Feature '''
def __init__(self, config, clients, channels):
''' Initialize the Object '''
self.config = config
self.clients = clients
self.channels = channels
def run(self):
raise NotImplementedError
class UsernameBlacklist(Feature):
pass
class AutoMove(Feature):
pass
class MusicbotDetect(Feature):
pass
<commit_msg>Change Feature classes to match the new config<commit_after>'''
Created on Nov 10, 2014
@author: fechnert
'''
import logging
class Feature(object):
''' Represents a abstract Feature '''
def __init__(self, config, clients, channels):
''' Initialize the Object '''
self.config = config
self.clients = clients
self.channels = channels
def run(self):
raise NotImplementedError
class UsernameBlacklist(Feature):
pass
class OnAway(Feature):
pass
class OnIdle(Feature):
pass
class OnMute(Feature):
pass
class OnDeaf(Feature):
pass
|
a991d3d66d4a021eb88ad1ff982686f3b930d468
|
sqlobject/dberrors.py
|
sqlobject/dberrors.py
|
"""dberrors: database exception classes for SQLObject.
These classes are dictated by the DB API v2.0:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
"""
import sys
if sys.version_info[0] >= 3:
StandardError = Exception
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DuplicateEntryError(IntegrityError):
pass
|
"""dberrors: database exception classes for SQLObject.
These classes are dictated by the DB API v2.0:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
"""
import sys
if sys.version_info[0] >= 3:
StandardError = Exception
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DuplicateEntryError(IntegrityError):
pass
|
Fix flake8 E302 expected 2 blank lines, found 1
|
Fix flake8 E302 expected 2 blank lines, found 1
|
Python
|
lgpl-2.1
|
sqlobject/sqlobject,drnlm/sqlobject,sqlobject/sqlobject,drnlm/sqlobject
|
"""dberrors: database exception classes for SQLObject.
These classes are dictated by the DB API v2.0:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
"""
import sys
if sys.version_info[0] >= 3:
StandardError = Exception
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DuplicateEntryError(IntegrityError):
pass
Fix flake8 E302 expected 2 blank lines, found 1
|
"""dberrors: database exception classes for SQLObject.
These classes are dictated by the DB API v2.0:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
"""
import sys
if sys.version_info[0] >= 3:
StandardError = Exception
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DuplicateEntryError(IntegrityError):
pass
|
<commit_before>"""dberrors: database exception classes for SQLObject.
These classes are dictated by the DB API v2.0:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
"""
import sys
if sys.version_info[0] >= 3:
StandardError = Exception
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DuplicateEntryError(IntegrityError):
pass
<commit_msg>Fix flake8 E302 expected 2 blank lines, found 1<commit_after>
|
"""dberrors: database exception classes for SQLObject.
These classes are dictated by the DB API v2.0:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
"""
import sys
if sys.version_info[0] >= 3:
StandardError = Exception
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DuplicateEntryError(IntegrityError):
pass
|
"""dberrors: database exception classes for SQLObject.
These classes are dictated by the DB API v2.0:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
"""
import sys
if sys.version_info[0] >= 3:
StandardError = Exception
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DuplicateEntryError(IntegrityError):
pass
Fix flake8 E302 expected 2 blank lines, found 1"""dberrors: database exception classes for SQLObject.
These classes are dictated by the DB API v2.0:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
"""
import sys
if sys.version_info[0] >= 3:
StandardError = Exception
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DuplicateEntryError(IntegrityError):
pass
|
<commit_before>"""dberrors: database exception classes for SQLObject.
These classes are dictated by the DB API v2.0:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
"""
import sys
if sys.version_info[0] >= 3:
StandardError = Exception
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DuplicateEntryError(IntegrityError):
pass
<commit_msg>Fix flake8 E302 expected 2 blank lines, found 1<commit_after>"""dberrors: database exception classes for SQLObject.
These classes are dictated by the DB API v2.0:
http://www.python.org/topics/database/DatabaseAPI-2.0.html
"""
import sys
if sys.version_info[0] >= 3:
StandardError = Exception
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DuplicateEntryError(IntegrityError):
pass
|
aa1bbbe1d4b463be8cedaaf445fa44612592513f
|
minette/test/helper.py
|
minette/test/helper.py
|
from time import time
from ..core import Minette
from ..models import Message
class MinetteForTest(Minette):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_channel = kwargs.get("default_channel", "")
self.case_id = str(int(time() * 10000000))
def chat(self, request, **kwargs):
self.logger.info("start testcase: " + self.case_id)
# convert to Message
if isinstance(request, str):
request = Message(text=request, **kwargs)
# set channel and channel_user_id for this test case
if request.channel == "console":
request.channel = self.default_channel or request.channel
if request.channel_user_id == "anonymous":
request.channel_user_id = "user" + self.case_id
# chat and return response
response = super().chat(request)
self.logger.info("end testcase: " + self.case_id)
return response
|
from time import time
from ..core import Minette
from ..models import Message
class MinetteForTest(Minette):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_channel = kwargs.get("default_channel", "")
self.case_id = str(int(time() * 10000000))
def chat(self, request, **kwargs):
self.logger.info("start testcase: " + self.case_id)
# convert to Message
if isinstance(request, str):
request = Message(text=request, **kwargs)
# set channel and channel_user_id for this test case
if request.channel == "console":
request.channel = self.default_channel or request.channel
if request.channel_user_id == "anonymous":
request.channel_user_id = "user" + self.case_id
# chat and return response
response = super().chat(request)
if response.messages:
response.text = response.messages[0].text
else:
response.text = ""
self.logger.info("end testcase: " + self.case_id)
return response
|
Add `text` attribute to response from `chat`
|
Add `text` attribute to response from `chat`
|
Python
|
apache-2.0
|
uezo/minette-python
|
from time import time
from ..core import Minette
from ..models import Message
class MinetteForTest(Minette):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_channel = kwargs.get("default_channel", "")
self.case_id = str(int(time() * 10000000))
def chat(self, request, **kwargs):
self.logger.info("start testcase: " + self.case_id)
# convert to Message
if isinstance(request, str):
request = Message(text=request, **kwargs)
# set channel and channel_user_id for this test case
if request.channel == "console":
request.channel = self.default_channel or request.channel
if request.channel_user_id == "anonymous":
request.channel_user_id = "user" + self.case_id
# chat and return response
response = super().chat(request)
self.logger.info("end testcase: " + self.case_id)
return response
Add `text` attribute to response from `chat`
|
from time import time
from ..core import Minette
from ..models import Message
class MinetteForTest(Minette):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_channel = kwargs.get("default_channel", "")
self.case_id = str(int(time() * 10000000))
def chat(self, request, **kwargs):
self.logger.info("start testcase: " + self.case_id)
# convert to Message
if isinstance(request, str):
request = Message(text=request, **kwargs)
# set channel and channel_user_id for this test case
if request.channel == "console":
request.channel = self.default_channel or request.channel
if request.channel_user_id == "anonymous":
request.channel_user_id = "user" + self.case_id
# chat and return response
response = super().chat(request)
if response.messages:
response.text = response.messages[0].text
else:
response.text = ""
self.logger.info("end testcase: " + self.case_id)
return response
|
<commit_before>from time import time
from ..core import Minette
from ..models import Message
class MinetteForTest(Minette):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_channel = kwargs.get("default_channel", "")
self.case_id = str(int(time() * 10000000))
def chat(self, request, **kwargs):
self.logger.info("start testcase: " + self.case_id)
# convert to Message
if isinstance(request, str):
request = Message(text=request, **kwargs)
# set channel and channel_user_id for this test case
if request.channel == "console":
request.channel = self.default_channel or request.channel
if request.channel_user_id == "anonymous":
request.channel_user_id = "user" + self.case_id
# chat and return response
response = super().chat(request)
self.logger.info("end testcase: " + self.case_id)
return response
<commit_msg>Add `text` attribute to response from `chat`<commit_after>
|
from time import time
from ..core import Minette
from ..models import Message
class MinetteForTest(Minette):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_channel = kwargs.get("default_channel", "")
self.case_id = str(int(time() * 10000000))
def chat(self, request, **kwargs):
self.logger.info("start testcase: " + self.case_id)
# convert to Message
if isinstance(request, str):
request = Message(text=request, **kwargs)
# set channel and channel_user_id for this test case
if request.channel == "console":
request.channel = self.default_channel or request.channel
if request.channel_user_id == "anonymous":
request.channel_user_id = "user" + self.case_id
# chat and return response
response = super().chat(request)
if response.messages:
response.text = response.messages[0].text
else:
response.text = ""
self.logger.info("end testcase: " + self.case_id)
return response
|
from time import time
from ..core import Minette
from ..models import Message
class MinetteForTest(Minette):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_channel = kwargs.get("default_channel", "")
self.case_id = str(int(time() * 10000000))
def chat(self, request, **kwargs):
self.logger.info("start testcase: " + self.case_id)
# convert to Message
if isinstance(request, str):
request = Message(text=request, **kwargs)
# set channel and channel_user_id for this test case
if request.channel == "console":
request.channel = self.default_channel or request.channel
if request.channel_user_id == "anonymous":
request.channel_user_id = "user" + self.case_id
# chat and return response
response = super().chat(request)
self.logger.info("end testcase: " + self.case_id)
return response
Add `text` attribute to response from `chat`from time import time
from ..core import Minette
from ..models import Message
class MinetteForTest(Minette):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_channel = kwargs.get("default_channel", "")
self.case_id = str(int(time() * 10000000))
def chat(self, request, **kwargs):
self.logger.info("start testcase: " + self.case_id)
# convert to Message
if isinstance(request, str):
request = Message(text=request, **kwargs)
# set channel and channel_user_id for this test case
if request.channel == "console":
request.channel = self.default_channel or request.channel
if request.channel_user_id == "anonymous":
request.channel_user_id = "user" + self.case_id
# chat and return response
response = super().chat(request)
if response.messages:
response.text = response.messages[0].text
else:
response.text = ""
self.logger.info("end testcase: " + self.case_id)
return response
|
<commit_before>from time import time
from ..core import Minette
from ..models import Message
class MinetteForTest(Minette):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_channel = kwargs.get("default_channel", "")
self.case_id = str(int(time() * 10000000))
def chat(self, request, **kwargs):
self.logger.info("start testcase: " + self.case_id)
# convert to Message
if isinstance(request, str):
request = Message(text=request, **kwargs)
# set channel and channel_user_id for this test case
if request.channel == "console":
request.channel = self.default_channel or request.channel
if request.channel_user_id == "anonymous":
request.channel_user_id = "user" + self.case_id
# chat and return response
response = super().chat(request)
self.logger.info("end testcase: " + self.case_id)
return response
<commit_msg>Add `text` attribute to response from `chat`<commit_after>from time import time
from ..core import Minette
from ..models import Message
class MinetteForTest(Minette):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.default_channel = kwargs.get("default_channel", "")
self.case_id = str(int(time() * 10000000))
def chat(self, request, **kwargs):
self.logger.info("start testcase: " + self.case_id)
# convert to Message
if isinstance(request, str):
request = Message(text=request, **kwargs)
# set channel and channel_user_id for this test case
if request.channel == "console":
request.channel = self.default_channel or request.channel
if request.channel_user_id == "anonymous":
request.channel_user_id = "user" + self.case_id
# chat and return response
response = super().chat(request)
if response.messages:
response.text = response.messages[0].text
else:
response.text = ""
self.logger.info("end testcase: " + self.case_id)
return response
|
1d043a9fa2140992435bc5d6583601464d96f5b0
|
wafer/schedule/renderers.py
|
wafer/schedule/renderers.py
|
from django_medusa.renderers import StaticSiteRenderer
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
return paths
renderers = [ScheduleRenderer, ]
|
from django_medusa.renderers import StaticSiteRenderer
from wafer.schedule.models import Venue
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
# Add the venues
items = Venue.objects.all()
for item in items:
paths.append(item.get_absolute_url())
return paths
renderers = [ScheduleRenderer, ]
|
Add venues to site export
|
Add venues to site export
|
Python
|
isc
|
CarlFK/wafer,CarlFK/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CarlFK/wafer
|
from django_medusa.renderers import StaticSiteRenderer
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
return paths
renderers = [ScheduleRenderer, ]
Add venues to site export
|
from django_medusa.renderers import StaticSiteRenderer
from wafer.schedule.models import Venue
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
# Add the venues
items = Venue.objects.all()
for item in items:
paths.append(item.get_absolute_url())
return paths
renderers = [ScheduleRenderer, ]
|
<commit_before>from django_medusa.renderers import StaticSiteRenderer
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
return paths
renderers = [ScheduleRenderer, ]
<commit_msg>Add venues to site export<commit_after>
|
from django_medusa.renderers import StaticSiteRenderer
from wafer.schedule.models import Venue
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
# Add the venues
items = Venue.objects.all()
for item in items:
paths.append(item.get_absolute_url())
return paths
renderers = [ScheduleRenderer, ]
|
from django_medusa.renderers import StaticSiteRenderer
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
return paths
renderers = [ScheduleRenderer, ]
Add venues to site exportfrom django_medusa.renderers import StaticSiteRenderer
from wafer.schedule.models import Venue
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
# Add the venues
items = Venue.objects.all()
for item in items:
paths.append(item.get_absolute_url())
return paths
renderers = [ScheduleRenderer, ]
|
<commit_before>from django_medusa.renderers import StaticSiteRenderer
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
return paths
renderers = [ScheduleRenderer, ]
<commit_msg>Add venues to site export<commit_after>from django_medusa.renderers import StaticSiteRenderer
from wafer.schedule.models import Venue
class ScheduleRenderer(StaticSiteRenderer):
def get_paths(self):
paths = ["/schedule/", ]
# Add the venues
items = Venue.objects.all()
for item in items:
paths.append(item.get_absolute_url())
return paths
renderers = [ScheduleRenderer, ]
|
93474c192516864b2c609f2225a0f6c1fa8ca9a8
|
Cauldron/ext/commandkeywords/__init__.py
|
Cauldron/ext/commandkeywords/__init__.py
|
# -*- coding: utf-8 -*-
"""
An extension for a command-based keyword.
"""
from __future__ import absolute_import
from Cauldron.types import Boolean, DispatcherKeywordType
from Cauldron.exc import NoWriteNecessary
class CommandKeyword(Boolean, DispatcherKeywordType):
"""This keyword will receive boolean writes as 1, and will always be set to 0.
Actions can then be performed in callbacks, etc., every time this keyword is triggered.
"""
KTL_REGISTERED = False
KTL_TYPE = 'boolean'
def __init__(self, *args, **kwargs):
kwargs['initial'] = '0'
super(CommandKeyword, self).__init__(*args, **kwargs)
def prewrite(self, value):
"""Before writing, trigger no-write-necssary if value is False"""
if self.translate(value) == '0':
raise NoWriteNecessary("No write needed, command not triggered.")
return super(CommandKeyword, self).prewrite(value)
def postwrite(self, value):
"""Special postwrite that always sets the value to '0'."""
self.set('0', force=True)
# We don't have to do anything else here.
|
# -*- coding: utf-8 -*-
"""
An extension for a command-based keyword.
"""
from __future__ import absolute_import
from Cauldron.types import Boolean, DispatcherKeywordType
from Cauldron.exc import NoWriteNecessary
from Cauldron.utils.callbacks import Callbacks
class CommandKeyword(Boolean, DispatcherKeywordType):
"""This keyword will receive boolean writes as 1, and will always be set to 0.
Actions can then be performed in callbacks, etc., every time this keyword is triggered.
"""
KTL_REGISTERED = False
KTL_TYPE = 'boolean'
def __init__(self, *args, **kwargs):
kwargs['initial'] = '0'
super(CommandKeyword, self).__init__(*args, **kwargs)
self._cbs = Callbacks()
def command(self, func):
"""Add command items."""
self._cbs.add(func)
def prewrite(self, value):
"""Before writing, trigger no-write-necssary if value is False"""
if self.translate(value) == '0':
raise NoWriteNecessary("No write needed, command not triggered.")
return super(CommandKeyword, self).prewrite(value)
def write(self, value):
"""Write to the commands."""
if str(value) == '1':
self._cbs(self)
def postwrite(self, value):
"""Special postwrite that always sets the value to '0'."""
self.set('0', force=True)
# We don't have to do anything else here.
|
Make command-keyword compatible with DFW implementation
|
Make command-keyword compatible with DFW implementation
|
Python
|
bsd-3-clause
|
alexrudy/Cauldron
|
# -*- coding: utf-8 -*-
"""
An extension for a command-based keyword.
"""
from __future__ import absolute_import
from Cauldron.types import Boolean, DispatcherKeywordType
from Cauldron.exc import NoWriteNecessary
class CommandKeyword(Boolean, DispatcherKeywordType):
"""This keyword will receive boolean writes as 1, and will always be set to 0.
Actions can then be performed in callbacks, etc., every time this keyword is triggered.
"""
KTL_REGISTERED = False
KTL_TYPE = 'boolean'
def __init__(self, *args, **kwargs):
kwargs['initial'] = '0'
super(CommandKeyword, self).__init__(*args, **kwargs)
def prewrite(self, value):
"""Before writing, trigger no-write-necssary if value is False"""
if self.translate(value) == '0':
raise NoWriteNecessary("No write needed, command not triggered.")
return super(CommandKeyword, self).prewrite(value)
def postwrite(self, value):
"""Special postwrite that always sets the value to '0'."""
self.set('0', force=True)
# We don't have to do anything else here.
Make command-keyword compatible with DFW implementation
|
# -*- coding: utf-8 -*-
"""
An extension for a command-based keyword.
"""
from __future__ import absolute_import
from Cauldron.types import Boolean, DispatcherKeywordType
from Cauldron.exc import NoWriteNecessary
from Cauldron.utils.callbacks import Callbacks
class CommandKeyword(Boolean, DispatcherKeywordType):
"""This keyword will receive boolean writes as 1, and will always be set to 0.
Actions can then be performed in callbacks, etc., every time this keyword is triggered.
"""
KTL_REGISTERED = False
KTL_TYPE = 'boolean'
def __init__(self, *args, **kwargs):
kwargs['initial'] = '0'
super(CommandKeyword, self).__init__(*args, **kwargs)
self._cbs = Callbacks()
def command(self, func):
"""Add command items."""
self._cbs.add(func)
def prewrite(self, value):
"""Before writing, trigger no-write-necssary if value is False"""
if self.translate(value) == '0':
raise NoWriteNecessary("No write needed, command not triggered.")
return super(CommandKeyword, self).prewrite(value)
def write(self, value):
"""Write to the commands."""
if str(value) == '1':
self._cbs(self)
def postwrite(self, value):
"""Special postwrite that always sets the value to '0'."""
self.set('0', force=True)
# We don't have to do anything else here.
|
<commit_before># -*- coding: utf-8 -*-
"""
An extension for a command-based keyword.
"""
from __future__ import absolute_import
from Cauldron.types import Boolean, DispatcherKeywordType
from Cauldron.exc import NoWriteNecessary
class CommandKeyword(Boolean, DispatcherKeywordType):
"""This keyword will receive boolean writes as 1, and will always be set to 0.
Actions can then be performed in callbacks, etc., every time this keyword is triggered.
"""
KTL_REGISTERED = False
KTL_TYPE = 'boolean'
def __init__(self, *args, **kwargs):
kwargs['initial'] = '0'
super(CommandKeyword, self).__init__(*args, **kwargs)
def prewrite(self, value):
"""Before writing, trigger no-write-necssary if value is False"""
if self.translate(value) == '0':
raise NoWriteNecessary("No write needed, command not triggered.")
return super(CommandKeyword, self).prewrite(value)
def postwrite(self, value):
"""Special postwrite that always sets the value to '0'."""
self.set('0', force=True)
# We don't have to do anything else here.
<commit_msg>Make command-keyword compatible with DFW implementation<commit_after>
|
# -*- coding: utf-8 -*-
"""
An extension for a command-based keyword.
"""
from __future__ import absolute_import
from Cauldron.types import Boolean, DispatcherKeywordType
from Cauldron.exc import NoWriteNecessary
from Cauldron.utils.callbacks import Callbacks
class CommandKeyword(Boolean, DispatcherKeywordType):
"""This keyword will receive boolean writes as 1, and will always be set to 0.
Actions can then be performed in callbacks, etc., every time this keyword is triggered.
"""
KTL_REGISTERED = False
KTL_TYPE = 'boolean'
def __init__(self, *args, **kwargs):
kwargs['initial'] = '0'
super(CommandKeyword, self).__init__(*args, **kwargs)
self._cbs = Callbacks()
def command(self, func):
"""Add command items."""
self._cbs.add(func)
def prewrite(self, value):
"""Before writing, trigger no-write-necssary if value is False"""
if self.translate(value) == '0':
raise NoWriteNecessary("No write needed, command not triggered.")
return super(CommandKeyword, self).prewrite(value)
def write(self, value):
"""Write to the commands."""
if str(value) == '1':
self._cbs(self)
def postwrite(self, value):
"""Special postwrite that always sets the value to '0'."""
self.set('0', force=True)
# We don't have to do anything else here.
|
# -*- coding: utf-8 -*-
"""
An extension for a command-based keyword.
"""
from __future__ import absolute_import
from Cauldron.types import Boolean, DispatcherKeywordType
from Cauldron.exc import NoWriteNecessary
class CommandKeyword(Boolean, DispatcherKeywordType):
"""This keyword will receive boolean writes as 1, and will always be set to 0.
Actions can then be performed in callbacks, etc., every time this keyword is triggered.
"""
KTL_REGISTERED = False
KTL_TYPE = 'boolean'
def __init__(self, *args, **kwargs):
kwargs['initial'] = '0'
super(CommandKeyword, self).__init__(*args, **kwargs)
def prewrite(self, value):
"""Before writing, trigger no-write-necssary if value is False"""
if self.translate(value) == '0':
raise NoWriteNecessary("No write needed, command not triggered.")
return super(CommandKeyword, self).prewrite(value)
def postwrite(self, value):
"""Special postwrite that always sets the value to '0'."""
self.set('0', force=True)
# We don't have to do anything else here.
Make command-keyword compatible with DFW implementation# -*- coding: utf-8 -*-
"""
An extension for a command-based keyword.
"""
from __future__ import absolute_import
from Cauldron.types import Boolean, DispatcherKeywordType
from Cauldron.exc import NoWriteNecessary
from Cauldron.utils.callbacks import Callbacks
class CommandKeyword(Boolean, DispatcherKeywordType):
"""This keyword will receive boolean writes as 1, and will always be set to 0.
Actions can then be performed in callbacks, etc., every time this keyword is triggered.
"""
KTL_REGISTERED = False
KTL_TYPE = 'boolean'
def __init__(self, *args, **kwargs):
kwargs['initial'] = '0'
super(CommandKeyword, self).__init__(*args, **kwargs)
self._cbs = Callbacks()
def command(self, func):
"""Add command items."""
self._cbs.add(func)
def prewrite(self, value):
"""Before writing, trigger no-write-necssary if value is False"""
if self.translate(value) == '0':
raise NoWriteNecessary("No write needed, command not triggered.")
return super(CommandKeyword, self).prewrite(value)
def write(self, value):
"""Write to the commands."""
if str(value) == '1':
self._cbs(self)
def postwrite(self, value):
"""Special postwrite that always sets the value to '0'."""
self.set('0', force=True)
# We don't have to do anything else here.
|
<commit_before># -*- coding: utf-8 -*-
"""
An extension for a command-based keyword.
"""
from __future__ import absolute_import
from Cauldron.types import Boolean, DispatcherKeywordType
from Cauldron.exc import NoWriteNecessary
class CommandKeyword(Boolean, DispatcherKeywordType):
"""This keyword will receive boolean writes as 1, and will always be set to 0.
Actions can then be performed in callbacks, etc., every time this keyword is triggered.
"""
KTL_REGISTERED = False
KTL_TYPE = 'boolean'
def __init__(self, *args, **kwargs):
kwargs['initial'] = '0'
super(CommandKeyword, self).__init__(*args, **kwargs)
def prewrite(self, value):
"""Before writing, trigger no-write-necssary if value is False"""
if self.translate(value) == '0':
raise NoWriteNecessary("No write needed, command not triggered.")
return super(CommandKeyword, self).prewrite(value)
def postwrite(self, value):
"""Special postwrite that always sets the value to '0'."""
self.set('0', force=True)
# We don't have to do anything else here.
<commit_msg>Make command-keyword compatible with DFW implementation<commit_after># -*- coding: utf-8 -*-
"""
An extension for a command-based keyword.
"""
from __future__ import absolute_import
from Cauldron.types import Boolean, DispatcherKeywordType
from Cauldron.exc import NoWriteNecessary
from Cauldron.utils.callbacks import Callbacks
class CommandKeyword(Boolean, DispatcherKeywordType):
"""This keyword will receive boolean writes as 1, and will always be set to 0.
Actions can then be performed in callbacks, etc., every time this keyword is triggered.
"""
KTL_REGISTERED = False
KTL_TYPE = 'boolean'
def __init__(self, *args, **kwargs):
kwargs['initial'] = '0'
super(CommandKeyword, self).__init__(*args, **kwargs)
self._cbs = Callbacks()
def command(self, func):
"""Add command items."""
self._cbs.add(func)
def prewrite(self, value):
"""Before writing, trigger no-write-necssary if value is False"""
if self.translate(value) == '0':
raise NoWriteNecessary("No write needed, command not triggered.")
return super(CommandKeyword, self).prewrite(value)
def write(self, value):
"""Write to the commands."""
if str(value) == '1':
self._cbs(self)
def postwrite(self, value):
"""Special postwrite that always sets the value to '0'."""
self.set('0', force=True)
# We don't have to do anything else here.
|
6cf2a3966e12af5f86781a5d20c0810953722811
|
tests/basics/scope.py
|
tests/basics/scope.py
|
# test scoping rules
# explicit global variable
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
|
# test scoping rules
# explicit global variable
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
# nonlocal at inner-inner level (h)
def f():
x = 1
def g():
def h():
nonlocal x
return x
return h
return g
print(f()()())
# nonlocal declared at outer level (g), and referenced by inner level (h)
def f():
x = 1
def g():
nonlocal x
def h():
return x
return h
return g
print(f()()())
|
Add further tests for nonlocal scoping and closures.
|
tests/basics: Add further tests for nonlocal scoping and closures.
|
Python
|
mit
|
lowRISC/micropython,ryannathans/micropython,tralamazza/micropython,micropython/micropython-esp32,cwyark/micropython,deshipu/micropython,lowRISC/micropython,alex-march/micropython,adafruit/micropython,Peetz0r/micropython-esp32,SHA2017-badge/micropython-esp32,turbinenreiter/micropython,deshipu/micropython,ryannathans/micropython,HenrikSolver/micropython,trezor/micropython,micropython/micropython-esp32,hiway/micropython,kerneltask/micropython,mhoffma/micropython,MrSurly/micropython-esp32,chrisdearman/micropython,hosaka/micropython,selste/micropython,infinnovation/micropython,tobbad/micropython,adafruit/circuitpython,tobbad/micropython,PappaPeppar/micropython,bvernoux/micropython,toolmacher/micropython,matthewelse/micropython,dmazzella/micropython,turbinenreiter/micropython,torwag/micropython,MrSurly/micropython,hiway/micropython,kerneltask/micropython,hosaka/micropython,lowRISC/micropython,mhoffma/micropython,cwyark/micropython,pramasoul/micropython,alex-robbins/micropython,dmazzella/micropython,infinnovation/micropython,bvernoux/micropython,oopy/micropython,selste/micropython,jmarcelino/pycom-micropython,turbinenreiter/micropython,mhoffma/micropython,tuc-osg/micropython,MrSurly/micropython,adafruit/circuitpython,dxxb/micropython,pozetroninc/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,Timmenem/micropython,bvernoux/micropython,alex-robbins/micropython,turbinenreiter/micropython,puuu/micropython,dxxb/micropython,pozetroninc/micropython,TDAbboud/micropython,puuu/micropython,hosaka/micropython,deshipu/micropython,tuc-osg/micropython,AriZuu/micropython,Peetz0r/micropython-esp32,adafruit/circuitpython,adafruit/circuitpython,MrSurly/micropython,toolmacher/micropython,hosaka/micropython,lowRISC/micropython,henriknelson/micropython,pfalcon/micropython,bvernoux/micropython,HenrikSolver/micropython,blazewicz/micropython,PappaPeppar/micropython,alex-march/micropython,henriknelson/micropython,dxxb/micropython,blazewicz/micropython,Timmenem/micropython,blazewicz/
micropython,blazewicz/micropython,tobbad/micropython,swegener/micropython,oopy/micropython,TDAbboud/micropython,jmarcelino/pycom-micropython,hiway/micropython,jmarcelino/pycom-micropython,PappaPeppar/micropython,HenrikSolver/micropython,dmazzella/micropython,trezor/micropython,cwyark/micropython,kerneltask/micropython,tobbad/micropython,TDAbboud/micropython,matthewelse/micropython,chrisdearman/micropython,torwag/micropython,ryannathans/micropython,pramasoul/micropython,trezor/micropython,oopy/micropython,toolmacher/micropython,adafruit/micropython,pfalcon/micropython,chrisdearman/micropython,Timmenem/micropython,Timmenem/micropython,puuu/micropython,AriZuu/micropython,dxxb/micropython,swegener/micropython,swegener/micropython,MrSurly/micropython,pramasoul/micropython,hosaka/micropython,alex-robbins/micropython,micropython/micropython-esp32,selste/micropython,tralamazza/micropython,tralamazza/micropython,matthewelse/micropython,mhoffma/micropython,Peetz0r/micropython-esp32,selste/micropython,alex-march/micropython,tralamazza/micropython,adafruit/circuitpython,pfalcon/micropython,adafruit/circuitpython,jmarcelino/pycom-micropython,tuc-osg/micropython,TDAbboud/micropython,henriknelson/micropython,MrSurly/micropython-esp32,torwag/micropython,chrisdearman/micropython,puuu/micropython,SHA2017-badge/micropython-esp32,MrSurly/micropython-esp32,Peetz0r/micropython-esp32,micropython/micropython-esp32,hiway/micropython,torwag/micropython,SHA2017-badge/micropython-esp32,mhoffma/micropython,AriZuu/micropython,oopy/micropython,pfalcon/micropython,pramasoul/micropython,MrSurly/micropython,hiway/micropython,toolmacher/micropython,AriZuu/micropython,kerneltask/micropython,chrisdearman/micropython,selste/micropython,micropython/micropython-esp32,matthewelse/micropython,PappaPeppar/micropython,matthewelse/micropython,matthewelse/micropython,deshipu/micropython,bvernoux/micropython,Timmenem/micropython,cwyark/micropython,alex-robbins/micropython,TDAbboud/micropython,henriknelson/microp
ython,adafruit/micropython,oopy/micropython,alex-march/micropython,dmazzella/micropython,MrSurly/micropython-esp32,turbinenreiter/micropython,kerneltask/micropython,PappaPeppar/micropython,pozetroninc/micropython,pozetroninc/micropython,tobbad/micropython,blazewicz/micropython,swegener/micropython,tuc-osg/micropython,trezor/micropython,alex-robbins/micropython,torwag/micropython,dxxb/micropython,adafruit/micropython,HenrikSolver/micropython,cwyark/micropython,infinnovation/micropython,HenrikSolver/micropython,pfalcon/micropython,adafruit/micropython,infinnovation/micropython,ryannathans/micropython,henriknelson/micropython,toolmacher/micropython,MrSurly/micropython-esp32,lowRISC/micropython,AriZuu/micropython,Peetz0r/micropython-esp32,alex-march/micropython,SHA2017-badge/micropython-esp32,pramasoul/micropython,tuc-osg/micropython,infinnovation/micropython,puuu/micropython,jmarcelino/pycom-micropython,trezor/micropython,pozetroninc/micropython,swegener/micropython,ryannathans/micropython
|
# test scoping rules
# explicit global variable
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
tests/basics: Add further tests for nonlocal scoping and closures.
|
# test scoping rules
# explicit global variable
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
# nonlocal at inner-inner level (h)
def f():
x = 1
def g():
def h():
nonlocal x
return x
return h
return g
print(f()()())
# nonlocal declared at outer level (g), and referenced by inner level (h)
def f():
x = 1
def g():
nonlocal x
def h():
return x
return h
return g
print(f()()())
|
<commit_before># test scoping rules
# explicit global variable
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
<commit_msg>tests/basics: Add further tests for nonlocal scoping and closures.<commit_after>
|
# test scoping rules
# explicit global variable
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
# nonlocal at inner-inner level (h)
def f():
x = 1
def g():
def h():
nonlocal x
return x
return h
return g
print(f()()())
# nonlocal declared at outer level (g), and referenced by inner level (h)
def f():
x = 1
def g():
nonlocal x
def h():
return x
return h
return g
print(f()()())
|
# test scoping rules
# explicit global variable
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
tests/basics: Add further tests for nonlocal scoping and closures.# test scoping rules
# explicit global variable
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
# nonlocal at inner-inner level (h)
def f():
x = 1
def g():
def h():
nonlocal x
return x
return h
return g
print(f()()())
# nonlocal declared at outer level (g), and referenced by inner level (h)
def f():
x = 1
def g():
nonlocal x
def h():
return x
return h
return g
print(f()()())
|
<commit_before># test scoping rules
# explicit global variable
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
<commit_msg>tests/basics: Add further tests for nonlocal scoping and closures.<commit_after># test scoping rules
# explicit global variable
a = 1
def f():
global a
global a, a # should be able to redefine as global
a = 2
f()
print(a)
# explicit nonlocal variable
def f():
a = 1
def g():
nonlocal a
nonlocal a, a # should be able to redefine as nonlocal
a = 2
g()
return a
print(f())
# nonlocal at inner-inner level (h)
def f():
x = 1
def g():
def h():
nonlocal x
return x
return h
return g
print(f()()())
# nonlocal declared at outer level (g), and referenced by inner level (h)
def f():
x = 1
def g():
nonlocal x
def h():
return x
return h
return g
print(f()()())
|
63ee6f971b99c2f030e0347c37bc9577ba9ee7cd
|
getMenu.py
|
getMenu.py
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
service = 'FoodMenu'
# output = 'json'
# callback = 'None'
request = 'http://api.uwaterloo.ca/public/v1/'
def getMenu():
url = request + '?' + 'key=' + key + '&' + 'service=' + service
r = requests.get(url).text
return r
menu = getMenu()
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
requests.put('http://s3.amazonaws.com/uwfoodmenu/response.txt', data=menu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
service = 'FoodMenu'
def getMenu():
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r.text
menu = getMenu()
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
requests.put('http://s3.amazonaws.com/uwfoodmenu/response.txt', data=menu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
Allow requests module to correctly encode query parameters.
|
Allow requests module to correctly encode query parameters.
|
Python
|
mit
|
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
service = 'FoodMenu'
# output = 'json'
# callback = 'None'
request = 'http://api.uwaterloo.ca/public/v1/'
def getMenu():
url = request + '?' + 'key=' + key + '&' + 'service=' + service
r = requests.get(url).text
return r
menu = getMenu()
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
requests.put('http://s3.amazonaws.com/uwfoodmenu/response.txt', data=menu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
Allow requests module to correctly encode query parameters.
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
service = 'FoodMenu'
def getMenu():
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r.text
menu = getMenu()
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
requests.put('http://s3.amazonaws.com/uwfoodmenu/response.txt', data=menu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
<commit_before>#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
service = 'FoodMenu'
# output = 'json'
# callback = 'None'
request = 'http://api.uwaterloo.ca/public/v1/'
def getMenu():
url = request + '?' + 'key=' + key + '&' + 'service=' + service
r = requests.get(url).text
return r
menu = getMenu()
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
requests.put('http://s3.amazonaws.com/uwfoodmenu/response.txt', data=menu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
<commit_msg>Allow requests module to correctly encode query parameters.<commit_after>
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
service = 'FoodMenu'
def getMenu():
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r.text
menu = getMenu()
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
requests.put('http://s3.amazonaws.com/uwfoodmenu/response.txt', data=menu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
service = 'FoodMenu'
# output = 'json'
# callback = 'None'
request = 'http://api.uwaterloo.ca/public/v1/'
def getMenu():
url = request + '?' + 'key=' + key + '&' + 'service=' + service
r = requests.get(url).text
return r
menu = getMenu()
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
requests.put('http://s3.amazonaws.com/uwfoodmenu/response.txt', data=menu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
Allow requests module to correctly encode query parameters.#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
service = 'FoodMenu'
def getMenu():
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r.text
menu = getMenu()
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
requests.put('http://s3.amazonaws.com/uwfoodmenu/response.txt', data=menu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
<commit_before>#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
service = 'FoodMenu'
# output = 'json'
# callback = 'None'
request = 'http://api.uwaterloo.ca/public/v1/'
def getMenu():
url = request + '?' + 'key=' + key + '&' + 'service=' + service
r = requests.get(url).text
return r
menu = getMenu()
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
requests.put('http://s3.amazonaws.com/uwfoodmenu/response.txt', data=menu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
<commit_msg>Allow requests module to correctly encode query parameters.<commit_after>#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
service = 'FoodMenu'
def getMenu():
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r.text
menu = getMenu()
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
requests.put('http://s3.amazonaws.com/uwfoodmenu/response.txt', data=menu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
98a05257eaf4ca6555ffc179a9250a7cfb3a903c
|
scripts/lib/check-database-compatibility.py
|
scripts/lib/check-database-compatibility.py
|
#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
|
#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
for migration in missing:
print(f"Migration {migration} missing in new version.")
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
|
Print names of missing migrations in compatibility check.
|
scripts: Print names of missing migrations in compatibility check.
This will make it much easier to debug any situations where this
happens.
|
Python
|
apache-2.0
|
rht/zulip,rht/zulip,rht/zulip,andersk/zulip,zulip/zulip,zulip/zulip,andersk/zulip,kou/zulip,andersk/zulip,kou/zulip,zulip/zulip,rht/zulip,kou/zulip,andersk/zulip,rht/zulip,kou/zulip,andersk/zulip,rht/zulip,zulip/zulip,kou/zulip,kou/zulip,zulip/zulip,kou/zulip,andersk/zulip,zulip/zulip,andersk/zulip,rht/zulip,zulip/zulip
|
#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
scripts: Print names of missing migrations in compatibility check.
This will make it much easier to debug any situations where this
happens.
|
#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
for migration in missing:
print(f"Migration {migration} missing in new version.")
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
|
<commit_before>#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
<commit_msg>scripts: Print names of missing migrations in compatibility check.
This will make it much easier to debug any situations where this
happens.<commit_after>
|
#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
for migration in missing:
print(f"Migration {migration} missing in new version.")
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
|
#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
scripts: Print names of missing migrations in compatibility check.
This will make it much easier to debug any situations where this
happens.#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
for migration in missing:
print(f"Migration {migration} missing in new version.")
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
|
<commit_before>#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
<commit_msg>scripts: Print names of missing migrations in compatibility check.
This will make it much easier to debug any situations where this
happens.<commit_after>#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
for migration in missing:
print(f"Migration {migration} missing in new version.")
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
|
cbafc968343cd2b001bcee354d418c9886fe94b4
|
tests/test_network.py
|
tests/test_network.py
|
from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback)
except DataSourceError as e:
pass
|
from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback, host='localhost')
except DataSourceError as e:
pass
|
Use localhost for network source tests to avoid waiting for DNS.
|
Use localhost for network source tests to avoid waiting for DNS.
|
Python
|
bsd-3-clause
|
openxc/openxc-python,openxc/openxc-python,openxc/openxc-python
|
from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback)
except DataSourceError as e:
pass
Use localhost for network source tests to avoid waiting for DNS.
|
from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback, host='localhost')
except DataSourceError as e:
pass
|
<commit_before>from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback)
except DataSourceError as e:
pass
<commit_msg>Use localhost for network source tests to avoid waiting for DNS.<commit_after>
|
from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback, host='localhost')
except DataSourceError as e:
pass
|
from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback)
except DataSourceError as e:
pass
Use localhost for network source tests to avoid waiting for DNS.from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback, host='localhost')
except DataSourceError as e:
pass
|
<commit_before>from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback)
except DataSourceError as e:
pass
<commit_msg>Use localhost for network source tests to avoid waiting for DNS.<commit_after>from nose.tools import eq_, ok_
import unittest
import openxc.measurements
from openxc.sources import NetworkDataSource
from openxc.sources import DataSourceError
class NetworkDataSourceTests(unittest.TestCase):
def setUp(self):
super(NetworkDataSourceTests, self).setUp()
def test_create(self):
def callback(message):
pass
try:
s = NetworkDataSource(callback, host='localhost')
except DataSourceError as e:
pass
|
86d12c7d13bd7a11a93deccf42f93df4328e70fd
|
admin_honeypot/urls.py
|
admin_honeypot/urls.py
|
from admin_honeypot import views
from django.conf.urls import url
app_name = 'admin_honeypot'
urlpatterns = [
url(r'^login/$', views.AdminHoneypot.as_view(), name='login'),
url(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
|
from admin_honeypot import views
from django.urls import path, re_path
app_name = 'admin_honeypot'
urlpatterns = [
path('login/', views.AdminHoneypot.as_view(), name='login'),
re_path(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
|
Update url() to path() in the urlconf.
|
Update url() to path() in the urlconf.
|
Python
|
mit
|
dmpayton/django-admin-honeypot,dmpayton/django-admin-honeypot
|
from admin_honeypot import views
from django.conf.urls import url
app_name = 'admin_honeypot'
urlpatterns = [
url(r'^login/$', views.AdminHoneypot.as_view(), name='login'),
url(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
Update url() to path() in the urlconf.
|
from admin_honeypot import views
from django.urls import path, re_path
app_name = 'admin_honeypot'
urlpatterns = [
path('login/', views.AdminHoneypot.as_view(), name='login'),
re_path(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
|
<commit_before>from admin_honeypot import views
from django.conf.urls import url
app_name = 'admin_honeypot'
urlpatterns = [
url(r'^login/$', views.AdminHoneypot.as_view(), name='login'),
url(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
<commit_msg>Update url() to path() in the urlconf.<commit_after>
|
from admin_honeypot import views
from django.urls import path, re_path
app_name = 'admin_honeypot'
urlpatterns = [
path('login/', views.AdminHoneypot.as_view(), name='login'),
re_path(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
|
from admin_honeypot import views
from django.conf.urls import url
app_name = 'admin_honeypot'
urlpatterns = [
url(r'^login/$', views.AdminHoneypot.as_view(), name='login'),
url(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
Update url() to path() in the urlconf.from admin_honeypot import views
from django.urls import path, re_path
app_name = 'admin_honeypot'
urlpatterns = [
path('login/', views.AdminHoneypot.as_view(), name='login'),
re_path(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
|
<commit_before>from admin_honeypot import views
from django.conf.urls import url
app_name = 'admin_honeypot'
urlpatterns = [
url(r'^login/$', views.AdminHoneypot.as_view(), name='login'),
url(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
<commit_msg>Update url() to path() in the urlconf.<commit_after>from admin_honeypot import views
from django.urls import path, re_path
app_name = 'admin_honeypot'
urlpatterns = [
path('login/', views.AdminHoneypot.as_view(), name='login'),
re_path(r'^.*$', views.AdminHoneypot.as_view(), name='index'),
]
|
1726a73b81c8a7dfc3610690fe9272776e930f0f
|
aero/adapters/bower.py
|
aero/adapters/bower.py
|
# -*- coding: utf-8 -*-
__author__ = 'oliveiraev'
__all__ = ['Bower']
from re import sub
from re import split
from aero.__version__ import enc
from .base import BaseAdapter
class Bower(BaseAdapter):
"""
Twitter Bower - Browser package manager - Adapter
"""
def search(self, query):
return {}
response = self.command('search', query, ['--no-color'])[0].decode(*enc)
lst = dict([(self.package_name(k), v) for k, v in [
line.lstrip(' -').split(' ') for line in response.splitlines()
if line.startswith(' - ')]
])
if lst:
return lst
def install(self, query):
return self.shell('install', query)
def info(self, query):
response = self.command('info', query, ['--no-color'])[0].decode(*enc)
return response or ['Aborted: No info available']
|
# -*- coding: utf-8 -*-
__author__ = 'oliveiraev'
__all__ = ['Bower']
from re import sub
from re import split
from aero.__version__ import enc
from .base import BaseAdapter
class Bower(BaseAdapter):
"""
Twitter Bower - Browser package manager - Adapter
"""
def search(self, query):
response = self.command('search', query, ['--no-color'])[0].decode(*enc)
lst = dict([(self.package_name(k), v) for k, v in [
line.lstrip(' -').split(' ') for line in response.splitlines()
if line.startswith(' - ')]
])
return lst or {}
def install(self, query):
return self.shell('install', query)
def info(self, query):
response = self.command('info', query, ['--no-color'])[0].decode(*enc)
return response or ['Aborted: No info available']
|
Simplify return while we're at it
|
Simplify return while we're at it
|
Python
|
bsd-3-clause
|
Aeronautics/aero
|
# -*- coding: utf-8 -*-
__author__ = 'oliveiraev'
__all__ = ['Bower']
from re import sub
from re import split
from aero.__version__ import enc
from .base import BaseAdapter
class Bower(BaseAdapter):
"""
Twitter Bower - Browser package manager - Adapter
"""
def search(self, query):
return {}
response = self.command('search', query, ['--no-color'])[0].decode(*enc)
lst = dict([(self.package_name(k), v) for k, v in [
line.lstrip(' -').split(' ') for line in response.splitlines()
if line.startswith(' - ')]
])
if lst:
return lst
def install(self, query):
return self.shell('install', query)
def info(self, query):
response = self.command('info', query, ['--no-color'])[0].decode(*enc)
return response or ['Aborted: No info available']
Simplify return while we're at it
|
# -*- coding: utf-8 -*-
__author__ = 'oliveiraev'
__all__ = ['Bower']
from re import sub
from re import split
from aero.__version__ import enc
from .base import BaseAdapter
class Bower(BaseAdapter):
"""
Twitter Bower - Browser package manager - Adapter
"""
def search(self, query):
response = self.command('search', query, ['--no-color'])[0].decode(*enc)
lst = dict([(self.package_name(k), v) for k, v in [
line.lstrip(' -').split(' ') for line in response.splitlines()
if line.startswith(' - ')]
])
return lst or {}
def install(self, query):
return self.shell('install', query)
def info(self, query):
response = self.command('info', query, ['--no-color'])[0].decode(*enc)
return response or ['Aborted: No info available']
|
<commit_before># -*- coding: utf-8 -*-
__author__ = 'oliveiraev'
__all__ = ['Bower']
from re import sub
from re import split
from aero.__version__ import enc
from .base import BaseAdapter
class Bower(BaseAdapter):
"""
Twitter Bower - Browser package manager - Adapter
"""
def search(self, query):
return {}
response = self.command('search', query, ['--no-color'])[0].decode(*enc)
lst = dict([(self.package_name(k), v) for k, v in [
line.lstrip(' -').split(' ') for line in response.splitlines()
if line.startswith(' - ')]
])
if lst:
return lst
def install(self, query):
return self.shell('install', query)
def info(self, query):
response = self.command('info', query, ['--no-color'])[0].decode(*enc)
return response or ['Aborted: No info available']
<commit_msg>Simplify return while we're at it<commit_after>
|
# -*- coding: utf-8 -*-
__author__ = 'oliveiraev'
__all__ = ['Bower']
from re import sub
from re import split
from aero.__version__ import enc
from .base import BaseAdapter
class Bower(BaseAdapter):
"""
Twitter Bower - Browser package manager - Adapter
"""
def search(self, query):
response = self.command('search', query, ['--no-color'])[0].decode(*enc)
lst = dict([(self.package_name(k), v) for k, v in [
line.lstrip(' -').split(' ') for line in response.splitlines()
if line.startswith(' - ')]
])
return lst or {}
def install(self, query):
return self.shell('install', query)
def info(self, query):
response = self.command('info', query, ['--no-color'])[0].decode(*enc)
return response or ['Aborted: No info available']
|
# -*- coding: utf-8 -*-
__author__ = 'oliveiraev'
__all__ = ['Bower']
from re import sub
from re import split
from aero.__version__ import enc
from .base import BaseAdapter
class Bower(BaseAdapter):
"""
Twitter Bower - Browser package manager - Adapter
"""
def search(self, query):
return {}
response = self.command('search', query, ['--no-color'])[0].decode(*enc)
lst = dict([(self.package_name(k), v) for k, v in [
line.lstrip(' -').split(' ') for line in response.splitlines()
if line.startswith(' - ')]
])
if lst:
return lst
def install(self, query):
return self.shell('install', query)
def info(self, query):
response = self.command('info', query, ['--no-color'])[0].decode(*enc)
return response or ['Aborted: No info available']
Simplify return while we're at it# -*- coding: utf-8 -*-
__author__ = 'oliveiraev'
__all__ = ['Bower']
from re import sub
from re import split
from aero.__version__ import enc
from .base import BaseAdapter
class Bower(BaseAdapter):
"""
Twitter Bower - Browser package manager - Adapter
"""
def search(self, query):
response = self.command('search', query, ['--no-color'])[0].decode(*enc)
lst = dict([(self.package_name(k), v) for k, v in [
line.lstrip(' -').split(' ') for line in response.splitlines()
if line.startswith(' - ')]
])
return lst or {}
def install(self, query):
return self.shell('install', query)
def info(self, query):
response = self.command('info', query, ['--no-color'])[0].decode(*enc)
return response or ['Aborted: No info available']
|
<commit_before># -*- coding: utf-8 -*-
__author__ = 'oliveiraev'
__all__ = ['Bower']
from re import sub
from re import split
from aero.__version__ import enc
from .base import BaseAdapter
class Bower(BaseAdapter):
"""
Twitter Bower - Browser package manager - Adapter
"""
def search(self, query):
return {}
response = self.command('search', query, ['--no-color'])[0].decode(*enc)
lst = dict([(self.package_name(k), v) for k, v in [
line.lstrip(' -').split(' ') for line in response.splitlines()
if line.startswith(' - ')]
])
if lst:
return lst
def install(self, query):
return self.shell('install', query)
def info(self, query):
response = self.command('info', query, ['--no-color'])[0].decode(*enc)
return response or ['Aborted: No info available']
<commit_msg>Simplify return while we're at it<commit_after># -*- coding: utf-8 -*-
__author__ = 'oliveiraev'
__all__ = ['Bower']
from re import sub
from re import split
from aero.__version__ import enc
from .base import BaseAdapter
class Bower(BaseAdapter):
"""
Twitter Bower - Browser package manager - Adapter
"""
def search(self, query):
response = self.command('search', query, ['--no-color'])[0].decode(*enc)
lst = dict([(self.package_name(k), v) for k, v in [
line.lstrip(' -').split(' ') for line in response.splitlines()
if line.startswith(' - ')]
])
return lst or {}
def install(self, query):
return self.shell('install', query)
def info(self, query):
response = self.command('info', query, ['--no-color'])[0].decode(*enc)
return response or ['Aborted: No info available']
|
19964dc65cecbbb043da3fe85bf355423cf9ce3c
|
shop/products/admin/forms.py
|
shop/products/admin/forms.py
|
from django.apps import apps
from django import forms
from suit.sortables import SortableTabularInline
from multiupload.fields import MultiFileField
class ProductForm(forms.ModelForm):
images = MultiFileField(max_num=100, min_num=1, required=False)
class Meta:
model = apps.get_model('products', 'Product')
fields = '__all__'
class ProductImageInline(SortableTabularInline):
fields = ('preview', )
readonly_fields = ['preview']
model = apps.get_model('products', 'ProductImage')
extra = 0
max_num = 0
|
from django.apps import apps
from django import forms
from suit.sortables import SortableTabularInline
from multiupload.fields import MultiFileField
class ProductForm(forms.ModelForm):
images = MultiFileField(max_num=100, min_num=1, required=False)
def save(self, commit=True):
product = super(ProductForm, self).save(commit)
if 'category' in self.changed_data:
product.attribute_values.all().delete()
return product
class Meta:
model = apps.get_model('products', 'Product')
fields = '__all__'
class ProductImageInline(SortableTabularInline):
fields = ('preview', )
readonly_fields = ['preview']
model = apps.get_model('products', 'ProductImage')
extra = 0
max_num = 0
|
Clear attr values on category change.
|
Clear attr values on category change.
|
Python
|
isc
|
pmaigutyak/mp-shop,pmaigutyak/mp-shop,pmaigutyak/mp-shop
|
from django.apps import apps
from django import forms
from suit.sortables import SortableTabularInline
from multiupload.fields import MultiFileField
class ProductForm(forms.ModelForm):
images = MultiFileField(max_num=100, min_num=1, required=False)
class Meta:
model = apps.get_model('products', 'Product')
fields = '__all__'
class ProductImageInline(SortableTabularInline):
fields = ('preview', )
readonly_fields = ['preview']
model = apps.get_model('products', 'ProductImage')
extra = 0
max_num = 0
Clear attr values on category change.
|
from django.apps import apps
from django import forms
from suit.sortables import SortableTabularInline
from multiupload.fields import MultiFileField
class ProductForm(forms.ModelForm):
images = MultiFileField(max_num=100, min_num=1, required=False)
def save(self, commit=True):
product = super(ProductForm, self).save(commit)
if 'category' in self.changed_data:
product.attribute_values.all().delete()
return product
class Meta:
model = apps.get_model('products', 'Product')
fields = '__all__'
class ProductImageInline(SortableTabularInline):
fields = ('preview', )
readonly_fields = ['preview']
model = apps.get_model('products', 'ProductImage')
extra = 0
max_num = 0
|
<commit_before>
from django.apps import apps
from django import forms
from suit.sortables import SortableTabularInline
from multiupload.fields import MultiFileField
class ProductForm(forms.ModelForm):
images = MultiFileField(max_num=100, min_num=1, required=False)
class Meta:
model = apps.get_model('products', 'Product')
fields = '__all__'
class ProductImageInline(SortableTabularInline):
fields = ('preview', )
readonly_fields = ['preview']
model = apps.get_model('products', 'ProductImage')
extra = 0
max_num = 0
<commit_msg>Clear attr values on category change.<commit_after>
|
from django.apps import apps
from django import forms
from suit.sortables import SortableTabularInline
from multiupload.fields import MultiFileField
class ProductForm(forms.ModelForm):
images = MultiFileField(max_num=100, min_num=1, required=False)
def save(self, commit=True):
product = super(ProductForm, self).save(commit)
if 'category' in self.changed_data:
product.attribute_values.all().delete()
return product
class Meta:
model = apps.get_model('products', 'Product')
fields = '__all__'
class ProductImageInline(SortableTabularInline):
fields = ('preview', )
readonly_fields = ['preview']
model = apps.get_model('products', 'ProductImage')
extra = 0
max_num = 0
|
from django.apps import apps
from django import forms
from suit.sortables import SortableTabularInline
from multiupload.fields import MultiFileField
class ProductForm(forms.ModelForm):
images = MultiFileField(max_num=100, min_num=1, required=False)
class Meta:
model = apps.get_model('products', 'Product')
fields = '__all__'
class ProductImageInline(SortableTabularInline):
fields = ('preview', )
readonly_fields = ['preview']
model = apps.get_model('products', 'ProductImage')
extra = 0
max_num = 0
Clear attr values on category change.
from django.apps import apps
from django import forms
from suit.sortables import SortableTabularInline
from multiupload.fields import MultiFileField
class ProductForm(forms.ModelForm):
images = MultiFileField(max_num=100, min_num=1, required=False)
def save(self, commit=True):
product = super(ProductForm, self).save(commit)
if 'category' in self.changed_data:
product.attribute_values.all().delete()
return product
class Meta:
model = apps.get_model('products', 'Product')
fields = '__all__'
class ProductImageInline(SortableTabularInline):
fields = ('preview', )
readonly_fields = ['preview']
model = apps.get_model('products', 'ProductImage')
extra = 0
max_num = 0
|
<commit_before>
from django.apps import apps
from django import forms
from suit.sortables import SortableTabularInline
from multiupload.fields import MultiFileField
class ProductForm(forms.ModelForm):
images = MultiFileField(max_num=100, min_num=1, required=False)
class Meta:
model = apps.get_model('products', 'Product')
fields = '__all__'
class ProductImageInline(SortableTabularInline):
fields = ('preview', )
readonly_fields = ['preview']
model = apps.get_model('products', 'ProductImage')
extra = 0
max_num = 0
<commit_msg>Clear attr values on category change.<commit_after>
from django.apps import apps
from django import forms
from suit.sortables import SortableTabularInline
from multiupload.fields import MultiFileField
class ProductForm(forms.ModelForm):
images = MultiFileField(max_num=100, min_num=1, required=False)
def save(self, commit=True):
product = super(ProductForm, self).save(commit)
if 'category' in self.changed_data:
product.attribute_values.all().delete()
return product
class Meta:
model = apps.get_model('products', 'Product')
fields = '__all__'
class ProductImageInline(SortableTabularInline):
fields = ('preview', )
readonly_fields = ['preview']
model = apps.get_model('products', 'ProductImage')
extra = 0
max_num = 0
|
bd7c6e22146604183412657e68457db7ae7766ed
|
script/jsonify-book.py
|
script/jsonify-book.py
|
import sys
from glob import glob
from os.path import basename
import json
book_dir, out_dir = sys.argv[1:3]
files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")]
json_data = {}
for path in files:
with open(f"{book_dir}/{path}.xhtml", "r") as book_part:
content = book_part.read()
json_data = { "content": str(content) }
with open(f"{out_dir}/{path}.json", 'w') as outfile:
json.dump(json_data, outfile)
|
import sys
from glob import glob
from os.path import basename
import json
book_dir, out_dir = sys.argv[1:3]
files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")]
json_data = {}
for path in files:
with open(f"{book_dir}/{path}.json", "r") as meta_part:
json_data = json.load(meta_part)
with open(f"{book_dir}/{path}.xhtml", "r") as book_part:
content = book_part.read()
json_data["content"] = str(content)
with open(f"{out_dir}/{path}.json", 'w') as outfile:
json.dump(json_data, outfile)
|
Add metadata to jsonify output
|
Add metadata to jsonify output
|
Python
|
lgpl-2.1
|
Connexions/cte,Connexions/cnx-recipes,Connexions/cnx-rulesets,Connexions/cnx-recipes,Connexions/cnx-rulesets,Connexions/cnx-rulesets,Connexions/cnx-rulesets,Connexions/cnx-recipes,Connexions/cnx-recipes,Connexions/cte,Connexions/cnx-recipes
|
import sys
from glob import glob
from os.path import basename
import json
book_dir, out_dir = sys.argv[1:3]
files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")]
json_data = {}
for path in files:
with open(f"{book_dir}/{path}.xhtml", "r") as book_part:
content = book_part.read()
json_data = { "content": str(content) }
with open(f"{out_dir}/{path}.json", 'w') as outfile:
json.dump(json_data, outfile)
Add metadata to jsonify output
|
import sys
from glob import glob
from os.path import basename
import json
book_dir, out_dir = sys.argv[1:3]
files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")]
json_data = {}
for path in files:
with open(f"{book_dir}/{path}.json", "r") as meta_part:
json_data = json.load(meta_part)
with open(f"{book_dir}/{path}.xhtml", "r") as book_part:
content = book_part.read()
json_data["content"] = str(content)
with open(f"{out_dir}/{path}.json", 'w') as outfile:
json.dump(json_data, outfile)
|
<commit_before>import sys
from glob import glob
from os.path import basename
import json
book_dir, out_dir = sys.argv[1:3]
files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")]
json_data = {}
for path in files:
with open(f"{book_dir}/{path}.xhtml", "r") as book_part:
content = book_part.read()
json_data = { "content": str(content) }
with open(f"{out_dir}/{path}.json", 'w') as outfile:
json.dump(json_data, outfile)
<commit_msg>Add metadata to jsonify output<commit_after>
|
import sys
from glob import glob
from os.path import basename
import json
book_dir, out_dir = sys.argv[1:3]
files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")]
json_data = {}
for path in files:
with open(f"{book_dir}/{path}.json", "r") as meta_part:
json_data = json.load(meta_part)
with open(f"{book_dir}/{path}.xhtml", "r") as book_part:
content = book_part.read()
json_data["content"] = str(content)
with open(f"{out_dir}/{path}.json", 'w') as outfile:
json.dump(json_data, outfile)
|
import sys
from glob import glob
from os.path import basename
import json
book_dir, out_dir = sys.argv[1:3]
files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")]
json_data = {}
for path in files:
with open(f"{book_dir}/{path}.xhtml", "r") as book_part:
content = book_part.read()
json_data = { "content": str(content) }
with open(f"{out_dir}/{path}.json", 'w') as outfile:
json.dump(json_data, outfile)
Add metadata to jsonify outputimport sys
from glob import glob
from os.path import basename
import json
book_dir, out_dir = sys.argv[1:3]
files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")]
json_data = {}
for path in files:
with open(f"{book_dir}/{path}.json", "r") as meta_part:
json_data = json.load(meta_part)
with open(f"{book_dir}/{path}.xhtml", "r") as book_part:
content = book_part.read()
json_data["content"] = str(content)
with open(f"{out_dir}/{path}.json", 'w') as outfile:
json.dump(json_data, outfile)
|
<commit_before>import sys
from glob import glob
from os.path import basename
import json
book_dir, out_dir = sys.argv[1:3]
files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")]
json_data = {}
for path in files:
with open(f"{book_dir}/{path}.xhtml", "r") as book_part:
content = book_part.read()
json_data = { "content": str(content) }
with open(f"{out_dir}/{path}.json", 'w') as outfile:
json.dump(json_data, outfile)
<commit_msg>Add metadata to jsonify output<commit_after>import sys
from glob import glob
from os.path import basename
import json
book_dir, out_dir = sys.argv[1:3]
files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")]
json_data = {}
for path in files:
with open(f"{book_dir}/{path}.json", "r") as meta_part:
json_data = json.load(meta_part)
with open(f"{book_dir}/{path}.xhtml", "r") as book_part:
content = book_part.read()
json_data["content"] = str(content)
with open(f"{out_dir}/{path}.json", 'w') as outfile:
json.dump(json_data, outfile)
|
b9e3485030ef7acf5b3d312b8e9d9fc54367eded
|
tests/ext/argcomplete_tests.py
|
tests/ext/argcomplete_tests.py
|
"""Tests for cement.ext.ext_argcomplete."""
import os
from cement.ext import ext_argcomplete
from cement.ext.ext_argparse import ArgparseController, expose
from cement.utils import test
from cement.utils.misc import rando
APP = rando()[:12]
class MyBaseController(ArgparseController):
class Meta:
label = 'base'
@expose()
def default(self):
pass
class ArgcompleteExtTestCase(test.CementExtTestCase):
def setUp(self):
super(ArgcompleteExtTestCase, self).setUp()
self.app = self.make_app(APP,
base_controller=MyBaseController,
extensions=[
'argparse',
'argcomplete'
],
)
def test_argcomplete(self):
# not really sure how to test this for reals... but let's atleast get
# coverage
with self.app as app:
app.run()
|
"""Tests for cement.ext.ext_argcomplete."""
import os
from cement.ext import ext_argcomplete
from cement.ext.ext_argparse import ArgparseController, expose
from cement.utils import test
from cement.utils.misc import rando
APP = rando()[:12]
class MyBaseController(ArgparseController):
class Meta:
label = 'base'
@expose()
def default(self):
pass
class ArgcompleteExtTestCase(test.CementExtTestCase):
def setUp(self):
super(ArgcompleteExtTestCase, self).setUp()
self.app = self.make_app(APP,
argv=['default'],
base_controller=MyBaseController,
extensions=[
'argparse',
'argcomplete'
],
)
def test_argcomplete(self):
# not really sure how to test this for reals... but let's atleast get
# coverage
with self.app as app:
app.run()
|
Fix Argcomplete Tests on Python <3.2
|
Fix Argcomplete Tests on Python <3.2
|
Python
|
bsd-3-clause
|
akhilman/cement,fxstein/cement,datafolklabs/cement,akhilman/cement,akhilman/cement,fxstein/cement,fxstein/cement,datafolklabs/cement,datafolklabs/cement
|
"""Tests for cement.ext.ext_argcomplete."""
import os
from cement.ext import ext_argcomplete
from cement.ext.ext_argparse import ArgparseController, expose
from cement.utils import test
from cement.utils.misc import rando
APP = rando()[:12]
class MyBaseController(ArgparseController):
class Meta:
label = 'base'
@expose()
def default(self):
pass
class ArgcompleteExtTestCase(test.CementExtTestCase):
def setUp(self):
super(ArgcompleteExtTestCase, self).setUp()
self.app = self.make_app(APP,
base_controller=MyBaseController,
extensions=[
'argparse',
'argcomplete'
],
)
def test_argcomplete(self):
# not really sure how to test this for reals... but let's atleast get
# coverage
with self.app as app:
app.run()
Fix Argcomplete Tests on Python <3.2
|
"""Tests for cement.ext.ext_argcomplete."""
import os
from cement.ext import ext_argcomplete
from cement.ext.ext_argparse import ArgparseController, expose
from cement.utils import test
from cement.utils.misc import rando
APP = rando()[:12]
class MyBaseController(ArgparseController):
class Meta:
label = 'base'
@expose()
def default(self):
pass
class ArgcompleteExtTestCase(test.CementExtTestCase):
def setUp(self):
super(ArgcompleteExtTestCase, self).setUp()
self.app = self.make_app(APP,
argv=['default'],
base_controller=MyBaseController,
extensions=[
'argparse',
'argcomplete'
],
)
def test_argcomplete(self):
# not really sure how to test this for reals... but let's atleast get
# coverage
with self.app as app:
app.run()
|
<commit_before>"""Tests for cement.ext.ext_argcomplete."""
import os
from cement.ext import ext_argcomplete
from cement.ext.ext_argparse import ArgparseController, expose
from cement.utils import test
from cement.utils.misc import rando
APP = rando()[:12]
class MyBaseController(ArgparseController):
class Meta:
label = 'base'
@expose()
def default(self):
pass
class ArgcompleteExtTestCase(test.CementExtTestCase):
def setUp(self):
super(ArgcompleteExtTestCase, self).setUp()
self.app = self.make_app(APP,
base_controller=MyBaseController,
extensions=[
'argparse',
'argcomplete'
],
)
def test_argcomplete(self):
# not really sure how to test this for reals... but let's atleast get
# coverage
with self.app as app:
app.run()
<commit_msg>Fix Argcomplete Tests on Python <3.2<commit_after>
|
"""Tests for cement.ext.ext_argcomplete."""
import os
from cement.ext import ext_argcomplete
from cement.ext.ext_argparse import ArgparseController, expose
from cement.utils import test
from cement.utils.misc import rando
APP = rando()[:12]
class MyBaseController(ArgparseController):
class Meta:
label = 'base'
@expose()
def default(self):
pass
class ArgcompleteExtTestCase(test.CementExtTestCase):
def setUp(self):
super(ArgcompleteExtTestCase, self).setUp()
self.app = self.make_app(APP,
argv=['default'],
base_controller=MyBaseController,
extensions=[
'argparse',
'argcomplete'
],
)
def test_argcomplete(self):
# not really sure how to test this for reals... but let's atleast get
# coverage
with self.app as app:
app.run()
|
"""Tests for cement.ext.ext_argcomplete."""
import os
from cement.ext import ext_argcomplete
from cement.ext.ext_argparse import ArgparseController, expose
from cement.utils import test
from cement.utils.misc import rando
APP = rando()[:12]
class MyBaseController(ArgparseController):
class Meta:
label = 'base'
@expose()
def default(self):
pass
class ArgcompleteExtTestCase(test.CementExtTestCase):
def setUp(self):
super(ArgcompleteExtTestCase, self).setUp()
self.app = self.make_app(APP,
base_controller=MyBaseController,
extensions=[
'argparse',
'argcomplete'
],
)
def test_argcomplete(self):
# not really sure how to test this for reals... but let's atleast get
# coverage
with self.app as app:
app.run()
Fix Argcomplete Tests on Python <3.2"""Tests for cement.ext.ext_argcomplete."""
import os
from cement.ext import ext_argcomplete
from cement.ext.ext_argparse import ArgparseController, expose
from cement.utils import test
from cement.utils.misc import rando
APP = rando()[:12]
class MyBaseController(ArgparseController):
class Meta:
label = 'base'
@expose()
def default(self):
pass
class ArgcompleteExtTestCase(test.CementExtTestCase):
def setUp(self):
super(ArgcompleteExtTestCase, self).setUp()
self.app = self.make_app(APP,
argv=['default'],
base_controller=MyBaseController,
extensions=[
'argparse',
'argcomplete'
],
)
def test_argcomplete(self):
# not really sure how to test this for reals... but let's atleast get
# coverage
with self.app as app:
app.run()
|
<commit_before>"""Tests for cement.ext.ext_argcomplete."""
import os
from cement.ext import ext_argcomplete
from cement.ext.ext_argparse import ArgparseController, expose
from cement.utils import test
from cement.utils.misc import rando
APP = rando()[:12]
class MyBaseController(ArgparseController):
class Meta:
label = 'base'
@expose()
def default(self):
pass
class ArgcompleteExtTestCase(test.CementExtTestCase):
def setUp(self):
super(ArgcompleteExtTestCase, self).setUp()
self.app = self.make_app(APP,
base_controller=MyBaseController,
extensions=[
'argparse',
'argcomplete'
],
)
def test_argcomplete(self):
# not really sure how to test this for reals... but let's atleast get
# coverage
with self.app as app:
app.run()
<commit_msg>Fix Argcomplete Tests on Python <3.2<commit_after>"""Tests for cement.ext.ext_argcomplete."""
import os
from cement.ext import ext_argcomplete
from cement.ext.ext_argparse import ArgparseController, expose
from cement.utils import test
from cement.utils.misc import rando
APP = rando()[:12]
class MyBaseController(ArgparseController):
class Meta:
label = 'base'
@expose()
def default(self):
pass
class ArgcompleteExtTestCase(test.CementExtTestCase):
def setUp(self):
super(ArgcompleteExtTestCase, self).setUp()
self.app = self.make_app(APP,
argv=['default'],
base_controller=MyBaseController,
extensions=[
'argparse',
'argcomplete'
],
)
def test_argcomplete(self):
# not really sure how to test this for reals... but let's atleast get
# coverage
with self.app as app:
app.run()
|
11b16c26c182636016e7d86cd0f94963eec42556
|
project/settings/ci.py
|
project/settings/ci.py
|
# Local
from .base import *
# JWT Settings
def jwt_get_username_from_payload_handler(payload):
return payload.get('email')
JWT_AUTH = {
# 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET,
'JWT_AUDIENCE': AUTH0_CLIENT_ID,
'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler,
'JWT_AUTH_HEADER_PREFIX': 'Bearer',
'JWT_PUBLIC_KEY': jwt_public_key,
'JWT_ALGORITHM': 'RS256',
}
DATABASES['default']['TEST'] = dj_database_url.config(default=DATABASE_URL)
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
STATICFILES_STORAGE = STATIC_STORAGE
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
# Local
from .base import *
# JWT Settings
def jwt_get_username_from_payload_handler(payload):
return payload.get('email')
JWT_AUTH = {
# 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET,
'JWT_AUDIENCE': AUTH0_CLIENT_ID,
'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler,
'JWT_AUTH_HEADER_PREFIX': 'Bearer',
'JWT_PUBLIC_KEY': jwt_public_key,
'JWT_ALGORITHM': 'RS256',
}
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
STATICFILES_STORAGE = STATIC_STORAGE
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
Revert "Attempt to bypass test database"
|
Revert "Attempt to bypass test database"
This reverts commit 889713c8c4c7151ba06448a3993778a91d2abfd6.
|
Python
|
bsd-2-clause
|
barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore-django,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore
|
# Local
from .base import *
# JWT Settings
def jwt_get_username_from_payload_handler(payload):
return payload.get('email')
JWT_AUTH = {
# 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET,
'JWT_AUDIENCE': AUTH0_CLIENT_ID,
'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler,
'JWT_AUTH_HEADER_PREFIX': 'Bearer',
'JWT_PUBLIC_KEY': jwt_public_key,
'JWT_ALGORITHM': 'RS256',
}
DATABASES['default']['TEST'] = dj_database_url.config(default=DATABASE_URL)
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
STATICFILES_STORAGE = STATIC_STORAGE
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
Revert "Attempt to bypass test database"
This reverts commit 889713c8c4c7151ba06448a3993778a91d2abfd6.
|
# Local
from .base import *
# JWT Settings
def jwt_get_username_from_payload_handler(payload):
return payload.get('email')
JWT_AUTH = {
# 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET,
'JWT_AUDIENCE': AUTH0_CLIENT_ID,
'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler,
'JWT_AUTH_HEADER_PREFIX': 'Bearer',
'JWT_PUBLIC_KEY': jwt_public_key,
'JWT_ALGORITHM': 'RS256',
}
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
STATICFILES_STORAGE = STATIC_STORAGE
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
<commit_before># Local
from .base import *
# JWT Settings
def jwt_get_username_from_payload_handler(payload):
return payload.get('email')
JWT_AUTH = {
# 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET,
'JWT_AUDIENCE': AUTH0_CLIENT_ID,
'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler,
'JWT_AUTH_HEADER_PREFIX': 'Bearer',
'JWT_PUBLIC_KEY': jwt_public_key,
'JWT_ALGORITHM': 'RS256',
}
DATABASES['default']['TEST'] = dj_database_url.config(default=DATABASE_URL)
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
STATICFILES_STORAGE = STATIC_STORAGE
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
<commit_msg>Revert "Attempt to bypass test database"
This reverts commit 889713c8c4c7151ba06448a3993778a91d2abfd6.<commit_after>
|
# Local
from .base import *
# JWT Settings
def jwt_get_username_from_payload_handler(payload):
return payload.get('email')
JWT_AUTH = {
# 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET,
'JWT_AUDIENCE': AUTH0_CLIENT_ID,
'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler,
'JWT_AUTH_HEADER_PREFIX': 'Bearer',
'JWT_PUBLIC_KEY': jwt_public_key,
'JWT_ALGORITHM': 'RS256',
}
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
STATICFILES_STORAGE = STATIC_STORAGE
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
# Local
from .base import *
# JWT Settings
def jwt_get_username_from_payload_handler(payload):
return payload.get('email')
JWT_AUTH = {
# 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET,
'JWT_AUDIENCE': AUTH0_CLIENT_ID,
'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler,
'JWT_AUTH_HEADER_PREFIX': 'Bearer',
'JWT_PUBLIC_KEY': jwt_public_key,
'JWT_ALGORITHM': 'RS256',
}
DATABASES['default']['TEST'] = dj_database_url.config(default=DATABASE_URL)
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
STATICFILES_STORAGE = STATIC_STORAGE
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
Revert "Attempt to bypass test database"
This reverts commit 889713c8c4c7151ba06448a3993778a91d2abfd6.# Local
from .base import *
# JWT Settings
def jwt_get_username_from_payload_handler(payload):
return payload.get('email')
JWT_AUTH = {
# 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET,
'JWT_AUDIENCE': AUTH0_CLIENT_ID,
'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler,
'JWT_AUTH_HEADER_PREFIX': 'Bearer',
'JWT_PUBLIC_KEY': jwt_public_key,
'JWT_ALGORITHM': 'RS256',
}
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
STATICFILES_STORAGE = STATIC_STORAGE
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
<commit_before># Local
from .base import *
# JWT Settings
def jwt_get_username_from_payload_handler(payload):
return payload.get('email')
JWT_AUTH = {
# 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET,
'JWT_AUDIENCE': AUTH0_CLIENT_ID,
'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler,
'JWT_AUTH_HEADER_PREFIX': 'Bearer',
'JWT_PUBLIC_KEY': jwt_public_key,
'JWT_ALGORITHM': 'RS256',
}
DATABASES['default']['TEST'] = dj_database_url.config(default=DATABASE_URL)
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
STATICFILES_STORAGE = STATIC_STORAGE
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
<commit_msg>Revert "Attempt to bypass test database"
This reverts commit 889713c8c4c7151ba06448a3993778a91d2abfd6.<commit_after># Local
from .base import *
# JWT Settings
def jwt_get_username_from_payload_handler(payload):
return payload.get('email')
JWT_AUTH = {
# 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET,
'JWT_AUDIENCE': AUTH0_CLIENT_ID,
'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler,
'JWT_AUTH_HEADER_PREFIX': 'Bearer',
'JWT_PUBLIC_KEY': jwt_public_key,
'JWT_ALGORITHM': 'RS256',
}
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
STATICFILES_STORAGE = STATIC_STORAGE
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
77f99f4862ded1b8493b5895e4f9d88a3bbf722b
|
source/globals/fieldtests.py
|
source/globals/fieldtests.py
|
# -*- coding: utf-8 -*-
## \package globals.fieldtests
# MIT licensing
# See: LICENSE.txt
import wx
## Tests if a wx control/instance is enabled
#
# Function for compatibility between wx versions
# \param enabled
# \b \e bool : Check if enabled or disabled
def FieldEnabled(field, enabled=True):
if wx.MAJOR_VERSION > 2:
return field.IsThisEnabled() == enabled
else:
return field.IsEnabled() == enabled
## Tests multiple fields
#
# \return
# \b \e bool : True if all fields are enabled
def FieldsEnabled(field_list):
if isinstance(field_list, (tuple, list)):
return FieldEnabled(field_list)
for F in field_list:
if not FieldEnabled(F):
return False
return True
|
# -*- coding: utf-8 -*-
## \package globals.fieldtests
# MIT licensing
# See: LICENSE.txt
import wx
## Tests if a wx control/instance is enabled/disabled
#
# Function for compatibility between wx versions
# \param field
# \b \e wx.Window : the wx control to check
# \param enabled
# \b \e bool : Check if enabled or disabled
# \return
# \b \e bool : True if field's enabled status is same as 'enabled'
def FieldEnabled(field, enabled=True):
if wx.MAJOR_VERSION > 2:
return field.IsThisEnabled() == enabled
else:
return field.IsEnabled() == enabled
## Tests if a wx control/instance is disabled
#
# \param field
# \b \e wx.Window : The wx field to check
# \return
# \b \e : True if field is disabled
def FieldDisabled(field):
return FieldEnabled(field, False)
## Tests multiple fields
#
# \return
# \b \e bool : True if all fields are enabled
def FieldsEnabled(field_list):
if isinstance(field_list, (tuple, list)):
return FieldEnabled(field_list)
for F in field_list:
if not FieldEnabled(F):
return False
return True
|
Add function FieldDisabled to test for disabled controls
|
Add function FieldDisabled to test for disabled controls
|
Python
|
mit
|
AntumDeluge/desktop_recorder,AntumDeluge/desktop_recorder
|
# -*- coding: utf-8 -*-
## \package globals.fieldtests
# MIT licensing
# See: LICENSE.txt
import wx
## Tests if a wx control/instance is enabled
#
# Function for compatibility between wx versions
# \param enabled
# \b \e bool : Check if enabled or disabled
def FieldEnabled(field, enabled=True):
if wx.MAJOR_VERSION > 2:
return field.IsThisEnabled() == enabled
else:
return field.IsEnabled() == enabled
## Tests multiple fields
#
# \return
# \b \e bool : True if all fields are enabled
def FieldsEnabled(field_list):
if isinstance(field_list, (tuple, list)):
return FieldEnabled(field_list)
for F in field_list:
if not FieldEnabled(F):
return False
return True
Add function FieldDisabled to test for disabled controls
|
# -*- coding: utf-8 -*-
## \package globals.fieldtests
# MIT licensing
# See: LICENSE.txt
import wx
## Tests if a wx control/instance is enabled/disabled
#
# Function for compatibility between wx versions
# \param field
# \b \e wx.Window : the wx control to check
# \param enabled
# \b \e bool : Check if enabled or disabled
# \return
# \b \e bool : True if field's enabled status is same as 'enabled'
def FieldEnabled(field, enabled=True):
if wx.MAJOR_VERSION > 2:
return field.IsThisEnabled() == enabled
else:
return field.IsEnabled() == enabled
## Tests if a wx control/instance is disabled
#
# \param field
# \b \e wx.Window : The wx field to check
# \return
# \b \e : True if field is disabled
def FieldDisabled(field):
return FieldEnabled(field, False)
## Tests multiple fields
#
# \return
# \b \e bool : True if all fields are enabled
def FieldsEnabled(field_list):
if isinstance(field_list, (tuple, list)):
return FieldEnabled(field_list)
for F in field_list:
if not FieldEnabled(F):
return False
return True
|
<commit_before># -*- coding: utf-8 -*-
## \package globals.fieldtests
# MIT licensing
# See: LICENSE.txt
import wx
## Tests if a wx control/instance is enabled
#
# Function for compatibility between wx versions
# \param enabled
# \b \e bool : Check if enabled or disabled
def FieldEnabled(field, enabled=True):
if wx.MAJOR_VERSION > 2:
return field.IsThisEnabled() == enabled
else:
return field.IsEnabled() == enabled
## Tests multiple fields
#
# \return
# \b \e bool : True if all fields are enabled
def FieldsEnabled(field_list):
if isinstance(field_list, (tuple, list)):
return FieldEnabled(field_list)
for F in field_list:
if not FieldEnabled(F):
return False
return True
<commit_msg>Add function FieldDisabled to test for disabled controls<commit_after>
|
# -*- coding: utf-8 -*-
## \package globals.fieldtests
# MIT licensing
# See: LICENSE.txt
import wx
## Tests if a wx control/instance is enabled/disabled
#
# Function for compatibility between wx versions
# \param field
# \b \e wx.Window : the wx control to check
# \param enabled
# \b \e bool : Check if enabled or disabled
# \return
# \b \e bool : True if field's enabled status is same as 'enabled'
def FieldEnabled(field, enabled=True):
if wx.MAJOR_VERSION > 2:
return field.IsThisEnabled() == enabled
else:
return field.IsEnabled() == enabled
## Tests if a wx control/instance is disabled
#
# \param field
# \b \e wx.Window : The wx field to check
# \return
# \b \e : True if field is disabled
def FieldDisabled(field):
return FieldEnabled(field, False)
## Tests multiple fields
#
# \return
# \b \e bool : True if all fields are enabled
def FieldsEnabled(field_list):
if isinstance(field_list, (tuple, list)):
return FieldEnabled(field_list)
for F in field_list:
if not FieldEnabled(F):
return False
return True
|
# -*- coding: utf-8 -*-
## \package globals.fieldtests
# MIT licensing
# See: LICENSE.txt
import wx
## Tests if a wx control/instance is enabled
#
# Function for compatibility between wx versions
# \param enabled
# \b \e bool : Check if enabled or disabled
def FieldEnabled(field, enabled=True):
if wx.MAJOR_VERSION > 2:
return field.IsThisEnabled() == enabled
else:
return field.IsEnabled() == enabled
## Tests multiple fields
#
# \return
# \b \e bool : True if all fields are enabled
def FieldsEnabled(field_list):
if isinstance(field_list, (tuple, list)):
return FieldEnabled(field_list)
for F in field_list:
if not FieldEnabled(F):
return False
return True
Add function FieldDisabled to test for disabled controls# -*- coding: utf-8 -*-
## \package globals.fieldtests
# MIT licensing
# See: LICENSE.txt
import wx
## Tests if a wx control/instance is enabled/disabled
#
# Function for compatibility between wx versions
# \param field
# \b \e wx.Window : the wx control to check
# \param enabled
# \b \e bool : Check if enabled or disabled
# \return
# \b \e bool : True if field's enabled status is same as 'enabled'
def FieldEnabled(field, enabled=True):
if wx.MAJOR_VERSION > 2:
return field.IsThisEnabled() == enabled
else:
return field.IsEnabled() == enabled
## Tests if a wx control/instance is disabled
#
# \param field
# \b \e wx.Window : The wx field to check
# \return
# \b \e : True if field is disabled
def FieldDisabled(field):
return FieldEnabled(field, False)
## Tests multiple fields
#
# \return
# \b \e bool : True if all fields are enabled
def FieldsEnabled(field_list):
if isinstance(field_list, (tuple, list)):
return FieldEnabled(field_list)
for F in field_list:
if not FieldEnabled(F):
return False
return True
|
<commit_before># -*- coding: utf-8 -*-
## \package globals.fieldtests
# MIT licensing
# See: LICENSE.txt
import wx
## Tests if a wx control/instance is enabled
#
# Function for compatibility between wx versions
# \param enabled
# \b \e bool : Check if enabled or disabled
def FieldEnabled(field, enabled=True):
if wx.MAJOR_VERSION > 2:
return field.IsThisEnabled() == enabled
else:
return field.IsEnabled() == enabled
## Tests multiple fields
#
# \return
# \b \e bool : True if all fields are enabled
def FieldsEnabled(field_list):
if isinstance(field_list, (tuple, list)):
return FieldEnabled(field_list)
for F in field_list:
if not FieldEnabled(F):
return False
return True
<commit_msg>Add function FieldDisabled to test for disabled controls<commit_after># -*- coding: utf-8 -*-
## \package globals.fieldtests
# MIT licensing
# See: LICENSE.txt
import wx
## Tests if a wx control/instance is enabled/disabled
#
# Function for compatibility between wx versions
# \param field
# \b \e wx.Window : the wx control to check
# \param enabled
# \b \e bool : Check if enabled or disabled
# \return
# \b \e bool : True if field's enabled status is same as 'enabled'
def FieldEnabled(field, enabled=True):
if wx.MAJOR_VERSION > 2:
return field.IsThisEnabled() == enabled
else:
return field.IsEnabled() == enabled
## Tests if a wx control/instance is disabled
#
# \param field
# \b \e wx.Window : The wx field to check
# \return
# \b \e : True if field is disabled
def FieldDisabled(field):
return FieldEnabled(field, False)
## Tests multiple fields
#
# \return
# \b \e bool : True if all fields are enabled
def FieldsEnabled(field_list):
if isinstance(field_list, (tuple, list)):
return FieldEnabled(field_list)
for F in field_list:
if not FieldEnabled(F):
return False
return True
|
6a2fb450eb51d46fe4ab53dd4095527ecdcc9266
|
tests/laundry_test.py
|
tests/laundry_test.py
|
import unittest
from penn import Laundry
class TestLaundry(unittest.TestCase):
def setUp(self):
self.laundry = Laundry()
def test_all(self):
data = self.laundry.all_status()
self.assertEquals('Class of 1925 House', data[0]['name'])
self.assertEquals(55, len(data))
def test_single_hall(self):
for i in range(5):
data = self.laundry.hall_status(i)
self.assertEquals(data['machines'][0]['number'], '1')
|
from nose.tools import ok_, eq_
from penn import Laundry
class TestLaundry():
def setUp(self):
self.laundry = Laundry()
def test_all(self):
data = self.laundry.all_status()
eq_(55, len(data))
eq_('Class of 1925 House', data[0]['name'])
# Check all halls have appropriate data points
for i, hall in enumerate(data):
eq_(hall['hall_no'], i)
ok_(hall['dryers_available'] >= 0)
ok_(hall['dryers_in_use'] >= 0)
ok_(hall['washers_available'] >= 0)
ok_(hall['washers_in_use'] >= 0)
def test_single_hall(self):
for i in range(1):
data = self.laundry.hall_status(i)
machines = data['machines']
# Check all machines have appropriate data points
for i, machine in enumerate(machines):
eq_(machine['number'], str(i + 1))
ok_('available' in machine)
ok_('machine_type' in machine)
ok_('time_left' in machine)
|
Add more rigorous laundry tests
|
Add more rigorous laundry tests
|
Python
|
mit
|
pennlabs/penn-sdk-python,pennlabs/penn-sdk-python
|
import unittest
from penn import Laundry
class TestLaundry(unittest.TestCase):
def setUp(self):
self.laundry = Laundry()
def test_all(self):
data = self.laundry.all_status()
self.assertEquals('Class of 1925 House', data[0]['name'])
self.assertEquals(55, len(data))
def test_single_hall(self):
for i in range(5):
data = self.laundry.hall_status(i)
self.assertEquals(data['machines'][0]['number'], '1')
Add more rigorous laundry tests
|
from nose.tools import ok_, eq_
from penn import Laundry
class TestLaundry():
def setUp(self):
self.laundry = Laundry()
def test_all(self):
data = self.laundry.all_status()
eq_(55, len(data))
eq_('Class of 1925 House', data[0]['name'])
# Check all halls have appropriate data points
for i, hall in enumerate(data):
eq_(hall['hall_no'], i)
ok_(hall['dryers_available'] >= 0)
ok_(hall['dryers_in_use'] >= 0)
ok_(hall['washers_available'] >= 0)
ok_(hall['washers_in_use'] >= 0)
def test_single_hall(self):
for i in range(1):
data = self.laundry.hall_status(i)
machines = data['machines']
# Check all machines have appropriate data points
for i, machine in enumerate(machines):
eq_(machine['number'], str(i + 1))
ok_('available' in machine)
ok_('machine_type' in machine)
ok_('time_left' in machine)
|
<commit_before>import unittest
from penn import Laundry
class TestLaundry(unittest.TestCase):
def setUp(self):
self.laundry = Laundry()
def test_all(self):
data = self.laundry.all_status()
self.assertEquals('Class of 1925 House', data[0]['name'])
self.assertEquals(55, len(data))
def test_single_hall(self):
for i in range(5):
data = self.laundry.hall_status(i)
self.assertEquals(data['machines'][0]['number'], '1')
<commit_msg>Add more rigorous laundry tests<commit_after>
|
from nose.tools import ok_, eq_
from penn import Laundry
class TestLaundry():
def setUp(self):
self.laundry = Laundry()
def test_all(self):
data = self.laundry.all_status()
eq_(55, len(data))
eq_('Class of 1925 House', data[0]['name'])
# Check all halls have appropriate data points
for i, hall in enumerate(data):
eq_(hall['hall_no'], i)
ok_(hall['dryers_available'] >= 0)
ok_(hall['dryers_in_use'] >= 0)
ok_(hall['washers_available'] >= 0)
ok_(hall['washers_in_use'] >= 0)
def test_single_hall(self):
for i in range(1):
data = self.laundry.hall_status(i)
machines = data['machines']
# Check all machines have appropriate data points
for i, machine in enumerate(machines):
eq_(machine['number'], str(i + 1))
ok_('available' in machine)
ok_('machine_type' in machine)
ok_('time_left' in machine)
|
import unittest
from penn import Laundry
class TestLaundry(unittest.TestCase):
def setUp(self):
self.laundry = Laundry()
def test_all(self):
data = self.laundry.all_status()
self.assertEquals('Class of 1925 House', data[0]['name'])
self.assertEquals(55, len(data))
def test_single_hall(self):
for i in range(5):
data = self.laundry.hall_status(i)
self.assertEquals(data['machines'][0]['number'], '1')
Add more rigorous laundry testsfrom nose.tools import ok_, eq_
from penn import Laundry
class TestLaundry():
def setUp(self):
self.laundry = Laundry()
def test_all(self):
data = self.laundry.all_status()
eq_(55, len(data))
eq_('Class of 1925 House', data[0]['name'])
# Check all halls have appropriate data points
for i, hall in enumerate(data):
eq_(hall['hall_no'], i)
ok_(hall['dryers_available'] >= 0)
ok_(hall['dryers_in_use'] >= 0)
ok_(hall['washers_available'] >= 0)
ok_(hall['washers_in_use'] >= 0)
def test_single_hall(self):
for i in range(1):
data = self.laundry.hall_status(i)
machines = data['machines']
# Check all machines have appropriate data points
for i, machine in enumerate(machines):
eq_(machine['number'], str(i + 1))
ok_('available' in machine)
ok_('machine_type' in machine)
ok_('time_left' in machine)
|
<commit_before>import unittest
from penn import Laundry
class TestLaundry(unittest.TestCase):
def setUp(self):
self.laundry = Laundry()
def test_all(self):
data = self.laundry.all_status()
self.assertEquals('Class of 1925 House', data[0]['name'])
self.assertEquals(55, len(data))
def test_single_hall(self):
for i in range(5):
data = self.laundry.hall_status(i)
self.assertEquals(data['machines'][0]['number'], '1')
<commit_msg>Add more rigorous laundry tests<commit_after>from nose.tools import ok_, eq_
from penn import Laundry
class TestLaundry():
def setUp(self):
self.laundry = Laundry()
def test_all(self):
data = self.laundry.all_status()
eq_(55, len(data))
eq_('Class of 1925 House', data[0]['name'])
# Check all halls have appropriate data points
for i, hall in enumerate(data):
eq_(hall['hall_no'], i)
ok_(hall['dryers_available'] >= 0)
ok_(hall['dryers_in_use'] >= 0)
ok_(hall['washers_available'] >= 0)
ok_(hall['washers_in_use'] >= 0)
def test_single_hall(self):
for i in range(1):
data = self.laundry.hall_status(i)
machines = data['machines']
# Check all machines have appropriate data points
for i, machine in enumerate(machines):
eq_(machine['number'], str(i + 1))
ok_('available' in machine)
ok_('machine_type' in machine)
ok_('time_left' in machine)
|
589dac7bf0305ec1289b2f81fe8c03cb61260238
|
tools/boilerplate_data/init.py
|
tools/boilerplate_data/init.py
|
<%inherit file="layout.py"/>
from .backend import ${r.name}Backend
__all__ = ['${r.name}Backend']
|
<%inherit file="layout.py"/>
from .backend import ${r.classname}Backend
__all__ = ['${r.classname}Backend']
|
Fix missing use of the class name
|
boilerplate: Fix missing use of the class name
|
Python
|
agpl-3.0
|
sputnick-dev/weboob,RouxRC/weboob,sputnick-dev/weboob,willprice/weboob,laurent-george/weboob,Konubinix/weboob,RouxRC/weboob,Boussadia/weboob,RouxRC/weboob,yannrouillard/weboob,frankrousseau/weboob,yannrouillard/weboob,franek/weboob,Boussadia/weboob,laurent-george/weboob,Konubinix/weboob,Boussadia/weboob,laurent-george/weboob,willprice/weboob,franek/weboob,franek/weboob,Boussadia/weboob,sputnick-dev/weboob,yannrouillard/weboob,nojhan/weboob-devel,nojhan/weboob-devel,Konubinix/weboob,frankrousseau/weboob,nojhan/weboob-devel,frankrousseau/weboob,willprice/weboob
|
<%inherit file="layout.py"/>
from .backend import ${r.name}Backend
__all__ = ['${r.name}Backend']
boilerplate: Fix missing use of the class name
|
<%inherit file="layout.py"/>
from .backend import ${r.classname}Backend
__all__ = ['${r.classname}Backend']
|
<commit_before><%inherit file="layout.py"/>
from .backend import ${r.name}Backend
__all__ = ['${r.name}Backend']
<commit_msg>boilerplate: Fix missing use of the class name<commit_after>
|
<%inherit file="layout.py"/>
from .backend import ${r.classname}Backend
__all__ = ['${r.classname}Backend']
|
<%inherit file="layout.py"/>
from .backend import ${r.name}Backend
__all__ = ['${r.name}Backend']
boilerplate: Fix missing use of the class name<%inherit file="layout.py"/>
from .backend import ${r.classname}Backend
__all__ = ['${r.classname}Backend']
|
<commit_before><%inherit file="layout.py"/>
from .backend import ${r.name}Backend
__all__ = ['${r.name}Backend']
<commit_msg>boilerplate: Fix missing use of the class name<commit_after><%inherit file="layout.py"/>
from .backend import ${r.classname}Backend
__all__ = ['${r.classname}Backend']
|
9bb19e21ed7f3b10af9a218cf55ea3a19ee4393c
|
tests/test_command.py
|
tests/test_command.py
|
"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
|
"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
def test_help_long(self):
"""Parse '--help' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, 'print_help') as print_help:
self.assertRaises(SystemExit, parser.parse_args, ['--help'])
print_help.assert_called_once_with()
|
Add command test for '--help' option
|
Add command test for '--help' option
Check calling 'print_help' method.
|
Python
|
apache-2.0
|
ma8ma/yanico
|
"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
Add command test for '--help' option
Check calling 'print_help' method.
|
"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
def test_help_long(self):
"""Parse '--help' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, 'print_help') as print_help:
self.assertRaises(SystemExit, parser.parse_args, ['--help'])
print_help.assert_called_once_with()
|
<commit_before>"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
<commit_msg>Add command test for '--help' option
Check calling 'print_help' method.<commit_after>
|
"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
def test_help_long(self):
"""Parse '--help' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, 'print_help') as print_help:
self.assertRaises(SystemExit, parser.parse_args, ['--help'])
print_help.assert_called_once_with()
|
"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
Add command test for '--help' option
Check calling 'print_help' method."""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
def test_help_long(self):
"""Parse '--help' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, 'print_help') as print_help:
self.assertRaises(SystemExit, parser.parse_args, ['--help'])
print_help.assert_called_once_with()
|
<commit_before>"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
<commit_msg>Add command test for '--help' option
Check calling 'print_help' method.<commit_after>"""Unittest of command entry point."""
# Copyright 2015 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import unittest.mock as mock
import yanico
import yanico.command
class TestCreateMainParser(unittest.TestCase):
"""yanico.command.create_main_parser() test."""
def test_version(self):
"""Parse '--version' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, '_print_message') as print_message:
self.assertRaises(SystemExit, parser.parse_args, ['--version'])
print_message.assert_called_once_with('yanico version ' +
yanico.__version__ + '\n',
mock.ANY)
def test_help_long(self):
"""Parse '--help' option."""
parser = yanico.command.create_main_parser()
with mock.patch.object(parser, 'print_help') as print_help:
self.assertRaises(SystemExit, parser.parse_args, ['--help'])
print_help.assert_called_once_with()
|
01920b5dcced36e72a5623bf9c08c5cecfa38753
|
src/scrapy_redis/dupefilter.py
|
src/scrapy_redis/dupefilter.py
|
import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import connection
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplication filter"""
def __init__(self, server, key):
"""Initialize duplication filter
Parameters
----------
server : Redis instance
key : str
Where to store fingerprints
"""
self.server = server
self.key = key
@classmethod
def from_settings(cls, settings):
server = connection.from_settings(settings)
# create one-time key. needed to support to use this
# class as standalone dupefilter with scrapy's default scheduler
# if scrapy passes spider on open() method this wouldn't be needed
key = "dupefilter:%s" % int(time.time())
return cls(server, key)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def request_seen(self, request):
fp = request_fingerprint(request)
added = self.server.sadd(self.key, fp)
return not added
def close(self, reason):
"""Delete data on close. Called by scrapy's scheduler"""
self.clear()
def clear(self):
"""Clears fingerprints data"""
self.server.delete(self.key)
|
import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import connection
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplication filter"""
def __init__(self, server, key):
"""Initialize duplication filter
Parameters
----------
server : Redis instance
key : str
Where to store fingerprints
"""
self.server = server
self.key = key
@classmethod
def from_settings(cls, settings):
server = connection.from_settings(settings)
# create one-time key. needed to support to use this
# class as standalone dupefilter with scrapy's default scheduler
# if scrapy passes spider on open() method this wouldn't be needed
key = "dupefilter:%s" % int(time.time())
return cls(server, key)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def request_seen(self, request):
fp = self.request_fingerprint(request)
added = self.server.sadd(self.key, fp)
return not added
def request_fingerprint(self, request):
return request_fingerprint(request)
def close(self, reason):
"""Delete data on close. Called by scrapy's scheduler"""
self.clear()
def clear(self):
"""Clears fingerprints data"""
self.server.delete(self.key)
|
Allow to override request fingerprint call.
|
Allow to override request fingerprint call.
|
Python
|
mit
|
darkrho/scrapy-redis,rolando/scrapy-redis
|
import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import connection
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplication filter"""
def __init__(self, server, key):
"""Initialize duplication filter
Parameters
----------
server : Redis instance
key : str
Where to store fingerprints
"""
self.server = server
self.key = key
@classmethod
def from_settings(cls, settings):
server = connection.from_settings(settings)
# create one-time key. needed to support to use this
# class as standalone dupefilter with scrapy's default scheduler
# if scrapy passes spider on open() method this wouldn't be needed
key = "dupefilter:%s" % int(time.time())
return cls(server, key)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def request_seen(self, request):
fp = request_fingerprint(request)
added = self.server.sadd(self.key, fp)
return not added
def close(self, reason):
"""Delete data on close. Called by scrapy's scheduler"""
self.clear()
def clear(self):
"""Clears fingerprints data"""
self.server.delete(self.key)
Allow to override request fingerprint call.
|
import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import connection
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplication filter"""
def __init__(self, server, key):
"""Initialize duplication filter
Parameters
----------
server : Redis instance
key : str
Where to store fingerprints
"""
self.server = server
self.key = key
@classmethod
def from_settings(cls, settings):
server = connection.from_settings(settings)
# create one-time key. needed to support to use this
# class as standalone dupefilter with scrapy's default scheduler
# if scrapy passes spider on open() method this wouldn't be needed
key = "dupefilter:%s" % int(time.time())
return cls(server, key)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def request_seen(self, request):
fp = self.request_fingerprint(request)
added = self.server.sadd(self.key, fp)
return not added
def request_fingerprint(self, request):
return request_fingerprint(request)
def close(self, reason):
"""Delete data on close. Called by scrapy's scheduler"""
self.clear()
def clear(self):
"""Clears fingerprints data"""
self.server.delete(self.key)
|
<commit_before>import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import connection
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplication filter"""
def __init__(self, server, key):
"""Initialize duplication filter
Parameters
----------
server : Redis instance
key : str
Where to store fingerprints
"""
self.server = server
self.key = key
@classmethod
def from_settings(cls, settings):
server = connection.from_settings(settings)
# create one-time key. needed to support to use this
# class as standalone dupefilter with scrapy's default scheduler
# if scrapy passes spider on open() method this wouldn't be needed
key = "dupefilter:%s" % int(time.time())
return cls(server, key)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def request_seen(self, request):
fp = request_fingerprint(request)
added = self.server.sadd(self.key, fp)
return not added
def close(self, reason):
"""Delete data on close. Called by scrapy's scheduler"""
self.clear()
def clear(self):
"""Clears fingerprints data"""
self.server.delete(self.key)
<commit_msg>Allow to override request fingerprint call.<commit_after>
|
import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import connection
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplication filter"""
def __init__(self, server, key):
"""Initialize duplication filter
Parameters
----------
server : Redis instance
key : str
Where to store fingerprints
"""
self.server = server
self.key = key
@classmethod
def from_settings(cls, settings):
server = connection.from_settings(settings)
# create one-time key. needed to support to use this
# class as standalone dupefilter with scrapy's default scheduler
# if scrapy passes spider on open() method this wouldn't be needed
key = "dupefilter:%s" % int(time.time())
return cls(server, key)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def request_seen(self, request):
fp = self.request_fingerprint(request)
added = self.server.sadd(self.key, fp)
return not added
def request_fingerprint(self, request):
return request_fingerprint(request)
def close(self, reason):
"""Delete data on close. Called by scrapy's scheduler"""
self.clear()
def clear(self):
"""Clears fingerprints data"""
self.server.delete(self.key)
|
import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import connection
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplication filter"""
def __init__(self, server, key):
"""Initialize duplication filter
Parameters
----------
server : Redis instance
key : str
Where to store fingerprints
"""
self.server = server
self.key = key
@classmethod
def from_settings(cls, settings):
server = connection.from_settings(settings)
# create one-time key. needed to support to use this
# class as standalone dupefilter with scrapy's default scheduler
# if scrapy passes spider on open() method this wouldn't be needed
key = "dupefilter:%s" % int(time.time())
return cls(server, key)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def request_seen(self, request):
fp = request_fingerprint(request)
added = self.server.sadd(self.key, fp)
return not added
def close(self, reason):
"""Delete data on close. Called by scrapy's scheduler"""
self.clear()
def clear(self):
"""Clears fingerprints data"""
self.server.delete(self.key)
Allow to override request fingerprint call.import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import connection
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplication filter"""
def __init__(self, server, key):
"""Initialize duplication filter
Parameters
----------
server : Redis instance
key : str
Where to store fingerprints
"""
self.server = server
self.key = key
@classmethod
def from_settings(cls, settings):
server = connection.from_settings(settings)
# create one-time key. needed to support to use this
# class as standalone dupefilter with scrapy's default scheduler
# if scrapy passes spider on open() method this wouldn't be needed
key = "dupefilter:%s" % int(time.time())
return cls(server, key)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def request_seen(self, request):
fp = self.request_fingerprint(request)
added = self.server.sadd(self.key, fp)
return not added
def request_fingerprint(self, request):
return request_fingerprint(request)
def close(self, reason):
"""Delete data on close. Called by scrapy's scheduler"""
self.clear()
def clear(self):
"""Clears fingerprints data"""
self.server.delete(self.key)
|
<commit_before>import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import connection
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplication filter"""
def __init__(self, server, key):
"""Initialize duplication filter
Parameters
----------
server : Redis instance
key : str
Where to store fingerprints
"""
self.server = server
self.key = key
@classmethod
def from_settings(cls, settings):
server = connection.from_settings(settings)
# create one-time key. needed to support to use this
# class as standalone dupefilter with scrapy's default scheduler
# if scrapy passes spider on open() method this wouldn't be needed
key = "dupefilter:%s" % int(time.time())
return cls(server, key)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def request_seen(self, request):
fp = request_fingerprint(request)
added = self.server.sadd(self.key, fp)
return not added
def close(self, reason):
"""Delete data on close. Called by scrapy's scheduler"""
self.clear()
def clear(self):
"""Clears fingerprints data"""
self.server.delete(self.key)
<commit_msg>Allow to override request fingerprint call.<commit_after>import time
from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from . import connection
class RFPDupeFilter(BaseDupeFilter):
"""Redis-based request duplication filter"""
def __init__(self, server, key):
"""Initialize duplication filter
Parameters
----------
server : Redis instance
key : str
Where to store fingerprints
"""
self.server = server
self.key = key
@classmethod
def from_settings(cls, settings):
server = connection.from_settings(settings)
# create one-time key. needed to support to use this
# class as standalone dupefilter with scrapy's default scheduler
# if scrapy passes spider on open() method this wouldn't be needed
key = "dupefilter:%s" % int(time.time())
return cls(server, key)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def request_seen(self, request):
fp = self.request_fingerprint(request)
added = self.server.sadd(self.key, fp)
return not added
def request_fingerprint(self, request):
return request_fingerprint(request)
def close(self, reason):
"""Delete data on close. Called by scrapy's scheduler"""
self.clear()
def clear(self):
"""Clears fingerprints data"""
self.server.delete(self.key)
|
333afea8d8a548948f24745490c700c98500e22f
|
mlab-ns-simulator/mlabsim/lookup.py
|
mlab-ns-simulator/mlabsim/lookup.py
|
"""
This simulates the mlab-ns lookup request, whose code lives here:
https://code.google.com/p/m-lab/source/browse/server/mlabns/handlers/lookup.py?repo=ns
The difference in this module is that we don't support features which
ooni-support does not use and we augment features which ooni-support
would rely on if mlab-ns were to add those features.
Also, this is a twisted web server rather than appengine.
"""
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
class LookupSimulatorResource (resource.Resource):
def __init__(self, db):
# FIXME - db is some simple memory structure holding info;
# the details will solidfy soon. This resource reads from
# this structure.
resource.Resource.__init__(self)
self._db = db
def render_GET(self, request):
# FIXME: This is not implemented yet.
request.setResponseCode(500, 'NOT IMPLEMENTED')
request.finish()
return NOT_DONE_YET
|
"""
This simulates the mlab-ns lookup request, whose code lives here:
https://code.google.com/p/m-lab/source/browse/server/mlabns/handlers/lookup.py?repo=ns
The difference in this module is that we don't support features which
ooni-support does not use and we augment features which ooni-support
would rely on if mlab-ns were to add those features.
Also, this is a twisted web server rather than appengine.
"""
import json
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
class LookupSimulatorResource (resource.Resource):
def __init__(self, db):
"""db is a dict mapping { fqdn -> other_stuff }; inserts come from mlabsim.update."""
resource.Resource.__init__(self)
self._db = db
def render_GET(self, request):
if request.args['match'] == ['all'] and request.args.get('format', ['json']) == ['json']:
request.setResponseCode(200, 'ok')
request.write(json.dumps(self._db.values(), indent=2, sort_keys=True))
request.finish()
else:
request.setResponseCode(400, 'invalid')
request.finish()
return NOT_DONE_YET
|
Implement the current ``GET /ooni`` api.
|
Implement the current ``GET /ooni`` api.
|
Python
|
apache-2.0
|
hellais/ooni-support,m-lab/ooni-support,m-lab/ooni-support,hellais/ooni-support
|
"""
This simulates the mlab-ns lookup request, whose code lives here:
https://code.google.com/p/m-lab/source/browse/server/mlabns/handlers/lookup.py?repo=ns
The difference in this module is that we don't support features which
ooni-support does not use and we augment features which ooni-support
would rely on if mlab-ns were to add those features.
Also, this is a twisted web server rather than appengine.
"""
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
class LookupSimulatorResource (resource.Resource):
def __init__(self, db):
# FIXME - db is some simple memory structure holding info;
# the details will solidfy soon. This resource reads from
# this structure.
resource.Resource.__init__(self)
self._db = db
def render_GET(self, request):
# FIXME: This is not implemented yet.
request.setResponseCode(500, 'NOT IMPLEMENTED')
request.finish()
return NOT_DONE_YET
Implement the current ``GET /ooni`` api.
|
"""
This simulates the mlab-ns lookup request, whose code lives here:
https://code.google.com/p/m-lab/source/browse/server/mlabns/handlers/lookup.py?repo=ns
The difference in this module is that we don't support features which
ooni-support does not use and we augment features which ooni-support
would rely on if mlab-ns were to add those features.
Also, this is a twisted web server rather than appengine.
"""
import json
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
class LookupSimulatorResource (resource.Resource):
def __init__(self, db):
"""db is a dict mapping { fqdn -> other_stuff }; inserts come from mlabsim.update."""
resource.Resource.__init__(self)
self._db = db
def render_GET(self, request):
if request.args['match'] == ['all'] and request.args.get('format', ['json']) == ['json']:
request.setResponseCode(200, 'ok')
request.write(json.dumps(self._db.values(), indent=2, sort_keys=True))
request.finish()
else:
request.setResponseCode(400, 'invalid')
request.finish()
return NOT_DONE_YET
|
<commit_before>"""
This simulates the mlab-ns lookup request, whose code lives here:
https://code.google.com/p/m-lab/source/browse/server/mlabns/handlers/lookup.py?repo=ns
The difference in this module is that we don't support features which
ooni-support does not use and we augment features which ooni-support
would rely on if mlab-ns were to add those features.
Also, this is a twisted web server rather than appengine.
"""
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
class LookupSimulatorResource (resource.Resource):
def __init__(self, db):
# FIXME - db is some simple memory structure holding info;
# the details will solidfy soon. This resource reads from
# this structure.
resource.Resource.__init__(self)
self._db = db
def render_GET(self, request):
# FIXME: This is not implemented yet.
request.setResponseCode(500, 'NOT IMPLEMENTED')
request.finish()
return NOT_DONE_YET
<commit_msg>Implement the current ``GET /ooni`` api.<commit_after>
|
"""
This simulates the mlab-ns lookup request, whose code lives here:
https://code.google.com/p/m-lab/source/browse/server/mlabns/handlers/lookup.py?repo=ns
The difference in this module is that we don't support features which
ooni-support does not use and we augment features which ooni-support
would rely on if mlab-ns were to add those features.
Also, this is a twisted web server rather than appengine.
"""
import json
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
class LookupSimulatorResource (resource.Resource):
def __init__(self, db):
"""db is a dict mapping { fqdn -> other_stuff }; inserts come from mlabsim.update."""
resource.Resource.__init__(self)
self._db = db
def render_GET(self, request):
if request.args['match'] == ['all'] and request.args.get('format', ['json']) == ['json']:
request.setResponseCode(200, 'ok')
request.write(json.dumps(self._db.values(), indent=2, sort_keys=True))
request.finish()
else:
request.setResponseCode(400, 'invalid')
request.finish()
return NOT_DONE_YET
|
"""
This simulates the mlab-ns lookup request, whose code lives here:
https://code.google.com/p/m-lab/source/browse/server/mlabns/handlers/lookup.py?repo=ns
The difference in this module is that we don't support features which
ooni-support does not use and we augment features which ooni-support
would rely on if mlab-ns were to add those features.
Also, this is a twisted web server rather than appengine.
"""
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
class LookupSimulatorResource (resource.Resource):
def __init__(self, db):
# FIXME - db is some simple memory structure holding info;
# the details will solidfy soon. This resource reads from
# this structure.
resource.Resource.__init__(self)
self._db = db
def render_GET(self, request):
# FIXME: This is not implemented yet.
request.setResponseCode(500, 'NOT IMPLEMENTED')
request.finish()
return NOT_DONE_YET
Implement the current ``GET /ooni`` api."""
This simulates the mlab-ns lookup request, whose code lives here:
https://code.google.com/p/m-lab/source/browse/server/mlabns/handlers/lookup.py?repo=ns
The difference in this module is that we don't support features which
ooni-support does not use and we augment features which ooni-support
would rely on if mlab-ns were to add those features.
Also, this is a twisted web server rather than appengine.
"""
import json
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
class LookupSimulatorResource (resource.Resource):
def __init__(self, db):
"""db is a dict mapping { fqdn -> other_stuff }; inserts come from mlabsim.update."""
resource.Resource.__init__(self)
self._db = db
def render_GET(self, request):
if request.args['match'] == ['all'] and request.args.get('format', ['json']) == ['json']:
request.setResponseCode(200, 'ok')
request.write(json.dumps(self._db.values(), indent=2, sort_keys=True))
request.finish()
else:
request.setResponseCode(400, 'invalid')
request.finish()
return NOT_DONE_YET
|
<commit_before>"""
This simulates the mlab-ns lookup request, whose code lives here:
https://code.google.com/p/m-lab/source/browse/server/mlabns/handlers/lookup.py?repo=ns
The difference in this module is that we don't support features which
ooni-support does not use and we augment features which ooni-support
would rely on if mlab-ns were to add those features.
Also, this is a twisted web server rather than appengine.
"""
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
class LookupSimulatorResource (resource.Resource):
def __init__(self, db):
# FIXME - db is some simple memory structure holding info;
# the details will solidfy soon. This resource reads from
# this structure.
resource.Resource.__init__(self)
self._db = db
def render_GET(self, request):
# FIXME: This is not implemented yet.
request.setResponseCode(500, 'NOT IMPLEMENTED')
request.finish()
return NOT_DONE_YET
<commit_msg>Implement the current ``GET /ooni`` api.<commit_after>"""
This simulates the mlab-ns lookup request, whose code lives here:
https://code.google.com/p/m-lab/source/browse/server/mlabns/handlers/lookup.py?repo=ns
The difference in this module is that we don't support features which
ooni-support does not use and we augment features which ooni-support
would rely on if mlab-ns were to add those features.
Also, this is a twisted web server rather than appengine.
"""
import json
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
class LookupSimulatorResource (resource.Resource):
def __init__(self, db):
"""db is a dict mapping { fqdn -> other_stuff }; inserts come from mlabsim.update."""
resource.Resource.__init__(self)
self._db = db
def render_GET(self, request):
if request.args['match'] == ['all'] and request.args.get('format', ['json']) == ['json']:
request.setResponseCode(200, 'ok')
request.write(json.dumps(self._db.values(), indent=2, sort_keys=True))
request.finish()
else:
request.setResponseCode(400, 'invalid')
request.finish()
return NOT_DONE_YET
|
a057798f3e54e8d74005df10ba1f7d9b93270787
|
odbc2csv.py
|
odbc2csv.py
|
import pypyodbc
import csv
conn = pypyodbc.connect("DSN=HOSS_DB")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
print(table)
cur.execute("select * from {}".format(table))
column_names = []
for d in cur.description:
column_names.append(d[0])
# file = open("{}.csv".format(table), "w", encoding="ISO-8859-1")
file = open("{}.csv".format(table), "w", encoding="utf-8")
writer = csv.writer(file)
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
|
import pypyodbc
import csv
conn = pypyodbc.connect("DSN=HOSS_DB")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
print(table)
cur.execute("select * from {}".format(table))
column_names = []
for d in cur.description:
column_names.append(d[0])
# file = open("{}.csv".format(table), "w", encoding="ISO-8859-1")
file = open("{}.csv".format(table), "w", encoding="utf-8")
writer = csv.writer(file, lineterminator='\n')
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
|
Use just newline for file terminator.
|
Use just newline for file terminator.
|
Python
|
isc
|
wablair/misc_scripts,wablair/misc_scripts,wablair/misc_scripts,wablair/misc_scripts
|
import pypyodbc
import csv
conn = pypyodbc.connect("DSN=HOSS_DB")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
print(table)
cur.execute("select * from {}".format(table))
column_names = []
for d in cur.description:
column_names.append(d[0])
# file = open("{}.csv".format(table), "w", encoding="ISO-8859-1")
file = open("{}.csv".format(table), "w", encoding="utf-8")
writer = csv.writer(file)
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
Use just newline for file terminator.
|
import pypyodbc
import csv
conn = pypyodbc.connect("DSN=HOSS_DB")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
print(table)
cur.execute("select * from {}".format(table))
column_names = []
for d in cur.description:
column_names.append(d[0])
# file = open("{}.csv".format(table), "w", encoding="ISO-8859-1")
file = open("{}.csv".format(table), "w", encoding="utf-8")
writer = csv.writer(file, lineterminator='\n')
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
|
<commit_before>import pypyodbc
import csv
conn = pypyodbc.connect("DSN=HOSS_DB")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
print(table)
cur.execute("select * from {}".format(table))
column_names = []
for d in cur.description:
column_names.append(d[0])
# file = open("{}.csv".format(table), "w", encoding="ISO-8859-1")
file = open("{}.csv".format(table), "w", encoding="utf-8")
writer = csv.writer(file)
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
<commit_msg>Use just newline for file terminator.<commit_after>
|
import pypyodbc
import csv
conn = pypyodbc.connect("DSN=HOSS_DB")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
print(table)
cur.execute("select * from {}".format(table))
column_names = []
for d in cur.description:
column_names.append(d[0])
# file = open("{}.csv".format(table), "w", encoding="ISO-8859-1")
file = open("{}.csv".format(table), "w", encoding="utf-8")
writer = csv.writer(file, lineterminator='\n')
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
|
import pypyodbc
import csv
conn = pypyodbc.connect("DSN=HOSS_DB")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
print(table)
cur.execute("select * from {}".format(table))
column_names = []
for d in cur.description:
column_names.append(d[0])
# file = open("{}.csv".format(table), "w", encoding="ISO-8859-1")
file = open("{}.csv".format(table), "w", encoding="utf-8")
writer = csv.writer(file)
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
Use just newline for file terminator.import pypyodbc
import csv
conn = pypyodbc.connect("DSN=HOSS_DB")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
print(table)
cur.execute("select * from {}".format(table))
column_names = []
for d in cur.description:
column_names.append(d[0])
# file = open("{}.csv".format(table), "w", encoding="ISO-8859-1")
file = open("{}.csv".format(table), "w", encoding="utf-8")
writer = csv.writer(file, lineterminator='\n')
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
|
<commit_before>import pypyodbc
import csv
conn = pypyodbc.connect("DSN=HOSS_DB")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
print(table)
cur.execute("select * from {}".format(table))
column_names = []
for d in cur.description:
column_names.append(d[0])
# file = open("{}.csv".format(table), "w", encoding="ISO-8859-1")
file = open("{}.csv".format(table), "w", encoding="utf-8")
writer = csv.writer(file)
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
<commit_msg>Use just newline for file terminator.<commit_after>import pypyodbc
import csv
conn = pypyodbc.connect("DSN=HOSS_DB")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
print(table)
cur.execute("select * from {}".format(table))
column_names = []
for d in cur.description:
column_names.append(d[0])
# file = open("{}.csv".format(table), "w", encoding="ISO-8859-1")
file = open("{}.csv".format(table), "w", encoding="utf-8")
writer = csv.writer(file, lineterminator='\n')
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
|
9f5418e5b755232e12ea18e85b131dbd04c74587
|
benchmarks_sphere/paper_jrn_parco_rexi_nonlinear/postprocessing_pickle.py
|
benchmarks_sphere/paper_jrn_parco_rexi_nonlinear/postprocessing_pickle.py
|
#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
p = pickle_SphereDataPhysicalDiff("_t00000000120.00000000.csv")
|
#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
# Ugly hack!
#output, retval = exec_program('ls *benchref*/*prog_h* | sort | tail -n 1 | sed "s/.*prog_h//"')
#if retval != 0:
# print(output)
# raise Exception("Something went wrong")
#output = output.replace("\n", '')
#output = output.replace("\r", '')
#p = pickle_SphereDataPhysicalDiff(output)
p = pickle_SphereDataPhysicalDiff()
|
Make postprocess pickling generic to various reference files
|
Make postprocess pickling generic to various reference files
|
Python
|
mit
|
schreiberx/sweet,schreiberx/sweet,schreiberx/sweet,schreiberx/sweet
|
#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
p = pickle_SphereDataPhysicalDiff("_t00000000120.00000000.csv")
Make postprocess pickling generic to various reference files
|
#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
# Ugly hack!
#output, retval = exec_program('ls *benchref*/*prog_h* | sort | tail -n 1 | sed "s/.*prog_h//"')
#if retval != 0:
# print(output)
# raise Exception("Something went wrong")
#output = output.replace("\n", '')
#output = output.replace("\r", '')
#p = pickle_SphereDataPhysicalDiff(output)
p = pickle_SphereDataPhysicalDiff()
|
<commit_before>#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
p = pickle_SphereDataPhysicalDiff("_t00000000120.00000000.csv")
<commit_msg>Make postprocess pickling generic to various reference files<commit_after>
|
#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
# Ugly hack!
#output, retval = exec_program('ls *benchref*/*prog_h* | sort | tail -n 1 | sed "s/.*prog_h//"')
#if retval != 0:
# print(output)
# raise Exception("Something went wrong")
#output = output.replace("\n", '')
#output = output.replace("\r", '')
#p = pickle_SphereDataPhysicalDiff(output)
p = pickle_SphereDataPhysicalDiff()
|
#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
p = pickle_SphereDataPhysicalDiff("_t00000000120.00000000.csv")
Make postprocess pickling generic to various reference files#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
# Ugly hack!
#output, retval = exec_program('ls *benchref*/*prog_h* | sort | tail -n 1 | sed "s/.*prog_h//"')
#if retval != 0:
# print(output)
# raise Exception("Something went wrong")
#output = output.replace("\n", '')
#output = output.replace("\r", '')
#p = pickle_SphereDataPhysicalDiff(output)
p = pickle_SphereDataPhysicalDiff()
|
<commit_before>#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
p = pickle_SphereDataPhysicalDiff("_t00000000120.00000000.csv")
<commit_msg>Make postprocess pickling generic to various reference files<commit_after>#! /usr/bin/env python3
import sys
import math
import glob
from sweet.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
# Ugly hack!
#output, retval = exec_program('ls *benchref*/*prog_h* | sort | tail -n 1 | sed "s/.*prog_h//"')
#if retval != 0:
# print(output)
# raise Exception("Something went wrong")
#output = output.replace("\n", '')
#output = output.replace("\r", '')
#p = pickle_SphereDataPhysicalDiff(output)
p = pickle_SphereDataPhysicalDiff()
|
36ea5e58ce97b69bfd0bf3701cbc5936bc59d100
|
install_dotfiles.py
|
install_dotfiles.py
|
#!/usr/bin/python
# install_dotfiles
# This script will build platform-specific dotfiles and create the appropriate symlinks in ~
import platform
import os
sysName = platform.system()
os.remove('bashrc')
bashrc = open('bashrc','a')
def writeSection(fileName, allowComments):
f = open(fileName,'r')
for line in f:
if line.startswith('#'):
if allowComments:
bashrc.write(line)
else:
bashrc.write(line)
if sysName == 'Linux':
writeSection('bash_linux',True)
elif sysName == 'Darwin':
writeSection('bash_mac',True)
else:
print "System not supported!"
bashrc.close()
exit(1)
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
bashrc.close()
|
#!/usr/bin/python
# install_dotfiles
# This script will build platform-specific dotfiles and create the appropriate symlinks in ~
import platform
import os
sysName = platform.system()
os.remove('bashrc')
bashrc = open('bashrc','a')
bashrc.write("#!/bin/bash\n")
bashrc.write("# This file was generated by a script. Do not edit manually!\n")
def writeSection(fileName, allowComments):
f = open(fileName,'r')
for line in f:
if line.startswith('#'):
if allowComments:
bashrc.write(line)
else:
bashrc.write(line)
if sysName == 'Linux':
bashrc.write("# ~/.bashrc: executed by bash(1) for non-login shells.\n")
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
writeSection('bash_linux',True)
elif sysName == 'Darwin':
bashrc.write("# ~/.bash_profile: executed by bash(1) for lon-login shells.\n")
writeSection('bash_mac',True)
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
else:
print "System not supported!"
bashrc.close()
exit(1)
bashrc.close()
|
Reorder writing of bashrc body sections.
|
Reorder writing of bashrc body sections.
|
Python
|
mit
|
rucker/dotfiles-manager
|
#!/usr/bin/python
# install_dotfiles
# This script will build platform-specific dotfiles and create the appropriate symlinks in ~
import platform
import os
sysName = platform.system()
os.remove('bashrc')
bashrc = open('bashrc','a')
def writeSection(fileName, allowComments):
f = open(fileName,'r')
for line in f:
if line.startswith('#'):
if allowComments:
bashrc.write(line)
else:
bashrc.write(line)
if sysName == 'Linux':
writeSection('bash_linux',True)
elif sysName == 'Darwin':
writeSection('bash_mac',True)
else:
print "System not supported!"
bashrc.close()
exit(1)
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
bashrc.close()
Reorder writing of bashrc body sections.
|
#!/usr/bin/python
# install_dotfiles
# This script will build platform-specific dotfiles and create the appropriate symlinks in ~
import platform
import os
sysName = platform.system()
os.remove('bashrc')
bashrc = open('bashrc','a')
bashrc.write("#!/bin/bash\n")
bashrc.write("# This file was generated by a script. Do not edit manually!\n")
def writeSection(fileName, allowComments):
f = open(fileName,'r')
for line in f:
if line.startswith('#'):
if allowComments:
bashrc.write(line)
else:
bashrc.write(line)
if sysName == 'Linux':
bashrc.write("# ~/.bashrc: executed by bash(1) for non-login shells.\n")
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
writeSection('bash_linux',True)
elif sysName == 'Darwin':
bashrc.write("# ~/.bash_profile: executed by bash(1) for lon-login shells.\n")
writeSection('bash_mac',True)
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
else:
print "System not supported!"
bashrc.close()
exit(1)
bashrc.close()
|
<commit_before>#!/usr/bin/python
# install_dotfiles
# This script will build platform-specific dotfiles and create the appropriate symlinks in ~
import platform
import os
sysName = platform.system()
os.remove('bashrc')
bashrc = open('bashrc','a')
def writeSection(fileName, allowComments):
f = open(fileName,'r')
for line in f:
if line.startswith('#'):
if allowComments:
bashrc.write(line)
else:
bashrc.write(line)
if sysName == 'Linux':
writeSection('bash_linux',True)
elif sysName == 'Darwin':
writeSection('bash_mac',True)
else:
print "System not supported!"
bashrc.close()
exit(1)
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
bashrc.close()
<commit_msg>Reorder writing of bashrc body sections.<commit_after>
|
#!/usr/bin/python
# install_dotfiles
# This script will build platform-specific dotfiles and create the appropriate symlinks in ~
import platform
import os
sysName = platform.system()
os.remove('bashrc')
bashrc = open('bashrc','a')
bashrc.write("#!/bin/bash\n")
bashrc.write("# This file was generated by a script. Do not edit manually!\n")
def writeSection(fileName, allowComments):
f = open(fileName,'r')
for line in f:
if line.startswith('#'):
if allowComments:
bashrc.write(line)
else:
bashrc.write(line)
if sysName == 'Linux':
bashrc.write("# ~/.bashrc: executed by bash(1) for non-login shells.\n")
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
writeSection('bash_linux',True)
elif sysName == 'Darwin':
bashrc.write("# ~/.bash_profile: executed by bash(1) for lon-login shells.\n")
writeSection('bash_mac',True)
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
else:
print "System not supported!"
bashrc.close()
exit(1)
bashrc.close()
|
#!/usr/bin/python
# install_dotfiles
# This script will build platform-specific dotfiles and create the appropriate symlinks in ~
import platform
import os
sysName = platform.system()
os.remove('bashrc')
bashrc = open('bashrc','a')
def writeSection(fileName, allowComments):
f = open(fileName,'r')
for line in f:
if line.startswith('#'):
if allowComments:
bashrc.write(line)
else:
bashrc.write(line)
if sysName == 'Linux':
writeSection('bash_linux',True)
elif sysName == 'Darwin':
writeSection('bash_mac',True)
else:
print "System not supported!"
bashrc.close()
exit(1)
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
bashrc.close()
Reorder writing of bashrc body sections.#!/usr/bin/python
# install_dotfiles
# This script will build platform-specific dotfiles and create the appropriate symlinks in ~
import platform
import os
sysName = platform.system()
os.remove('bashrc')
bashrc = open('bashrc','a')
bashrc.write("#!/bin/bash\n")
bashrc.write("# This file was generated by a script. Do not edit manually!\n")
def writeSection(fileName, allowComments):
f = open(fileName,'r')
for line in f:
if line.startswith('#'):
if allowComments:
bashrc.write(line)
else:
bashrc.write(line)
if sysName == 'Linux':
bashrc.write("# ~/.bashrc: executed by bash(1) for non-login shells.\n")
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
writeSection('bash_linux',True)
elif sysName == 'Darwin':
bashrc.write("# ~/.bash_profile: executed by bash(1) for lon-login shells.\n")
writeSection('bash_mac',True)
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
else:
print "System not supported!"
bashrc.close()
exit(1)
bashrc.close()
|
<commit_before>#!/usr/bin/python
# install_dotfiles
# This script will build platform-specific dotfiles and create the appropriate symlinks in ~
import platform
import os
sysName = platform.system()
os.remove('bashrc')
bashrc = open('bashrc','a')
def writeSection(fileName, allowComments):
f = open(fileName,'r')
for line in f:
if line.startswith('#'):
if allowComments:
bashrc.write(line)
else:
bashrc.write(line)
if sysName == 'Linux':
writeSection('bash_linux',True)
elif sysName == 'Darwin':
writeSection('bash_mac',True)
else:
print "System not supported!"
bashrc.close()
exit(1)
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
bashrc.close()
<commit_msg>Reorder writing of bashrc body sections.<commit_after>#!/usr/bin/python
# install_dotfiles
# This script will build platform-specific dotfiles and create the appropriate symlinks in ~
import platform
import os
sysName = platform.system()
os.remove('bashrc')
bashrc = open('bashrc','a')
bashrc.write("#!/bin/bash\n")
bashrc.write("# This file was generated by a script. Do not edit manually!\n")
def writeSection(fileName, allowComments):
f = open(fileName,'r')
for line in f:
if line.startswith('#'):
if allowComments:
bashrc.write(line)
else:
bashrc.write(line)
if sysName == 'Linux':
bashrc.write("# ~/.bashrc: executed by bash(1) for non-login shells.\n")
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
writeSection('bash_linux',True)
elif sysName == 'Darwin':
bashrc.write("# ~/.bash_profile: executed by bash(1) for lon-login shells.\n")
writeSection('bash_mac',True)
if os.path.isfile('bash_private'):
writeSection('bash_private',False)
writeSection('bash_common',False)
else:
print "System not supported!"
bashrc.close()
exit(1)
bashrc.close()
|
3661edd55553ff2dff27cb102a83d4751e033f2a
|
painter/management/commands/import_cards.py
|
painter/management/commands/import_cards.py
|
import tablib
from django.core.management.base import BaseCommand
from painter.models import Card
class Command(BaseCommand):
def handle(self, *args, **options):
dataset = tablib.Dataset()
|
import tablib
from django.core.management.base import BaseCommand
from painter.models import Card
class Command(BaseCommand):
help = ('Clears the database of cards, then fills it with the contents of one or' +
' more specified CSV files.')
def add_arguments(self, parser):
parser.add_argument(
'filenames',
nargs='+',
type=str,
help='One or more CSV file names. The extension is optional.',
)
def handle(self, *args, **options):
dataset = tablib.Dataset()
for filename in options['filenames']:
print(filename)
|
Add help text and a 'filenames' argument.
|
Add help text and a 'filenames' argument.
* Make it print the filenames it's receiving for the sake of
good testing output.
|
Python
|
mit
|
adam-incuna/imperial-painter,adam-thomas/imperial-painter,adam-thomas/imperial-painter,adam-incuna/imperial-painter
|
import tablib
from django.core.management.base import BaseCommand
from painter.models import Card
class Command(BaseCommand):
def handle(self, *args, **options):
dataset = tablib.Dataset()
Add help text and a 'filenames' argument.
* Make it print the filenames it's receiving for the sake of
good testing output.
|
import tablib
from django.core.management.base import BaseCommand
from painter.models import Card
class Command(BaseCommand):
help = ('Clears the database of cards, then fills it with the contents of one or' +
' more specified CSV files.')
def add_arguments(self, parser):
parser.add_argument(
'filenames',
nargs='+',
type=str,
help='One or more CSV file names. The extension is optional.',
)
def handle(self, *args, **options):
dataset = tablib.Dataset()
for filename in options['filenames']:
print(filename)
|
<commit_before>import tablib
from django.core.management.base import BaseCommand
from painter.models import Card
class Command(BaseCommand):
def handle(self, *args, **options):
dataset = tablib.Dataset()
<commit_msg>Add help text and a 'filenames' argument.
* Make it print the filenames it's receiving for the sake of
good testing output.<commit_after>
|
import tablib
from django.core.management.base import BaseCommand
from painter.models import Card
class Command(BaseCommand):
help = ('Clears the database of cards, then fills it with the contents of one or' +
' more specified CSV files.')
def add_arguments(self, parser):
parser.add_argument(
'filenames',
nargs='+',
type=str,
help='One or more CSV file names. The extension is optional.',
)
def handle(self, *args, **options):
dataset = tablib.Dataset()
for filename in options['filenames']:
print(filename)
|
import tablib
from django.core.management.base import BaseCommand
from painter.models import Card
class Command(BaseCommand):
def handle(self, *args, **options):
dataset = tablib.Dataset()
Add help text and a 'filenames' argument.
* Make it print the filenames it's receiving for the sake of
good testing output.import tablib
from django.core.management.base import BaseCommand
from painter.models import Card
class Command(BaseCommand):
help = ('Clears the database of cards, then fills it with the contents of one or' +
' more specified CSV files.')
def add_arguments(self, parser):
parser.add_argument(
'filenames',
nargs='+',
type=str,
help='One or more CSV file names. The extension is optional.',
)
def handle(self, *args, **options):
dataset = tablib.Dataset()
for filename in options['filenames']:
print(filename)
|
<commit_before>import tablib
from django.core.management.base import BaseCommand
from painter.models import Card
class Command(BaseCommand):
def handle(self, *args, **options):
dataset = tablib.Dataset()
<commit_msg>Add help text and a 'filenames' argument.
* Make it print the filenames it's receiving for the sake of
good testing output.<commit_after>import tablib
from django.core.management.base import BaseCommand
from painter.models import Card
class Command(BaseCommand):
help = ('Clears the database of cards, then fills it with the contents of one or' +
' more specified CSV files.')
def add_arguments(self, parser):
parser.add_argument(
'filenames',
nargs='+',
type=str,
help='One or more CSV file names. The extension is optional.',
)
def handle(self, *args, **options):
dataset = tablib.Dataset()
for filename in options['filenames']:
print(filename)
|
b457af108174821965ae8e3ee28eb3d34c0fec06
|
plugins/GCodeWriter/__init__.py
|
plugins/GCodeWriter/__init__.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "GCode Writer"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file"),
"api": 2
},
"mesh_writer": {
"output": [{
"extension": "gcode",
"description": catalog.i18nc("@item:inlistbox", "GCode File"),
"mime_type": "text/x-gcode",
"mode": GCodeWriter.GCodeWriter.OutputMode.TextMode
}]
}
}
def register(app):
return { "mesh_writer": GCodeWriter.GCodeWriter() }
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "GCode Writer"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file."),
"api": 2
},
"mesh_writer": {
"output": [{
"extension": "gcode",
"description": catalog.i18nc("@item:inlistbox", "GCode File"),
"mime_type": "text/x-gcode",
"mode": GCodeWriter.GCodeWriter.OutputMode.TextMode
}]
}
}
def register(app):
return { "mesh_writer": GCodeWriter.GCodeWriter() }
|
Add period at end of plug-in description
|
Add period at end of plug-in description
All other plug-in descriptions have that too. So for consistency.
Contributes to issue CURA-1190.
|
Python
|
agpl-3.0
|
fieldOfView/Cura,fieldOfView/Cura,senttech/Cura,ynotstartups/Wanhao,senttech/Cura,hmflash/Cura,totalretribution/Cura,ynotstartups/Wanhao,Curahelper/Cura,totalretribution/Cura,hmflash/Cura,Curahelper/Cura
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "GCode Writer"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file"),
"api": 2
},
"mesh_writer": {
"output": [{
"extension": "gcode",
"description": catalog.i18nc("@item:inlistbox", "GCode File"),
"mime_type": "text/x-gcode",
"mode": GCodeWriter.GCodeWriter.OutputMode.TextMode
}]
}
}
def register(app):
return { "mesh_writer": GCodeWriter.GCodeWriter() }
Add period at end of plug-in description
All other plug-in descriptions have that too. So for consistency.
Contributes to issue CURA-1190.
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "GCode Writer"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file."),
"api": 2
},
"mesh_writer": {
"output": [{
"extension": "gcode",
"description": catalog.i18nc("@item:inlistbox", "GCode File"),
"mime_type": "text/x-gcode",
"mode": GCodeWriter.GCodeWriter.OutputMode.TextMode
}]
}
}
def register(app):
return { "mesh_writer": GCodeWriter.GCodeWriter() }
|
<commit_before># Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "GCode Writer"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file"),
"api": 2
},
"mesh_writer": {
"output": [{
"extension": "gcode",
"description": catalog.i18nc("@item:inlistbox", "GCode File"),
"mime_type": "text/x-gcode",
"mode": GCodeWriter.GCodeWriter.OutputMode.TextMode
}]
}
}
def register(app):
return { "mesh_writer": GCodeWriter.GCodeWriter() }
<commit_msg>Add period at end of plug-in description
All other plug-in descriptions have that too. So for consistency.
Contributes to issue CURA-1190.<commit_after>
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "GCode Writer"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file."),
"api": 2
},
"mesh_writer": {
"output": [{
"extension": "gcode",
"description": catalog.i18nc("@item:inlistbox", "GCode File"),
"mime_type": "text/x-gcode",
"mode": GCodeWriter.GCodeWriter.OutputMode.TextMode
}]
}
}
def register(app):
return { "mesh_writer": GCodeWriter.GCodeWriter() }
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "GCode Writer"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file"),
"api": 2
},
"mesh_writer": {
"output": [{
"extension": "gcode",
"description": catalog.i18nc("@item:inlistbox", "GCode File"),
"mime_type": "text/x-gcode",
"mode": GCodeWriter.GCodeWriter.OutputMode.TextMode
}]
}
}
def register(app):
return { "mesh_writer": GCodeWriter.GCodeWriter() }
Add period at end of plug-in description
All other plug-in descriptions have that too. So for consistency.
Contributes to issue CURA-1190.# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "GCode Writer"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file."),
"api": 2
},
"mesh_writer": {
"output": [{
"extension": "gcode",
"description": catalog.i18nc("@item:inlistbox", "GCode File"),
"mime_type": "text/x-gcode",
"mode": GCodeWriter.GCodeWriter.OutputMode.TextMode
}]
}
}
def register(app):
    """Create and return the writer instance this plug-in contributes.

    ``app`` is unused; registration only needs the writer object.
    """
    return {"mesh_writer": GCodeWriter.GCodeWriter()}
|
<commit_before># Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
    """Return the plug-in registry metadata for the GCode writer.

    Declares the plug-in identity and the single mesh-writer output it
    provides (text-mode ``.gcode`` files).
    """
    plugin_info = {
        "name": catalog.i18nc("@label", "GCode Writer"),
        "author": "Ultimaker",
        "version": "1.0",
        # Trailing period added for consistency with the other plug-in
        # descriptions.
        "description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file."),
        "api": 2,
    }
    gcode_output = {
        "extension": "gcode",
        "description": catalog.i18nc("@item:inlistbox", "GCode File"),
        "mime_type": "text/x-gcode",
        # GCode is plain text, so the writer runs in text mode.
        "mode": GCodeWriter.GCodeWriter.OutputMode.TextMode,
    }
    return {
        "plugin": plugin_info,
        "mesh_writer": {"output": [gcode_output]},
    }
def register(app):
    """Create and return the writer instance this plug-in contributes.

    ``app`` is unused; registration only needs the writer object.
    """
    return {"mesh_writer": GCodeWriter.GCodeWriter()}
<commit_msg>Add period at end of plug-in description
All other plug-in descriptions have that too. So for consistency.
Contributes to issue CURA-1190.<commit_after># Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import GCodeWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
    """Return the plug-in registry metadata for the GCode writer.

    Declares the plug-in identity and the single mesh-writer output it
    provides (text-mode ``.gcode`` files).
    """
    plugin_info = {
        "name": catalog.i18nc("@label", "GCode Writer"),
        "author": "Ultimaker",
        "version": "1.0",
        "description": catalog.i18nc("@info:whatsthis", "Writes GCode to a file."),
        "api": 2,
    }
    gcode_output = {
        "extension": "gcode",
        "description": catalog.i18nc("@item:inlistbox", "GCode File"),
        "mime_type": "text/x-gcode",
        # GCode is plain text, so the writer runs in text mode.
        "mode": GCodeWriter.GCodeWriter.OutputMode.TextMode,
    }
    return {
        "plugin": plugin_info,
        "mesh_writer": {"output": [gcode_output]},
    }
def register(app):
    """Create and return the writer instance this plug-in contributes.

    ``app`` is unused; registration only needs the writer object.
    """
    return {"mesh_writer": GCodeWriter.GCodeWriter()}
|
6889946ebb1c1559e0e1c7b83e1d7b1d6896e0b0
|
tests/test_train_dictionary.py
|
tests/test_train_dictionary.py
|
import unittest
import zstd
class TestTrainDictionary(unittest.TestCase):
    """Exercises zstd.train_dictionary() argument checking and basic output."""

    def test_no_args(self):
        # The size and samples arguments are mandatory.
        with self.assertRaises(TypeError):
            zstd.train_dictionary()

    def test_bad_args(self):
        # Samples must be a list; unicode items inside the list are rejected.
        with self.assertRaises(TypeError):
            zstd.train_dictionary(8192, u'foo')
        with self.assertRaises(ValueError):
            zstd.train_dictionary(8192, [u'foo'])

    def test_basic(self):
        # A repetitive corpus lets the trainer find common substrings.
        samples = []
        for _ in range(128):
            samples.extend([
                b'foo' * 64,
                b'bar' * 64,
                b'foobar' * 64,
                b'baz' * 64,
                b'foobaz' * 64,
                b'bazfoo' * 64,
            ])
        d = zstd.train_dictionary(8192, samples)
        # The requested size is an upper bound on the dictionary size.
        self.assertLessEqual(len(d), 8192)
        dict_id = zstd.dictionary_id(d)
        # The extension returns a ``long`` on Python 2, which is not an
        # ``int`` there; accept both.  The tuple referencing ``long`` is
        # only evaluated on Python 2 (where ``str is bytes``), so this
        # parses and runs correctly on Python 3 as well.
        self.assertIsInstance(dict_id, (int, long) if str is bytes else int)
|
import sys
import unittest
import zstd
if sys.version_info[0] >= 3:
int_type = int
else:
int_type = long
class TestTrainDictionary(unittest.TestCase):
    """Exercises zstd.train_dictionary() argument checking and basic output."""

    def test_no_args(self):
        # The size and samples arguments are mandatory.
        with self.assertRaises(TypeError):
            zstd.train_dictionary()

    def test_bad_args(self):
        # Samples must be a list; unicode items inside the list are rejected.
        with self.assertRaises(TypeError):
            zstd.train_dictionary(8192, u'foo')
        with self.assertRaises(ValueError):
            zstd.train_dictionary(8192, [u'foo'])

    def test_basic(self):
        # A repetitive corpus lets the trainer find common substrings.
        samples = []
        for _ in range(128):
            samples.extend([
                b'foo' * 64,
                b'bar' * 64,
                b'foobar' * 64,
                b'baz' * 64,
                b'foobaz' * 64,
                b'bazfoo' * 64,
            ])
        trained = zstd.train_dictionary(8192, samples)
        # The requested size is an upper bound on the dictionary size.
        self.assertLessEqual(len(trained), 8192)
        self.assertIsInstance(zstd.dictionary_id(trained), int_type)
|
Check for appropriate long type on Python 2
|
Check for appropriate long type on Python 2
The extension always returns a long, which is not an "int" on
Python 2. Fix the test.
|
Python
|
bsd-3-clause
|
terrelln/python-zstandard,terrelln/python-zstandard,terrelln/python-zstandard,indygreg/python-zstandard,terrelln/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard
|
import unittest
import zstd
class TestTrainDictionary(unittest.TestCase):
    """Exercises zstd.train_dictionary() argument checking and basic output."""

    def test_no_args(self):
        # The size and samples arguments are mandatory.
        with self.assertRaises(TypeError):
            zstd.train_dictionary()

    def test_bad_args(self):
        # Samples must be a list; unicode items inside the list are rejected.
        with self.assertRaises(TypeError):
            zstd.train_dictionary(8192, u'foo')
        with self.assertRaises(ValueError):
            zstd.train_dictionary(8192, [u'foo'])

    def test_basic(self):
        # A repetitive corpus lets the trainer find common substrings.
        samples = []
        for _ in range(128):
            samples.extend([
                b'foo' * 64,
                b'bar' * 64,
                b'foobar' * 64,
                b'baz' * 64,
                b'foobaz' * 64,
                b'bazfoo' * 64,
            ])
        d = zstd.train_dictionary(8192, samples)
        # The requested size is an upper bound on the dictionary size.
        self.assertLessEqual(len(d), 8192)
        dict_id = zstd.dictionary_id(d)
        # The extension returns a ``long`` on Python 2, which is not an
        # ``int`` there; accept both.  The tuple referencing ``long`` is
        # only evaluated on Python 2 (where ``str is bytes``), so this
        # parses and runs correctly on Python 3 as well.
        self.assertIsInstance(dict_id, (int, long) if str is bytes else int)
Check for appropriate long type on Python 2
The extension always returns a long, which is not an "int" on
Python 2. Fix the test.
|
import sys
import unittest
import zstd
if sys.version_info[0] >= 3:
int_type = int
else:
int_type = long
class TestTrainDictionary(unittest.TestCase):
    """Exercises zstd.train_dictionary() argument checking and basic output."""

    def test_no_args(self):
        # The size and samples arguments are mandatory.
        with self.assertRaises(TypeError):
            zstd.train_dictionary()

    def test_bad_args(self):
        # Samples must be a list; unicode items inside the list are rejected.
        with self.assertRaises(TypeError):
            zstd.train_dictionary(8192, u'foo')
        with self.assertRaises(ValueError):
            zstd.train_dictionary(8192, [u'foo'])

    def test_basic(self):
        # A repetitive corpus lets the trainer find common substrings.
        samples = []
        for _ in range(128):
            samples.extend([
                b'foo' * 64,
                b'bar' * 64,
                b'foobar' * 64,
                b'baz' * 64,
                b'foobaz' * 64,
                b'bazfoo' * 64,
            ])
        trained = zstd.train_dictionary(8192, samples)
        # The requested size is an upper bound on the dictionary size.
        self.assertLessEqual(len(trained), 8192)
        self.assertIsInstance(zstd.dictionary_id(trained), int_type)
|
<commit_before>import unittest
import zstd
class TestTrainDictionary(unittest.TestCase):
    """Exercises zstd.train_dictionary() argument checking and basic output."""

    def test_no_args(self):
        # The size and samples arguments are mandatory.
        with self.assertRaises(TypeError):
            zstd.train_dictionary()

    def test_bad_args(self):
        # Samples must be a list; unicode items inside the list are rejected.
        with self.assertRaises(TypeError):
            zstd.train_dictionary(8192, u'foo')
        with self.assertRaises(ValueError):
            zstd.train_dictionary(8192, [u'foo'])

    def test_basic(self):
        # A repetitive corpus lets the trainer find common substrings.
        samples = []
        for _ in range(128):
            samples.extend([
                b'foo' * 64,
                b'bar' * 64,
                b'foobar' * 64,
                b'baz' * 64,
                b'foobaz' * 64,
                b'bazfoo' * 64,
            ])
        d = zstd.train_dictionary(8192, samples)
        # The requested size is an upper bound on the dictionary size.
        self.assertLessEqual(len(d), 8192)
        dict_id = zstd.dictionary_id(d)
        # The extension returns a ``long`` on Python 2, which is not an
        # ``int`` there; accept both.  The tuple referencing ``long`` is
        # only evaluated on Python 2 (where ``str is bytes``), so this
        # parses and runs correctly on Python 3 as well.
        self.assertIsInstance(dict_id, (int, long) if str is bytes else int)
<commit_msg>Check for appropriate long type on Python 2
The extension always returns a long, which is not an "int" on
Python 2. Fix the test.<commit_after>
|
import sys
import unittest
import zstd
if sys.version_info[0] >= 3:
int_type = int
else:
int_type = long
class TestTrainDictionary(unittest.TestCase):
    """Exercises zstd.train_dictionary() argument checking and basic output."""

    def test_no_args(self):
        # The size and samples arguments are mandatory.
        with self.assertRaises(TypeError):
            zstd.train_dictionary()

    def test_bad_args(self):
        # Samples must be a list; unicode items inside the list are rejected.
        with self.assertRaises(TypeError):
            zstd.train_dictionary(8192, u'foo')
        with self.assertRaises(ValueError):
            zstd.train_dictionary(8192, [u'foo'])

    def test_basic(self):
        # A repetitive corpus lets the trainer find common substrings.
        samples = []
        for _ in range(128):
            samples.extend([
                b'foo' * 64,
                b'bar' * 64,
                b'foobar' * 64,
                b'baz' * 64,
                b'foobaz' * 64,
                b'bazfoo' * 64,
            ])
        trained = zstd.train_dictionary(8192, samples)
        # The requested size is an upper bound on the dictionary size.
        self.assertLessEqual(len(trained), 8192)
        self.assertIsInstance(zstd.dictionary_id(trained), int_type)
|
import unittest
import zstd
class TestTrainDictionary(unittest.TestCase):
    """Exercises zstd.train_dictionary() argument checking and basic output."""

    def test_no_args(self):
        # The size and samples arguments are mandatory.
        with self.assertRaises(TypeError):
            zstd.train_dictionary()

    def test_bad_args(self):
        # Samples must be a list; unicode items inside the list are rejected.
        with self.assertRaises(TypeError):
            zstd.train_dictionary(8192, u'foo')
        with self.assertRaises(ValueError):
            zstd.train_dictionary(8192, [u'foo'])

    def test_basic(self):
        # A repetitive corpus lets the trainer find common substrings.
        samples = []
        for _ in range(128):
            samples.extend([
                b'foo' * 64,
                b'bar' * 64,
                b'foobar' * 64,
                b'baz' * 64,
                b'foobaz' * 64,
                b'bazfoo' * 64,
            ])
        d = zstd.train_dictionary(8192, samples)
        # The requested size is an upper bound on the dictionary size.
        self.assertLessEqual(len(d), 8192)
        dict_id = zstd.dictionary_id(d)
        # The extension returns a ``long`` on Python 2, which is not an
        # ``int`` there; accept both.  The tuple referencing ``long`` is
        # only evaluated on Python 2 (where ``str is bytes``), so this
        # parses and runs correctly on Python 3 as well.
        self.assertIsInstance(dict_id, (int, long) if str is bytes else int)
Check for appropriate long type on Python 2
The extension always returns a long, which is not an "int" on
Python 2. Fix the test.import sys
import unittest
import zstd
if sys.version_info[0] >= 3:
int_type = int
else:
int_type = long
class TestTrainDictionary(unittest.TestCase):
    """Exercises zstd.train_dictionary() argument checking and basic output."""

    def test_no_args(self):
        # The size and samples arguments are mandatory.
        with self.assertRaises(TypeError):
            zstd.train_dictionary()

    def test_bad_args(self):
        # Samples must be a list; unicode items inside the list are rejected.
        with self.assertRaises(TypeError):
            zstd.train_dictionary(8192, u'foo')
        with self.assertRaises(ValueError):
            zstd.train_dictionary(8192, [u'foo'])

    def test_basic(self):
        # A repetitive corpus lets the trainer find common substrings.
        samples = []
        for _ in range(128):
            samples.extend([
                b'foo' * 64,
                b'bar' * 64,
                b'foobar' * 64,
                b'baz' * 64,
                b'foobaz' * 64,
                b'bazfoo' * 64,
            ])
        trained = zstd.train_dictionary(8192, samples)
        # The requested size is an upper bound on the dictionary size.
        self.assertLessEqual(len(trained), 8192)
        self.assertIsInstance(zstd.dictionary_id(trained), int_type)
|
<commit_before>import unittest
import zstd
class TestTrainDictionary(unittest.TestCase):
    """Exercises zstd.train_dictionary() argument checking and basic output."""

    def test_no_args(self):
        # The size and samples arguments are mandatory.
        with self.assertRaises(TypeError):
            zstd.train_dictionary()

    def test_bad_args(self):
        # Samples must be a list; unicode items inside the list are rejected.
        with self.assertRaises(TypeError):
            zstd.train_dictionary(8192, u'foo')
        with self.assertRaises(ValueError):
            zstd.train_dictionary(8192, [u'foo'])

    def test_basic(self):
        # A repetitive corpus lets the trainer find common substrings.
        samples = []
        for _ in range(128):
            samples.extend([
                b'foo' * 64,
                b'bar' * 64,
                b'foobar' * 64,
                b'baz' * 64,
                b'foobaz' * 64,
                b'bazfoo' * 64,
            ])
        d = zstd.train_dictionary(8192, samples)
        # The requested size is an upper bound on the dictionary size.
        self.assertLessEqual(len(d), 8192)
        dict_id = zstd.dictionary_id(d)
        # The extension returns a ``long`` on Python 2, which is not an
        # ``int`` there; accept both.  The tuple referencing ``long`` is
        # only evaluated on Python 2 (where ``str is bytes``), so this
        # parses and runs correctly on Python 3 as well.
        self.assertIsInstance(dict_id, (int, long) if str is bytes else int)
<commit_msg>Check for appropriate long type on Python 2
The extension always returns a long, which is not an "int" on
Python 2. Fix the test.<commit_after>import sys
import unittest
import zstd
if sys.version_info[0] >= 3:
int_type = int
else:
int_type = long
class TestTrainDictionary(unittest.TestCase):
    """Exercises zstd.train_dictionary() argument checking and basic output."""

    def test_no_args(self):
        # The size and samples arguments are mandatory.
        with self.assertRaises(TypeError):
            zstd.train_dictionary()

    def test_bad_args(self):
        # Samples must be a list; unicode items inside the list are rejected.
        with self.assertRaises(TypeError):
            zstd.train_dictionary(8192, u'foo')
        with self.assertRaises(ValueError):
            zstd.train_dictionary(8192, [u'foo'])

    def test_basic(self):
        # A repetitive corpus lets the trainer find common substrings.
        samples = []
        for _ in range(128):
            samples.extend([
                b'foo' * 64,
                b'bar' * 64,
                b'foobar' * 64,
                b'baz' * 64,
                b'foobaz' * 64,
                b'bazfoo' * 64,
            ])
        trained = zstd.train_dictionary(8192, samples)
        # The requested size is an upper bound on the dictionary size.
        self.assertLessEqual(len(trained), 8192)
        self.assertIsInstance(zstd.dictionary_id(trained), int_type)
|
47fe1412857dbc251ff89004798d5507b0e70b25
|
boundary/plugin_get.py
|
boundary/plugin_get.py
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGet(ApiCli):
    """CLI command that retrieves the details of one plug-in by name."""

    def __init__(self):
        ApiCli.__init__(self)
        # Default endpoint; getArguments() narrows it to a specific plug-in.
        self.method = "GET"
        self.path = "v1/plugins"
        self.pluginName = None

    def addArguments(self):
        ApiCli.addArguments(self)
        # metavar added for consistency with the other commands' help output.
        self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
                                 required=True, help='Plugin name')

    def getArguments(self):
        """Extracts the specific arguments of this CLI."""
        ApiCli.getArguments(self)
        # PEP 8: compare against None with ``is not``, not ``!=``.
        if self.args.pluginName is not None:
            self.pluginName = self.args.pluginName
            self.path = "v1/plugins/{0}".format(self.pluginName)

    def getDescription(self):
        return "Get the details of a plugin in a Boundary account"
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGet(ApiCli):
    """CLI command that retrieves the details of one plug-in by name."""

    def __init__(self):
        ApiCli.__init__(self)
        # Default endpoint; getArguments() narrows it to a specific plug-in.
        self.method = "GET"
        self.path = "v1/plugins"
        self.pluginName = None

    def addArguments(self):
        ApiCli.addArguments(self)
        self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
                                 required=True, help='Plugin name')

    def getArguments(self):
        """Extracts the specific arguments of this CLI."""
        ApiCli.getArguments(self)
        name = self.args.pluginName
        if name is None:
            return
        self.pluginName = name
        self.path = "v1/plugins/{0}".format(name)

    def getDescription(self):
        return "Get the details of a plugin in a Boundary account"
|
Reformat code to PEP-8 standards
|
Reformat code to PEP-8 standards
|
Python
|
apache-2.0
|
wcainboundary/boundary-api-cli,boundary/pulse-api-cli,jdgwartney/pulse-api-cli,wcainboundary/boundary-api-cli,jdgwartney/boundary-api-cli,boundary/boundary-api-cli,jdgwartney/boundary-api-cli,boundary/pulse-api-cli,jdgwartney/pulse-api-cli,boundary/boundary-api-cli
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGet(ApiCli):
    """CLI command that retrieves the details of one plug-in by name."""

    def __init__(self):
        ApiCli.__init__(self)
        # Default endpoint; getArguments() narrows it to a specific plug-in.
        self.method = "GET"
        self.path = "v1/plugins"
        self.pluginName = None

    def addArguments(self):
        ApiCli.addArguments(self)
        # metavar added for consistency with the other commands' help output.
        self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
                                 required=True, help='Plugin name')

    def getArguments(self):
        """Extracts the specific arguments of this CLI."""
        ApiCli.getArguments(self)
        # PEP 8: compare against None with ``is not``, not ``!=``.
        if self.args.pluginName is not None:
            self.pluginName = self.args.pluginName
            self.path = "v1/plugins/{0}".format(self.pluginName)

    def getDescription(self):
        return "Get the details of a plugin in a Boundary account"
Reformat code to PEP-8 standards
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGet(ApiCli):
    """CLI command that retrieves the details of one plug-in by name."""

    def __init__(self):
        ApiCli.__init__(self)
        # Default endpoint; getArguments() narrows it to a specific plug-in.
        self.method = "GET"
        self.path = "v1/plugins"
        self.pluginName = None

    def addArguments(self):
        ApiCli.addArguments(self)
        self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
                                 required=True, help='Plugin name')

    def getArguments(self):
        """Extracts the specific arguments of this CLI."""
        ApiCli.getArguments(self)
        name = self.args.pluginName
        if name is None:
            return
        self.pluginName = name
        self.path = "v1/plugins/{0}".format(name)

    def getDescription(self):
        return "Get the details of a plugin in a Boundary account"
|
<commit_before>#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGet(ApiCli):
    """CLI command that retrieves the details of one plug-in by name."""

    def __init__(self):
        ApiCli.__init__(self)
        # Default endpoint; getArguments() narrows it to a specific plug-in.
        self.method = "GET"
        self.path = "v1/plugins"
        self.pluginName = None

    def addArguments(self):
        ApiCli.addArguments(self)
        # metavar added for consistency with the other commands' help output.
        self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
                                 required=True, help='Plugin name')

    def getArguments(self):
        """Extracts the specific arguments of this CLI."""
        ApiCli.getArguments(self)
        # PEP 8: compare against None with ``is not``, not ``!=``.
        if self.args.pluginName is not None:
            self.pluginName = self.args.pluginName
            self.path = "v1/plugins/{0}".format(self.pluginName)

    def getDescription(self):
        return "Get the details of a plugin in a Boundary account"
<commit_msg>Reformat code to PEP-8 standards<commit_after>
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGet(ApiCli):
    """CLI command that retrieves the details of one plug-in by name."""

    def __init__(self):
        ApiCli.__init__(self)
        # Default endpoint; getArguments() narrows it to a specific plug-in.
        self.method = "GET"
        self.path = "v1/plugins"
        self.pluginName = None

    def addArguments(self):
        ApiCli.addArguments(self)
        self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
                                 required=True, help='Plugin name')

    def getArguments(self):
        """Extracts the specific arguments of this CLI."""
        ApiCli.getArguments(self)
        name = self.args.pluginName
        if name is None:
            return
        self.pluginName = name
        self.path = "v1/plugins/{0}".format(name)

    def getDescription(self):
        return "Get the details of a plugin in a Boundary account"
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGet(ApiCli):
    """CLI command that retrieves the details of one plug-in by name."""

    def __init__(self):
        ApiCli.__init__(self)
        # Default endpoint; getArguments() narrows it to a specific plug-in.
        self.method = "GET"
        self.path = "v1/plugins"
        self.pluginName = None

    def addArguments(self):
        ApiCli.addArguments(self)
        # metavar added for consistency with the other commands' help output.
        self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
                                 required=True, help='Plugin name')

    def getArguments(self):
        """Extracts the specific arguments of this CLI."""
        ApiCli.getArguments(self)
        # PEP 8: compare against None with ``is not``, not ``!=``.
        if self.args.pluginName is not None:
            self.pluginName = self.args.pluginName
            self.path = "v1/plugins/{0}".format(self.pluginName)

    def getDescription(self):
        return "Get the details of a plugin in a Boundary account"
Reformat code to PEP-8 standards#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGet(ApiCli):
    """CLI command that retrieves the details of one plug-in by name."""

    def __init__(self):
        ApiCli.__init__(self)
        # Default endpoint; getArguments() narrows it to a specific plug-in.
        self.method = "GET"
        self.path = "v1/plugins"
        self.pluginName = None

    def addArguments(self):
        ApiCli.addArguments(self)
        self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
                                 required=True, help='Plugin name')

    def getArguments(self):
        """Extracts the specific arguments of this CLI."""
        ApiCli.getArguments(self)
        name = self.args.pluginName
        if name is None:
            return
        self.pluginName = name
        self.path = "v1/plugins/{0}".format(name)

    def getDescription(self):
        return "Get the details of a plugin in a Boundary account"
|
<commit_before>#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGet(ApiCli):
    """CLI command that retrieves the details of one plug-in by name."""

    def __init__(self):
        ApiCli.__init__(self)
        # Default endpoint; getArguments() narrows it to a specific plug-in.
        self.method = "GET"
        self.path = "v1/plugins"
        self.pluginName = None

    def addArguments(self):
        ApiCli.addArguments(self)
        # metavar added for consistency with the other commands' help output.
        self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
                                 required=True, help='Plugin name')

    def getArguments(self):
        """Extracts the specific arguments of this CLI."""
        ApiCli.getArguments(self)
        # PEP 8: compare against None with ``is not``, not ``!=``.
        if self.args.pluginName is not None:
            self.pluginName = self.args.pluginName
            self.path = "v1/plugins/{0}".format(self.pluginName)

    def getDescription(self):
        return "Get the details of a plugin in a Boundary account"
<commit_msg>Reformat code to PEP-8 standards<commit_after>#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGet(ApiCli):
    """CLI command that retrieves the details of one plug-in by name."""

    def __init__(self):
        ApiCli.__init__(self)
        # Default endpoint; getArguments() narrows it to a specific plug-in.
        self.method = "GET"
        self.path = "v1/plugins"
        self.pluginName = None

    def addArguments(self):
        ApiCli.addArguments(self)
        self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
                                 required=True, help='Plugin name')

    def getArguments(self):
        """Extracts the specific arguments of this CLI."""
        ApiCli.getArguments(self)
        name = self.args.pluginName
        if name is None:
            return
        self.pluginName = name
        self.path = "v1/plugins/{0}".format(name)

    def getDescription(self):
        return "Get the details of a plugin in a Boundary account"
|
27d2cd57337497abb9d106fdb033c26771e481e4
|
rmgpy/data/__init__.py
|
rmgpy/data/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
import os.path
def getDatabaseDirectory():
    """Return the absolute path of the RMG database directory.

    The database is assumed to live two directory levels above this
    package, in a sibling directory named ``database``.
    """
    package_dir = os.path.dirname(__file__)
    return os.path.abspath(os.path.join(package_dir, '..', '..', 'database'))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
|
Remove getDatabaseDirectory() function from rmgpy.data
|
Remove getDatabaseDirectory() function from rmgpy.data
This function is not being used anywhere, and also has been
replaced by the settings in rmgpy, which searches for and
saves a database directory
|
Python
|
mit
|
pierrelb/RMG-Py,pierrelb/RMG-Py,nickvandewiele/RMG-Py,nickvandewiele/RMG-Py,nyee/RMG-Py,nyee/RMG-Py,chatelak/RMG-Py,chatelak/RMG-Py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
import os.path
def getDatabaseDirectory():
    """Return the absolute path of the RMG database directory.

    The database is assumed to live two directory levels above this
    package, in a sibling directory named ``database``.
    """
    package_dir = os.path.dirname(__file__)
    return os.path.abspath(os.path.join(package_dir, '..', '..', 'database'))
Remove getDatabaseDirectory() function from rmgpy.data
This function is not being used anywhere, and also has been
replaced by the settings in rmgpy, which searches for and
saves a database directory
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
import os.path
def getDatabaseDirectory():
return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'database'))
<commit_msg>Remove getDatabaseDirectory() function from rmgpy.data
This function is not being used anywhere, and also has been
replaced by the settings in rmgpy, which searches for and
saves a database directory<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
import os.path
def getDatabaseDirectory():
return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'database'))
Remove getDatabaseDirectory() function from rmgpy.data
This function is not being used anywhere, and also has been
replaced by the settings in rmgpy, which searches for and
saves a database directory#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
import os.path
def getDatabaseDirectory():
return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'database'))
<commit_msg>Remove getDatabaseDirectory() function from rmgpy.data
This function is not being used anywhere, and also has been
replaced by the settings in rmgpy, which searches for and
saves a database directory<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
|
f188f2eb81c1310b9862b435a492b4ce6d0fac2d
|
python3/aniso8601/resolution.py
|
python3/aniso8601/resolution.py
|
# -*- coding: utf-8 -*-
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
from enum import Enum
class DateResolution(Enum):
Year, Month, Week, Weekday, Day, Ordinal = range(6)
class TimeResolution(Enum):
Seconds, Minutes, Hours = range(3)
|
# -*- coding: utf-8 -*-
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
class DateResolution(object):
Year, Month, Week, Weekday, Day, Ordinal = list(range(6))
class TimeResolution(object):
Seconds, Minutes, Hours = list(range(3))
|
Remove use of enum in Python3
|
Remove use of enum in Python3
|
Python
|
bsd-3-clause
|
3stack-software/python-aniso8601-relativedelta
|
# -*- coding: utf-8 -*-
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
from enum import Enum
class DateResolution(Enum):
Year, Month, Week, Weekday, Day, Ordinal = range(6)
class TimeResolution(Enum):
Seconds, Minutes, Hours = range(3)
Remove use of enum in Python3
|
# -*- coding: utf-8 -*-
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
class DateResolution(object):
Year, Month, Week, Weekday, Day, Ordinal = list(range(6))
class TimeResolution(object):
Seconds, Minutes, Hours = list(range(3))
|
<commit_before># -*- coding: utf-8 -*-
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
from enum import Enum
class DateResolution(Enum):
Year, Month, Week, Weekday, Day, Ordinal = range(6)
class TimeResolution(Enum):
Seconds, Minutes, Hours = range(3)
<commit_msg>Remove use of enum in Python3<commit_after>
|
# -*- coding: utf-8 -*-
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
class DateResolution(object):
Year, Month, Week, Weekday, Day, Ordinal = list(range(6))
class TimeResolution(object):
Seconds, Minutes, Hours = list(range(3))
|
# -*- coding: utf-8 -*-
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
from enum import Enum
class DateResolution(Enum):
Year, Month, Week, Weekday, Day, Ordinal = range(6)
class TimeResolution(Enum):
Seconds, Minutes, Hours = range(3)
Remove use of enum in Python3# -*- coding: utf-8 -*-
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
class DateResolution(object):
Year, Month, Week, Weekday, Day, Ordinal = list(range(6))
class TimeResolution(object):
Seconds, Minutes, Hours = list(range(3))
|
<commit_before># -*- coding: utf-8 -*-
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
from enum import Enum
class DateResolution(Enum):
Year, Month, Week, Weekday, Day, Ordinal = range(6)
class TimeResolution(Enum):
Seconds, Minutes, Hours = range(3)
<commit_msg>Remove use of enum in Python3<commit_after># -*- coding: utf-8 -*-
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
class DateResolution(object):
Year, Month, Week, Weekday, Day, Ordinal = list(range(6))
class TimeResolution(object):
Seconds, Minutes, Hours = list(range(3))
|
dfef23d834ab67acf91dcefd6fe39e089c71fb9a
|
quantized_mesh_tile/__init__.py
|
quantized_mesh_tile/__init__.py
|
"""
This module provides high level utility functions to encode and decode a terrain tile.
Reference
---------
"""
from .terrain import TerrainTile
from .topology import TerrainTopology
def encode(geometries, bounds=[], watermask=[], hasLighting=False, gzipped=False):
"""
Function to convert geometries in a quantized-mesh encoded string buffer.
Arguments:
``geometries``
A list of shapely polygon geometries representing 3 dimensional triangles.
or
A list of WKT or WKB Polygons representing 3 dimensional triangles.
or
A list of triplet of vertices using the following structure:
``(((lon0/lat0/height0),(...),(lon2,lat2,height2)),(...))``
``bounds``
The bounds of the terrain tile. (west, south, east, north)
If not defined, the bounds will be computed from the provided geometries.
Default is `[]`.
``hasLighting``
Indicate whether unit vectors should be computed for the lighting extension.
Default is `False`.
``watermask``
A water mask list (Optional). Adds rendering water effect.
The water mask list is either one byte, `[0]` for land and `[255]` for
water, either a list of 256*256 values ranging from 0 to 255.
Values in the mask are defined from north-to-south and west-to-east.
Per default no watermask is applied. Note that the water mask effect depends on
the texture of the raster layer drapped over your terrain.
Default is `[]`.
``gzipped``
Indicate if the tile content is gzipped.
Default is `False`.
"""
topology = TerrainTopology(geometries=geometries, hasLighting=hasLighting)
if len(bounds) == 4:
west, south, east, north = bounds
tile = TerrainTile(watermask=watermask,
west=west, south=south, east=east, north=north, topology=topology)
else:
tile = TerrainTile(watermask=watermask, topology=topology)
return tile.toStringIO(gzipped=gzipped)
def decode(filePath, bounds, hasLighting=False, hasWatermask=False, gzipped=False):
"""
Function to convert a quantized-mesh terrain tile file into a
:class:`quantized_mesh_tile.terrain.TerrainTile` instance.
Arguments:
``filePath``
An absolute or relative path to write the terrain tile. (Required)
``bounds``
The bounds of the terrain tile. (west, south, east, north) (Required).
``hasLighting``
Indicate whether the tile has the lighting extension.
Default is `False`.
``hasWatermask``
Indicate whether the tile has the water-mask extension.
Default is `False`.
"""
west, south, east, north = bounds
tile = TerrainTile(west=west, south=south, east=east, north=north)
tile.fromFile(
filePath, hasLighting=hasLighting, hasWatermask=hasWatermask, gzipped=gzipped)
return tile
|
Add higher level functions encode and decode
|
Add higher level functions encode and decode
|
Python
|
mit
|
loicgasser/quantized-mesh-tile
|
Add higher level functions encode and decode
|
"""
This module provides high level utility functions to encode and decode a terrain tile.
Reference
---------
"""
from .terrain import TerrainTile
from .topology import TerrainTopology
def encode(geometries, bounds=[], watermask=[], hasLighting=False, gzipped=False):
"""
Function to convert geometries in a quantized-mesh encoded string buffer.
Arguments:
``geometries``
A list of shapely polygon geometries representing 3 dimensional triangles.
or
A list of WKT or WKB Polygons representing 3 dimensional triangles.
or
A list of triplet of vertices using the following structure:
``(((lon0/lat0/height0),(...),(lon2,lat2,height2)),(...))``
``bounds``
The bounds of the terrain tile. (west, south, east, north)
If not defined, the bounds will be computed from the provided geometries.
Default is `[]`.
``hasLighting``
Indicate whether unit vectors should be computed for the lighting extension.
Default is `False`.
``watermask``
A water mask list (Optional). Adds rendering water effect.
The water mask list is either one byte, `[0]` for land and `[255]` for
water, either a list of 256*256 values ranging from 0 to 255.
Values in the mask are defined from north-to-south and west-to-east.
Per default no watermask is applied. Note that the water mask effect depends on
the texture of the raster layer drapped over your terrain.
Default is `[]`.
``gzipped``
Indicate if the tile content is gzipped.
Default is `False`.
"""
topology = TerrainTopology(geometries=geometries, hasLighting=hasLighting)
if len(bounds) == 4:
west, south, east, north = bounds
tile = TerrainTile(watermask=watermask,
west=west, south=south, east=east, north=north, topology=topology)
else:
tile = TerrainTile(watermask=watermask, topology=topology)
return tile.toStringIO(gzipped=gzipped)
def decode(filePath, bounds, hasLighting=False, hasWatermask=False, gzipped=False):
"""
Function to convert a quantized-mesh terrain tile file into a
:class:`quantized_mesh_tile.terrain.TerrainTile` instance.
Arguments:
``filePath``
An absolute or relative path to write the terrain tile. (Required)
``bounds``
The bounds of the terrain tile. (west, south, east, north) (Required).
``hasLighting``
Indicate whether the tile has the lighting extension.
Default is `False`.
``hasWatermask``
Indicate whether the tile has the water-mask extension.
Default is `False`.
"""
west, south, east, north = bounds
tile = TerrainTile(west=west, south=south, east=east, north=north)
tile.fromFile(
filePath, hasLighting=hasLighting, hasWatermask=hasWatermask, gzipped=gzipped)
return tile
|
<commit_before><commit_msg>Add higher level functions encode and decode<commit_after>
|
"""
This module provides high level utility functions to encode and decode a terrain tile.
Reference
---------
"""
from .terrain import TerrainTile
from .topology import TerrainTopology
def encode(geometries, bounds=[], watermask=[], hasLighting=False, gzipped=False):
"""
Function to convert geometries in a quantized-mesh encoded string buffer.
Arguments:
``geometries``
A list of shapely polygon geometries representing 3 dimensional triangles.
or
A list of WKT or WKB Polygons representing 3 dimensional triangles.
or
A list of triplet of vertices using the following structure:
``(((lon0/lat0/height0),(...),(lon2,lat2,height2)),(...))``
``bounds``
The bounds of the terrain tile. (west, south, east, north)
If not defined, the bounds will be computed from the provided geometries.
Default is `[]`.
``hasLighting``
Indicate whether unit vectors should be computed for the lighting extension.
Default is `False`.
``watermask``
A water mask list (Optional). Adds rendering water effect.
The water mask list is either one byte, `[0]` for land and `[255]` for
water, either a list of 256*256 values ranging from 0 to 255.
Values in the mask are defined from north-to-south and west-to-east.
Per default no watermask is applied. Note that the water mask effect depends on
the texture of the raster layer drapped over your terrain.
Default is `[]`.
``gzipped``
Indicate if the tile content is gzipped.
Default is `False`.
"""
topology = TerrainTopology(geometries=geometries, hasLighting=hasLighting)
if len(bounds) == 4:
west, south, east, north = bounds
tile = TerrainTile(watermask=watermask,
west=west, south=south, east=east, north=north, topology=topology)
else:
tile = TerrainTile(watermask=watermask, topology=topology)
return tile.toStringIO(gzipped=gzipped)
def decode(filePath, bounds, hasLighting=False, hasWatermask=False, gzipped=False):
"""
Function to convert a quantized-mesh terrain tile file into a
:class:`quantized_mesh_tile.terrain.TerrainTile` instance.
Arguments:
``filePath``
An absolute or relative path to write the terrain tile. (Required)
``bounds``
The bounds of the terrain tile. (west, south, east, north) (Required).
``hasLighting``
Indicate whether the tile has the lighting extension.
Default is `False`.
``hasWatermask``
Indicate whether the tile has the water-mask extension.
Default is `False`.
"""
west, south, east, north = bounds
tile = TerrainTile(west=west, south=south, east=east, north=north)
tile.fromFile(
filePath, hasLighting=hasLighting, hasWatermask=hasWatermask, gzipped=gzipped)
return tile
|
Add higher level functions encode and decode"""
This module provides high level utility functions to encode and decode a terrain tile.
Reference
---------
"""
from .terrain import TerrainTile
from .topology import TerrainTopology
def encode(geometries, bounds=[], watermask=[], hasLighting=False, gzipped=False):
"""
Function to convert geometries in a quantized-mesh encoded string buffer.
Arguments:
``geometries``
A list of shapely polygon geometries representing 3 dimensional triangles.
or
A list of WKT or WKB Polygons representing 3 dimensional triangles.
or
A list of triplet of vertices using the following structure:
``(((lon0/lat0/height0),(...),(lon2,lat2,height2)),(...))``
``bounds``
The bounds of the terrain tile. (west, south, east, north)
If not defined, the bounds will be computed from the provided geometries.
Default is `[]`.
``hasLighting``
Indicate whether unit vectors should be computed for the lighting extension.
Default is `False`.
``watermask``
A water mask list (Optional). Adds rendering water effect.
The water mask list is either one byte, `[0]` for land and `[255]` for
water, either a list of 256*256 values ranging from 0 to 255.
Values in the mask are defined from north-to-south and west-to-east.
Per default no watermask is applied. Note that the water mask effect depends on
the texture of the raster layer drapped over your terrain.
Default is `[]`.
``gzipped``
Indicate if the tile content is gzipped.
Default is `False`.
"""
topology = TerrainTopology(geometries=geometries, hasLighting=hasLighting)
if len(bounds) == 4:
west, south, east, north = bounds
tile = TerrainTile(watermask=watermask,
west=west, south=south, east=east, north=north, topology=topology)
else:
tile = TerrainTile(watermask=watermask, topology=topology)
return tile.toStringIO(gzipped=gzipped)
def decode(filePath, bounds, hasLighting=False, hasWatermask=False, gzipped=False):
"""
Function to convert a quantized-mesh terrain tile file into a
:class:`quantized_mesh_tile.terrain.TerrainTile` instance.
Arguments:
``filePath``
An absolute or relative path to write the terrain tile. (Required)
``bounds``
The bounds of the terrain tile. (west, south, east, north) (Required).
``hasLighting``
Indicate whether the tile has the lighting extension.
Default is `False`.
``hasWatermask``
Indicate whether the tile has the water-mask extension.
Default is `False`.
"""
west, south, east, north = bounds
tile = TerrainTile(west=west, south=south, east=east, north=north)
tile.fromFile(
filePath, hasLighting=hasLighting, hasWatermask=hasWatermask, gzipped=gzipped)
return tile
|
<commit_before><commit_msg>Add higher level functions encode and decode<commit_after>"""
This module provides high level utility functions to encode and decode a terrain tile.
Reference
---------
"""
from .terrain import TerrainTile
from .topology import TerrainTopology
def encode(geometries, bounds=[], watermask=[], hasLighting=False, gzipped=False):
"""
Function to convert geometries in a quantized-mesh encoded string buffer.
Arguments:
``geometries``
A list of shapely polygon geometries representing 3 dimensional triangles.
or
A list of WKT or WKB Polygons representing 3 dimensional triangles.
or
A list of triplet of vertices using the following structure:
``(((lon0/lat0/height0),(...),(lon2,lat2,height2)),(...))``
``bounds``
The bounds of the terrain tile. (west, south, east, north)
If not defined, the bounds will be computed from the provided geometries.
Default is `[]`.
``hasLighting``
Indicate whether unit vectors should be computed for the lighting extension.
Default is `False`.
``watermask``
A water mask list (Optional). Adds rendering water effect.
The water mask list is either one byte, `[0]` for land and `[255]` for
water, either a list of 256*256 values ranging from 0 to 255.
Values in the mask are defined from north-to-south and west-to-east.
Per default no watermask is applied. Note that the water mask effect depends on
the texture of the raster layer drapped over your terrain.
Default is `[]`.
``gzipped``
Indicate if the tile content is gzipped.
Default is `False`.
"""
topology = TerrainTopology(geometries=geometries, hasLighting=hasLighting)
if len(bounds) == 4:
west, south, east, north = bounds
tile = TerrainTile(watermask=watermask,
west=west, south=south, east=east, north=north, topology=topology)
else:
tile = TerrainTile(watermask=watermask, topology=topology)
return tile.toStringIO(gzipped=gzipped)
def decode(filePath, bounds, hasLighting=False, hasWatermask=False, gzipped=False):
"""
Function to convert a quantized-mesh terrain tile file into a
:class:`quantized_mesh_tile.terrain.TerrainTile` instance.
Arguments:
``filePath``
An absolute or relative path to write the terrain tile. (Required)
``bounds``
The bounds of the terrain tile. (west, south, east, north) (Required).
``hasLighting``
Indicate whether the tile has the lighting extension.
Default is `False`.
``hasWatermask``
Indicate whether the tile has the water-mask extension.
Default is `False`.
"""
west, south, east, north = bounds
tile = TerrainTile(west=west, south=south, east=east, north=north)
tile.fromFile(
filePath, hasLighting=hasLighting, hasWatermask=hasWatermask, gzipped=gzipped)
return tile
|
|
b9ba8a929a539f24d674aed7d7ee98b490a6fcd3
|
mopidy/__init__.py
|
mopidy/__init__.py
|
from mopidy import settings as raw_settings
def get_version():
return u'0.1.dev'
def get_mpd_protocol_version():
return u'0.16.0'
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if attr.isupper() and not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
|
from mopidy import settings as raw_settings
def get_version():
return u'0.1.0a0.dev0'
def get_mpd_protocol_version():
return u'0.16.0'
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if attr.isupper() and not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
|
Switch to a StrictVersion-compatible version number
|
Switch to a StrictVersion-compatible version number
|
Python
|
apache-2.0
|
mokieyue/mopidy,swak/mopidy,pacificIT/mopidy,quartz55/mopidy,swak/mopidy,SuperStarPL/mopidy,mopidy/mopidy,priestd09/mopidy,jmarsik/mopidy,diandiankan/mopidy,diandiankan/mopidy,vrs01/mopidy,glogiotatidis/mopidy,adamcik/mopidy,rawdlite/mopidy,pacificIT/mopidy,dbrgn/mopidy,mokieyue/mopidy,pacificIT/mopidy,ali/mopidy,hkariti/mopidy,jmarsik/mopidy,swak/mopidy,ali/mopidy,hkariti/mopidy,tkem/mopidy,mokieyue/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,bencevans/mopidy,glogiotatidis/mopidy,bencevans/mopidy,bacontext/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,ali/mopidy,hkariti/mopidy,adamcik/mopidy,glogiotatidis/mopidy,rawdlite/mopidy,abarisain/mopidy,liamw9534/mopidy,bencevans/mopidy,mokieyue/mopidy,kingosticks/mopidy,tkem/mopidy,bacontext/mopidy,ZenithDK/mopidy,rawdlite/mopidy,jmarsik/mopidy,swak/mopidy,vrs01/mopidy,abarisain/mopidy,dbrgn/mopidy,dbrgn/mopidy,priestd09/mopidy,jmarsik/mopidy,adamcik/mopidy,SuperStarPL/mopidy,bacontext/mopidy,SuperStarPL/mopidy,bacontext/mopidy,jodal/mopidy,jcass77/mopidy,tkem/mopidy,diandiankan/mopidy,woutervanwijk/mopidy,mopidy/mopidy,kingosticks/mopidy,vrs01/mopidy,hkariti/mopidy,quartz55/mopidy,dbrgn/mopidy,jcass77/mopidy,ZenithDK/mopidy,jodal/mopidy,priestd09/mopidy,pacificIT/mopidy,rawdlite/mopidy,liamw9534/mopidy,quartz55/mopidy,vrs01/mopidy,bencevans/mopidy,ali/mopidy,diandiankan/mopidy,woutervanwijk/mopidy,quartz55/mopidy,jcass77/mopidy,jodal/mopidy,mopidy/mopidy,kingosticks/mopidy,tkem/mopidy
|
from mopidy import settings as raw_settings
def get_version():
return u'0.1.dev'
def get_mpd_protocol_version():
return u'0.16.0'
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if attr.isupper() and not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
Switch to a StrictVersion-compatible version number
|
from mopidy import settings as raw_settings
def get_version():
return u'0.1.0a0.dev0'
def get_mpd_protocol_version():
return u'0.16.0'
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if attr.isupper() and not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
|
<commit_before>from mopidy import settings as raw_settings
def get_version():
return u'0.1.dev'
def get_mpd_protocol_version():
return u'0.16.0'
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if attr.isupper() and not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
<commit_msg>Switch to a StrictVersion-compatible version number<commit_after>
|
from mopidy import settings as raw_settings
def get_version():
return u'0.1.0a0.dev0'
def get_mpd_protocol_version():
return u'0.16.0'
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if attr.isupper() and not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
|
from mopidy import settings as raw_settings
def get_version():
return u'0.1.dev'
def get_mpd_protocol_version():
return u'0.16.0'
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if attr.isupper() and not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
Switch to a StrictVersion-compatible version numberfrom mopidy import settings as raw_settings
def get_version():
return u'0.1.0a0.dev0'
def get_mpd_protocol_version():
return u'0.16.0'
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if attr.isupper() and not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
|
<commit_before>from mopidy import settings as raw_settings
def get_version():
return u'0.1.dev'
def get_mpd_protocol_version():
return u'0.16.0'
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if attr.isupper() and not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
<commit_msg>Switch to a StrictVersion-compatible version number<commit_after>from mopidy import settings as raw_settings
def get_version():
return u'0.1.0a0.dev0'
def get_mpd_protocol_version():
return u'0.16.0'
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if attr.isupper() and not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
|
4b5a39c6bbc82572f67ea03236490e52049adf52
|
tests/query_test/test_scan_range_lengths.py
|
tests/query_test/test_scan_range_lengths.py
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates running with different scan range length values
#
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
elif vector.get_value('table_format').compression_codec != 'none':
pytest.xfail(reason='IMPALA-122')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates running with different scan range length values
#
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
|
Fix IMPALA-122: Lzo scanner with small scan ranges.
|
Fix IMPALA-122: Lzo scanner with small scan ranges.
Change-Id: I5226fd1a1aa368f5b291b78ad371363057ef574e
Reviewed-on: http://gerrit.ent.cloudera.com:8080/140
Reviewed-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
Reviewed-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>
Tested-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>
|
Python
|
apache-2.0
|
tempbottle/Impala,cchanning/Impala,kapilrastogi/Impala,cgvarela/Impala,brightchen/Impala,mapr/impala,caseyching/Impala,rampage644/impala-cut,mapr/impala,caseyching/Impala,rampage644/impala-cut,lirui-intel/Impala,mapr/impala,ibmsoe/ImpalaPPC,cchanning/Impala,gistic/PublicSpatialImpala,gerashegalov/Impala,lnliuxing/Impala,rdblue/Impala,cgvarela/Impala,henryr/Impala,rdblue/Impala,ImpalaToGo/ImpalaToGo,ibmsoe/ImpalaPPC,mapr/impala,lirui-intel/Impala,henryr/Impala,caseyching/Impala,cloudera/recordservice,grundprinzip/Impala,tempbottle/Impala,cgvarela/Impala,andybab/Impala,rdblue/Impala,bratatidas9/Impala-1,theyaa/Impala,andybab/Impala,placrosse/ImpalaToGo,grundprinzip/Impala,theyaa/Impala,XiaominZhang/Impala,brightchen/Impala,lnliuxing/Impala,cloudera/recordservice,rampage644/impala-cut,gistic/PublicSpatialImpala,gerashegalov/Impala,lirui-intel/Impala,ibmsoe/ImpalaPPC,henryr/Impala,AtScaleInc/Impala,bowlofstew/Impala,kapilrastogi/Impala,ibmsoe/ImpalaPPC,caseyching/Impala,gerashegalov/Impala,andybab/Impala,mapr/impala,rampage644/impala-cut,lnliuxing/Impala,cloudera/recordservice,gerashegalov/Impala,henryr/Impala,bowlofstew/Impala,ImpalaToGo/ImpalaToGo,cgvarela/Impala,cgvarela/Impala,AtScaleInc/Impala,theyaa/Impala,caseyching/Impala,XiaominZhang/Impala,gistic/PublicSpatialImpala,scalingdata/Impala,gistic/PublicSpatialImpala,rdblue/Impala,cchanning/Impala,bowlofstew/Impala,brightchen/Impala,grundprinzip/Impala,lirui-intel/Impala,bratatidas9/Impala-1,gistic/PublicSpatialImpala,lirui-intel/Impala,placrosse/ImpalaToGo,tempbottle/Impala,lnliuxing/Impala,brightchen/Impala,bowlofstew/Impala,placrosse/ImpalaToGo,cchanning/Impala,brightchen/Impala,XiaominZhang/Impala,AtScaleInc/Impala,gerashegalov/Impala,theyaa/Impala,cloudera/recordservice,scalingdata/Impala,XiaominZhang/Impala,AtScaleInc/Impala,cloudera/recordservice,caseyching/Impala,tempbottle/Impala,rampage644/impala-cut,kapilrastogi/Impala,ImpalaToGo/ImpalaToGo,XiaominZhang/Impala,AtScaleInc/Impala,XiaominZhang/Impala,bowlofs
tew/Impala,bowlofstew/Impala,gerashegalov/Impala,andybab/Impala,bratatidas9/Impala-1,ImpalaToGo/ImpalaToGo,placrosse/ImpalaToGo,ibmsoe/ImpalaPPC,scalingdata/Impala,brightchen/Impala,henryr/Impala,cchanning/Impala,brightchen/Impala,kapilrastogi/Impala,grundprinzip/Impala,rdblue/Impala,theyaa/Impala,scalingdata/Impala,cloudera/recordservice,cgvarela/Impala,lnliuxing/Impala,lnliuxing/Impala,kapilrastogi/Impala,placrosse/ImpalaToGo,bowlofstew/Impala,rampage644/impala-cut,theyaa/Impala,cloudera/recordservice,grundprinzip/Impala,scalingdata/Impala,gerashegalov/Impala,rdblue/Impala,gistic/PublicSpatialImpala,henryr/Impala,ImpalaToGo/ImpalaToGo,ImpalaToGo/ImpalaToGo,AtScaleInc/Impala,ibmsoe/ImpalaPPC,rdblue/Impala,bratatidas9/Impala-1,ibmsoe/ImpalaPPC,lirui-intel/Impala,bratatidas9/Impala-1,cchanning/Impala,scalingdata/Impala,placrosse/ImpalaToGo,tempbottle/Impala,andybab/Impala,grundprinzip/Impala,kapilrastogi/Impala,caseyching/Impala,bratatidas9/Impala-1,tempbottle/Impala,XiaominZhang/Impala,theyaa/Impala,lirui-intel/Impala,bratatidas9/Impala-1,cchanning/Impala,lnliuxing/Impala,cgvarela/Impala,kapilrastogi/Impala,andybab/Impala,tempbottle/Impala
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates running with different scan range length values
#
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
elif vector.get_value('table_format').compression_codec != 'none':
pytest.xfail(reason='IMPALA-122')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
Fix IMPALA-122: Lzo scanner with small scan ranges.
Change-Id: I5226fd1a1aa368f5b291b78ad371363057ef574e
Reviewed-on: http://gerrit.ent.cloudera.com:8080/140
Reviewed-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
Reviewed-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>
Tested-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates running with different scan range length values
#
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates running with different scan range length values
#
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
elif vector.get_value('table_format').compression_codec != 'none':
pytest.xfail(reason='IMPALA-122')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
<commit_msg>Fix IMPALA-122: Lzo scanner with small scan ranges.
Change-Id: I5226fd1a1aa368f5b291b78ad371363057ef574e
Reviewed-on: http://gerrit.ent.cloudera.com:8080/140
Reviewed-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
Reviewed-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>
Tested-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com><commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates running with different scan range length values
#
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates running with different scan range length values
#
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
elif vector.get_value('table_format').compression_codec != 'none':
pytest.xfail(reason='IMPALA-122')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
Fix IMPALA-122: Lzo scanner with small scan ranges.
Change-Id: I5226fd1a1aa368f5b291b78ad371363057ef574e
Reviewed-on: http://gerrit.ent.cloudera.com:8080/140
Reviewed-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
Reviewed-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>
Tested-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates running with different scan range length values
#
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates running with different scan range length values
#
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
elif vector.get_value('table_format').compression_codec != 'none':
pytest.xfail(reason='IMPALA-122')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
<commit_msg>Fix IMPALA-122: Lzo scanner with small scan ranges.
Change-Id: I5226fd1a1aa368f5b291b78ad371363057ef574e
Reviewed-on: http://gerrit.ent.cloudera.com:8080/140
Reviewed-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
Reviewed-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>
Tested-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com><commit_after>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates running with different scan range length values
#
import pytest
from copy import copy
from tests.common.test_vector import TestDimension
from tests.common.impala_test_suite import ImpalaTestSuite, ALL_NODES_ONLY
# We use very small scan ranges to exercise corner cases in the HDFS scanner more
# thoroughly. In particular, it will exercise:
# 1. scan range with no tuple
# 2. tuple that span across multiple scan ranges
MAX_SCAN_RANGE_LENGTHS = [1, 2, 5]
class TestScanRangeLengths(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestScanRangeLengths, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('max_scan_range_length', *MAX_SCAN_RANGE_LENGTHS))
def test_scan_ranges(self, vector):
if vector.get_value('table_format').file_format != 'text':
pytest.xfail(reason='IMP-636')
vector.get_value('exec_option')['max_scan_range_length'] =\
vector.get_value('max_scan_range_length')
self.run_test_case('QueryTest/hdfs-tiny-scan', vector)
|
ce875a972eb3efaa5201ba1a72ae1d8d6754cfe0
|
python-pscheduler/pscheduler/pscheduler/db.py
|
python-pscheduler/pscheduler/pscheduler/db.py
|
"""
Functions for connecting to the pScheduler database
"""
import psycopg2
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
|
"""
Functions for connecting to the pScheduler database
"""
import os
import psycopg2
import sys
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True, name=None):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
if name is None:
name = os.path.basename(sys.argv[0])
dsn += " application_name=%s" % (name)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True, name=None):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit, name)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
|
Add application name to database connection for great debugging.
|
Add application name to database connection for great debugging.
|
Python
|
apache-2.0
|
mfeit-internet2/pscheduler-dev,perfsonar/pscheduler,perfsonar/pscheduler,perfsonar/pscheduler,perfsonar/pscheduler,mfeit-internet2/pscheduler-dev
|
"""
Functions for connecting to the pScheduler database
"""
import psycopg2
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
Add application name to database connection for great debugging.
|
"""
Functions for connecting to the pScheduler database
"""
import os
import psycopg2
import sys
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True, name=None):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
if name is None:
name = os.path.basename(sys.argv[0])
dsn += " application_name=%s" % (name)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True, name=None):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit, name)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
|
<commit_before>"""
Functions for connecting to the pScheduler database
"""
import psycopg2
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
<commit_msg>Add application name to database connection for great debugging.<commit_after>
|
"""
Functions for connecting to the pScheduler database
"""
import os
import psycopg2
import sys
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True, name=None):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
if name is None:
name = os.path.basename(sys.argv[0])
dsn += " application_name=%s" % (name)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True, name=None):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit, name)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
|
"""
Functions for connecting to the pScheduler database
"""
import psycopg2
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
Add application name to database connection for great debugging."""
Functions for connecting to the pScheduler database
"""
import os
import psycopg2
import sys
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True, name=None):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
if name is None:
name = os.path.basename(sys.argv[0])
dsn += " application_name=%s" % (name)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True, name=None):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit, name)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
|
<commit_before>"""
Functions for connecting to the pScheduler database
"""
import psycopg2
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
<commit_msg>Add application name to database connection for great debugging.<commit_after>"""
Functions for connecting to the pScheduler database
"""
import os
import psycopg2
import sys
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True, name=None):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
if name is None:
name = os.path.basename(sys.argv[0])
dsn += " application_name=%s" % (name)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True, name=None):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit, name)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
|
b34634c0c9a8db389ed48b50ca4b2e4b92105f93
|
node/dictionary.py
|
node/dictionary.py
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
Add some exception handling for dict
|
Add some exception handling for dict
|
Python
|
mit
|
muddyfish/PYKE,muddyfish/PYKE
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
Add some exception handling for dict
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
<commit_before>#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
<commit_msg>Add some exception handling for dict<commit_after>
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
Add some exception handling for dict#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
<commit_before>#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
<commit_msg>Add some exception handling for dict<commit_after>#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
404b7af74fb65299aa9c14e0e40541e3a4a68285
|
setuptools/command/bdist_wininst.py
|
setuptools/command/bdist_wininst.py
|
from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None # work around distutils bug
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False
|
from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
"""
Supplement reinitialize_command to work around
http://bugs.python.org/issue20819
"""
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False
|
Update workaround to reference filed ticket.
|
Update workaround to reference filed ticket.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None # work around distutils bug
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False
Update workaround to reference filed ticket.
|
from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
"""
Supplement reinitialize_command to work around
http://bugs.python.org/issue20819
"""
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False
|
<commit_before>from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None # work around distutils bug
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False
<commit_msg>Update workaround to reference filed ticket.<commit_after>
|
from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
"""
Supplement reinitialize_command to work around
http://bugs.python.org/issue20819
"""
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False
|
from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None # work around distutils bug
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False
Update workaround to reference filed ticket.from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
"""
Supplement reinitialize_command to work around
http://bugs.python.org/issue20819
"""
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False
|
<commit_before>from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None # work around distutils bug
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False
<commit_msg>Update workaround to reference filed ticket.<commit_after>from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
class bdist_wininst(_bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
"""
Supplement reinitialize_command to work around
http://bugs.python.org/issue20819
"""
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
finally:
self._is_running = False
|
efe06967b4896c7d2d4c88fbda96a0504959594b
|
opps/core/admin.py
|
opps/core/admin.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.save()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel']
search_fields = ['title', 'slug', 'headline', 'channel']
exclude = ('user',)
date_hierarchy = ('date_available')
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.save()
|
Add basic attr on PublishableAdmin
|
Add basic attr on PublishableAdmin
|
Python
|
mit
|
williamroot/opps,williamroot/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,opps/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,jeanmask/opps,opps/opps,opps/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.save()
Add basic attr on PublishableAdmin
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel']
search_fields = ['title', 'slug', 'headline', 'channel']
exclude = ('user',)
date_hierarchy = ('date_available')
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.save()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.save()
<commit_msg>Add basic attr on PublishableAdmin<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel']
search_fields = ['title', 'slug', 'headline', 'channel']
exclude = ('user',)
date_hierarchy = ('date_available')
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.save()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.save()
Add basic attr on PublishableAdmin#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel']
search_fields = ['title', 'slug', 'headline', 'channel']
exclude = ('user',)
date_hierarchy = ('date_available')
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.save()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.save()
<commit_msg>Add basic attr on PublishableAdmin<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
class PublishableAdmin(admin.ModelAdmin):
"""
Overrides standard admin.ModelAdmin save_model method
It sets user (author) based on data from requet.
"""
list_display = ['title', 'channel', 'date_available', 'published']
list_filter = ['date_available', 'published', 'channel']
search_fields = ['title', 'slug', 'headline', 'channel']
exclude = ('user',)
date_hierarchy = ('date_available')
def save_model(self, request, obj, form, change):
if getattr(obj, 'pk', None) is None:
obj.user = request.user
obj.save()
|
927172b383e1c62b9aab34f38ef95e96ed277cbe
|
conda_env/specs/yaml_file.py
|
conda_env/specs/yaml_file.py
|
from .. import env
from ..exceptions import EnvironmentFileNotFound
class YamlFileSpec(object):
_environment = None
def __init__(self, filename=None, **kwargs):
self.filename = filename
self.msg = None
def can_handle(self):
try:
self._environment = env.from_file(self.filename)
return True
except EnvironmentFileNotFound, e:
self.msg = e.message
return False
@property
def environment(self):
if not self._environment:
self.can_handle()
return self._environment
|
from .. import env
from ..exceptions import EnvironmentFileNotFound
class YamlFileSpec(object):
_environment = None
def __init__(self, filename=None, **kwargs):
self.filename = filename
self.msg = None
def can_handle(self):
try:
self._environment = env.from_file(self.filename)
return True
except EnvironmentFileNotFound as e:
self.msg = e.message
return False
@property
def environment(self):
if not self._environment:
self.can_handle()
return self._environment
|
Update Python 2-style exception handling to 'as'
|
Update Python 2-style exception handling to 'as'
|
Python
|
bsd-3-clause
|
isaac-kit/conda-env,asmeurer/conda-env,mikecroucher/conda-env,isaac-kit/conda-env,conda/conda-env,ESSS/conda-env,ESSS/conda-env,dan-blanchard/conda-env,nicoddemus/conda-env,asmeurer/conda-env,conda/conda-env,phobson/conda-env,nicoddemus/conda-env,mikecroucher/conda-env,dan-blanchard/conda-env,phobson/conda-env
|
from .. import env
from ..exceptions import EnvironmentFileNotFound
class YamlFileSpec(object):
_environment = None
def __init__(self, filename=None, **kwargs):
self.filename = filename
self.msg = None
def can_handle(self):
try:
self._environment = env.from_file(self.filename)
return True
except EnvironmentFileNotFound, e:
self.msg = e.message
return False
@property
def environment(self):
if not self._environment:
self.can_handle()
return self._environment
Update Python 2-style exception handling to 'as'
|
from .. import env
from ..exceptions import EnvironmentFileNotFound
class YamlFileSpec(object):
_environment = None
def __init__(self, filename=None, **kwargs):
self.filename = filename
self.msg = None
def can_handle(self):
try:
self._environment = env.from_file(self.filename)
return True
except EnvironmentFileNotFound as e:
self.msg = e.message
return False
@property
def environment(self):
if not self._environment:
self.can_handle()
return self._environment
|
<commit_before>from .. import env
from ..exceptions import EnvironmentFileNotFound
class YamlFileSpec(object):
_environment = None
def __init__(self, filename=None, **kwargs):
self.filename = filename
self.msg = None
def can_handle(self):
try:
self._environment = env.from_file(self.filename)
return True
except EnvironmentFileNotFound, e:
self.msg = e.message
return False
@property
def environment(self):
if not self._environment:
self.can_handle()
return self._environment
<commit_msg>Update Python 2-style exception handling to 'as'<commit_after>
|
from .. import env
from ..exceptions import EnvironmentFileNotFound
class YamlFileSpec(object):
_environment = None
def __init__(self, filename=None, **kwargs):
self.filename = filename
self.msg = None
def can_handle(self):
try:
self._environment = env.from_file(self.filename)
return True
except EnvironmentFileNotFound as e:
self.msg = e.message
return False
@property
def environment(self):
if not self._environment:
self.can_handle()
return self._environment
|
from .. import env
from ..exceptions import EnvironmentFileNotFound
class YamlFileSpec(object):
_environment = None
def __init__(self, filename=None, **kwargs):
self.filename = filename
self.msg = None
def can_handle(self):
try:
self._environment = env.from_file(self.filename)
return True
except EnvironmentFileNotFound, e:
self.msg = e.message
return False
@property
def environment(self):
if not self._environment:
self.can_handle()
return self._environment
Update Python 2-style exception handling to 'as'from .. import env
from ..exceptions import EnvironmentFileNotFound
class YamlFileSpec(object):
_environment = None
def __init__(self, filename=None, **kwargs):
self.filename = filename
self.msg = None
def can_handle(self):
try:
self._environment = env.from_file(self.filename)
return True
except EnvironmentFileNotFound as e:
self.msg = e.message
return False
@property
def environment(self):
if not self._environment:
self.can_handle()
return self._environment
|
<commit_before>from .. import env
from ..exceptions import EnvironmentFileNotFound
class YamlFileSpec(object):
_environment = None
def __init__(self, filename=None, **kwargs):
self.filename = filename
self.msg = None
def can_handle(self):
try:
self._environment = env.from_file(self.filename)
return True
except EnvironmentFileNotFound, e:
self.msg = e.message
return False
@property
def environment(self):
if not self._environment:
self.can_handle()
return self._environment
<commit_msg>Update Python 2-style exception handling to 'as'<commit_after>from .. import env
from ..exceptions import EnvironmentFileNotFound
class YamlFileSpec(object):
_environment = None
def __init__(self, filename=None, **kwargs):
self.filename = filename
self.msg = None
def can_handle(self):
try:
self._environment = env.from_file(self.filename)
return True
except EnvironmentFileNotFound as e:
self.msg = e.message
return False
@property
def environment(self):
if not self._environment:
self.can_handle()
return self._environment
|
db751eabb690af3b2b4712f46a41b41c1e0499a2
|
lbrynet/__init__.py
|
lbrynet/__init__.py
|
import logging
__version__ = "0.17.1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
import logging
__version__ = "0.17.2rc1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
Bump version 0.17.1 --> 0.17.2rc1
|
Bump version 0.17.1 --> 0.17.2rc1
Signed-off-by: Jack Robison <40884020c67726395ea162083a125620dc32cdab@lbry.io>
|
Python
|
mit
|
lbryio/lbry,lbryio/lbry,lbryio/lbry
|
import logging
__version__ = "0.17.1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
Bump version 0.17.1 --> 0.17.2rc1
Signed-off-by: Jack Robison <40884020c67726395ea162083a125620dc32cdab@lbry.io>
|
import logging
__version__ = "0.17.2rc1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
<commit_before>import logging
__version__ = "0.17.1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
<commit_msg>Bump version 0.17.1 --> 0.17.2rc1
Signed-off-by: Jack Robison <40884020c67726395ea162083a125620dc32cdab@lbry.io><commit_after>
|
import logging
__version__ = "0.17.2rc1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
import logging
__version__ = "0.17.1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
Bump version 0.17.1 --> 0.17.2rc1
Signed-off-by: Jack Robison <40884020c67726395ea162083a125620dc32cdab@lbry.io>import logging
__version__ = "0.17.2rc1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
<commit_before>import logging
__version__ = "0.17.1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
<commit_msg>Bump version 0.17.1 --> 0.17.2rc1
Signed-off-by: Jack Robison <40884020c67726395ea162083a125620dc32cdab@lbry.io><commit_after>import logging
__version__ = "0.17.2rc1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.