commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e4e930587e6ad145dbdbf1f742b942d63bf645a2 | wandb/git_repo.py | wandb/git_repo.py | from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
return self.repo.create_tag("wandb/"+name, message=message, force=True)
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
| from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
try:
return self.repo.create_tag("wandb/"+name, message=message, force=True)
except GitCommandError:
print("Failed to tag repository.")
return None
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
| Handle no git user configured | Handle no git user configured
| Python | mit | wandb/client,wandb/client,wandb/client | from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
return self.repo.create_tag("wandb/"+name, message=message, force=True)
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
Handle no git user configured | from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
try:
return self.repo.create_tag("wandb/"+name, message=message, force=True)
except GitCommandError:
print("Failed to tag repository.")
return None
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
| <commit_before>from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
return self.repo.create_tag("wandb/"+name, message=message, force=True)
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
<commit_msg>Handle no git user configured<commit_after> | from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
try:
return self.repo.create_tag("wandb/"+name, message=message, force=True)
except GitCommandError:
print("Failed to tag repository.")
return None
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
| from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
return self.repo.create_tag("wandb/"+name, message=message, force=True)
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
Handle no git user configuredfrom git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
try:
return self.repo.create_tag("wandb/"+name, message=message, force=True)
except GitCommandError:
print("Failed to tag repository.")
return None
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
| <commit_before>from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
return self.repo.create_tag("wandb/"+name, message=message, force=True)
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
<commit_msg>Handle no git user configured<commit_after>from git import Repo, exc
import os
class GitRepo(object):
def __init__(self, root=None, remote="origin", lazy=True):
self.remote_name = remote
self.root = root
self._repo = None
if not lazy:
self.repo
@property
def repo(self):
if self._repo is None:
if self.remote_name is None:
self._repo = False
else:
try:
self._repo = Repo(self.root or os.getcwd(), search_parent_directories=True)
except exc.InvalidGitRepositoryError:
self._repo = False
return self._repo
@property
def enabled(self):
return self.repo
@property
def dirty(self):
return self.repo.is_dirty()
@property
def last_commit(self):
if not self.repo:
return None
return self.repo.head.commit.hexsha
@property
def remote(self):
if not self.repo:
return None
try:
return self.repo.remotes[self.remote_name]
except IndexError:
return None
@property
def remote_url(self):
if not self.remote:
return None
return self.remote.url
def tag(self, name, message):
try:
return self.repo.create_tag("wandb/"+name, message=message, force=True)
except GitCommandError:
print("Failed to tag repository.")
return None
def push(self, name):
if self.remote:
return self.remote.push("wandb/"+name, force=True)
|
cc46e4d251c479563318c93f419fead373fa0c12 | bugsnag/tornado/__init__.py | bugsnag/tornado/__init__.py | from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
# Set the request info
bugsnag.configure_request(
user_id = self.request.remote_ip,
context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
request_data = {
"url": self.request.full_url(),
"method": self.request.method,
"arguments": self.request.arguments,
"cookies": self.cookies,
},
)
# Notify bugsnag
bugsnag.notify(e)
# Call the parent handler
RequestHandler._handle_request_exception(self, e) | from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
# Set the request info
bugsnag.configure_request(
user_id = self.request.remote_ip,
context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
request_data = {
"url": self.request.full_url(),
"method": self.request.method,
"arguments": self.request.arguments,
},
)
# Notify bugsnag
bugsnag.notify(e)
# Call the parent handler
RequestHandler._handle_request_exception(self, e) | Remove cookie support from tornado until it works in all situations | Remove cookie support from tornado until it works in all situations
| Python | mit | bugsnag/bugsnag-python,overplumbum/bugsnag-python,bugsnag/bugsnag-python,overplumbum/bugsnag-python | from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
# Set the request info
bugsnag.configure_request(
user_id = self.request.remote_ip,
context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
request_data = {
"url": self.request.full_url(),
"method": self.request.method,
"arguments": self.request.arguments,
"cookies": self.cookies,
},
)
# Notify bugsnag
bugsnag.notify(e)
# Call the parent handler
RequestHandler._handle_request_exception(self, e)Remove cookie support from tornado until it works in all situations | from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
# Set the request info
bugsnag.configure_request(
user_id = self.request.remote_ip,
context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
request_data = {
"url": self.request.full_url(),
"method": self.request.method,
"arguments": self.request.arguments,
},
)
# Notify bugsnag
bugsnag.notify(e)
# Call the parent handler
RequestHandler._handle_request_exception(self, e) | <commit_before>from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
# Set the request info
bugsnag.configure_request(
user_id = self.request.remote_ip,
context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
request_data = {
"url": self.request.full_url(),
"method": self.request.method,
"arguments": self.request.arguments,
"cookies": self.cookies,
},
)
# Notify bugsnag
bugsnag.notify(e)
# Call the parent handler
RequestHandler._handle_request_exception(self, e)<commit_msg>Remove cookie support from tornado until it works in all situations<commit_after> | from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
# Set the request info
bugsnag.configure_request(
user_id = self.request.remote_ip,
context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
request_data = {
"url": self.request.full_url(),
"method": self.request.method,
"arguments": self.request.arguments,
},
)
# Notify bugsnag
bugsnag.notify(e)
# Call the parent handler
RequestHandler._handle_request_exception(self, e) | from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
# Set the request info
bugsnag.configure_request(
user_id = self.request.remote_ip,
context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
request_data = {
"url": self.request.full_url(),
"method": self.request.method,
"arguments": self.request.arguments,
"cookies": self.cookies,
},
)
# Notify bugsnag
bugsnag.notify(e)
# Call the parent handler
RequestHandler._handle_request_exception(self, e)Remove cookie support from tornado until it works in all situationsfrom tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
# Set the request info
bugsnag.configure_request(
user_id = self.request.remote_ip,
context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
request_data = {
"url": self.request.full_url(),
"method": self.request.method,
"arguments": self.request.arguments,
},
)
# Notify bugsnag
bugsnag.notify(e)
# Call the parent handler
RequestHandler._handle_request_exception(self, e) | <commit_before>from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
# Set the request info
bugsnag.configure_request(
user_id = self.request.remote_ip,
context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
request_data = {
"url": self.request.full_url(),
"method": self.request.method,
"arguments": self.request.arguments,
"cookies": self.cookies,
},
)
# Notify bugsnag
bugsnag.notify(e)
# Call the parent handler
RequestHandler._handle_request_exception(self, e)<commit_msg>Remove cookie support from tornado until it works in all situations<commit_after>from tornado.web import RequestHandler
import bugsnag
class BugsnagRequestHandler(RequestHandler):
def _handle_request_exception(self, e):
# Set the request info
bugsnag.configure_request(
user_id = self.request.remote_ip,
context = "%s %s" % (self.request.method, self.request.uri.split('?')[0]),
request_data = {
"url": self.request.full_url(),
"method": self.request.method,
"arguments": self.request.arguments,
},
)
# Notify bugsnag
bugsnag.notify(e)
# Call the parent handler
RequestHandler._handle_request_exception(self, e) |
187e55c8ad204c3a6196794aba1b59fd0fa62b00 | instance/config.py | instance/config.py | import os
class Config(object):
"""Parent configuration class"""
DEBUG = False
CSRF_ENABLED = True
# ALT: <variable> = os.getenv('<env_var_name>')
SECRET = 'HeathLEDGERwasTHEBESTidc'
# database with host configuration removed. Defaults to machine localhost
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/bucketlist_api"
BCRYPT_LOG_ROUNDS = 13
class DevelopmentConfig(Config):
"""Configurations for Development"""
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class TestingConfig(Config):
"""Configurations for Testing with a separate test database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/test_db"
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class StagingConfig(Config):
"""Configurations for staging"""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production"""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig
}
| import os
class Config(object):
"""Parent configuration class"""
DEBUG = False
CSRF_ENABLED = True
# ALT: <variable> = os.getenv('<env_var_name>')
__SECRET = 'HeathLEDGERwasTHEBESTidc'
# database with host configuration removed. Defaults to machine localhost
__DB_NAME = "postgresql://bruce:Inline-360@localhost/bucketlist_api"
BCRYPT_LOG_ROUNDS = 13
SECRET_KEY = os.getenv('SECRET') or __SECRET
AUTH_TOKEN_DURATION = os.getenv('TOKEN_DURATION') or 300
SQLALCHEMY_DATABASE_URI = os.getenv('DB_NAME') or __DB_NAME
class DevelopmentConfig(Config):
"""Configurations for Development"""
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class TestingConfig(Config):
"""Configurations for Testing with a separate test database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/test_db"
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class StagingConfig(Config):
"""Configurations for staging"""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production"""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig
}
| Modify Config file Pick DB_NAME, TOKEN_DURATION and SECRET from the environment Use the hard coded details as backup in case not in the environment | Modify Config file
Pick DB_NAME, TOKEN_DURATION and SECRET from the environment
Use the hard coded details as backup in case not in the environment
| Python | mit | Elbertbiggs360/buckelist-api | import os
class Config(object):
"""Parent configuration class"""
DEBUG = False
CSRF_ENABLED = True
# ALT: <variable> = os.getenv('<env_var_name>')
SECRET = 'HeathLEDGERwasTHEBESTidc'
# database with host configuration removed. Defaults to machine localhost
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/bucketlist_api"
BCRYPT_LOG_ROUNDS = 13
class DevelopmentConfig(Config):
"""Configurations for Development"""
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class TestingConfig(Config):
"""Configurations for Testing with a separate test database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/test_db"
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class StagingConfig(Config):
"""Configurations for staging"""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production"""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig
}
Modify Config file
Pick DB_NAME, TOKEN_DURATION and SECRET from the environment
Use the hard coded details as backup in case not in the environment | import os
class Config(object):
"""Parent configuration class"""
DEBUG = False
CSRF_ENABLED = True
# ALT: <variable> = os.getenv('<env_var_name>')
__SECRET = 'HeathLEDGERwasTHEBESTidc'
# database with host configuration removed. Defaults to machine localhost
__DB_NAME = "postgresql://bruce:Inline-360@localhost/bucketlist_api"
BCRYPT_LOG_ROUNDS = 13
SECRET_KEY = os.getenv('SECRET') or __SECRET
AUTH_TOKEN_DURATION = os.getenv('TOKEN_DURATION') or 300
SQLALCHEMY_DATABASE_URI = os.getenv('DB_NAME') or __DB_NAME
class DevelopmentConfig(Config):
"""Configurations for Development"""
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class TestingConfig(Config):
"""Configurations for Testing with a separate test database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/test_db"
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class StagingConfig(Config):
"""Configurations for staging"""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production"""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig
}
| <commit_before>import os
class Config(object):
"""Parent configuration class"""
DEBUG = False
CSRF_ENABLED = True
# ALT: <variable> = os.getenv('<env_var_name>')
SECRET = 'HeathLEDGERwasTHEBESTidc'
# database with host configuration removed. Defaults to machine localhost
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/bucketlist_api"
BCRYPT_LOG_ROUNDS = 13
class DevelopmentConfig(Config):
"""Configurations for Development"""
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class TestingConfig(Config):
"""Configurations for Testing with a separate test database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/test_db"
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class StagingConfig(Config):
"""Configurations for staging"""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production"""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig
}
<commit_msg>Modify Config file
Pick DB_NAME, TOKEN_DURATION and SECRET from the environment
Use the hard coded details as backup in case not in the environment<commit_after> | import os
class Config(object):
"""Parent configuration class"""
DEBUG = False
CSRF_ENABLED = True
# ALT: <variable> = os.getenv('<env_var_name>')
__SECRET = 'HeathLEDGERwasTHEBESTidc'
# database with host configuration removed. Defaults to machine localhost
__DB_NAME = "postgresql://bruce:Inline-360@localhost/bucketlist_api"
BCRYPT_LOG_ROUNDS = 13
SECRET_KEY = os.getenv('SECRET') or __SECRET
AUTH_TOKEN_DURATION = os.getenv('TOKEN_DURATION') or 300
SQLALCHEMY_DATABASE_URI = os.getenv('DB_NAME') or __DB_NAME
class DevelopmentConfig(Config):
"""Configurations for Development"""
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class TestingConfig(Config):
"""Configurations for Testing with a separate test database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/test_db"
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class StagingConfig(Config):
"""Configurations for staging"""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production"""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig
}
| import os
class Config(object):
"""Parent configuration class"""
DEBUG = False
CSRF_ENABLED = True
# ALT: <variable> = os.getenv('<env_var_name>')
SECRET = 'HeathLEDGERwasTHEBESTidc'
# database with host configuration removed. Defaults to machine localhost
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/bucketlist_api"
BCRYPT_LOG_ROUNDS = 13
class DevelopmentConfig(Config):
"""Configurations for Development"""
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class TestingConfig(Config):
"""Configurations for Testing with a separate test database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/test_db"
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class StagingConfig(Config):
"""Configurations for staging"""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production"""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig
}
Modify Config file
Pick DB_NAME, TOKEN_DURATION and SECRET from the environment
Use the hard coded details as backup in case not in the environmentimport os
class Config(object):
"""Parent configuration class"""
DEBUG = False
CSRF_ENABLED = True
# ALT: <variable> = os.getenv('<env_var_name>')
__SECRET = 'HeathLEDGERwasTHEBESTidc'
# database with host configuration removed. Defaults to machine localhost
__DB_NAME = "postgresql://bruce:Inline-360@localhost/bucketlist_api"
BCRYPT_LOG_ROUNDS = 13
SECRET_KEY = os.getenv('SECRET') or __SECRET
AUTH_TOKEN_DURATION = os.getenv('TOKEN_DURATION') or 300
SQLALCHEMY_DATABASE_URI = os.getenv('DB_NAME') or __DB_NAME
class DevelopmentConfig(Config):
"""Configurations for Development"""
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class TestingConfig(Config):
"""Configurations for Testing with a separate test database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/test_db"
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class StagingConfig(Config):
"""Configurations for staging"""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production"""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig
}
| <commit_before>import os
class Config(object):
"""Parent configuration class"""
DEBUG = False
CSRF_ENABLED = True
# ALT: <variable> = os.getenv('<env_var_name>')
SECRET = 'HeathLEDGERwasTHEBESTidc'
# database with host configuration removed. Defaults to machine localhost
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/bucketlist_api"
BCRYPT_LOG_ROUNDS = 13
class DevelopmentConfig(Config):
"""Configurations for Development"""
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class TestingConfig(Config):
"""Configurations for Testing with a separate test database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/test_db"
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class StagingConfig(Config):
"""Configurations for staging"""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production"""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig
}
<commit_msg>Modify Config file
Pick DB_NAME, TOKEN_DURATION and SECRET from the environment
Use the hard coded details as backup in case not in the environment<commit_after>import os
class Config(object):
"""Parent configuration class"""
DEBUG = False
CSRF_ENABLED = True
# ALT: <variable> = os.getenv('<env_var_name>')
__SECRET = 'HeathLEDGERwasTHEBESTidc'
# database with host configuration removed. Defaults to machine localhost
__DB_NAME = "postgresql://bruce:Inline-360@localhost/bucketlist_api"
BCRYPT_LOG_ROUNDS = 13
SECRET_KEY = os.getenv('SECRET') or __SECRET
AUTH_TOKEN_DURATION = os.getenv('TOKEN_DURATION') or 300
SQLALCHEMY_DATABASE_URI = os.getenv('DB_NAME') or __DB_NAME
class DevelopmentConfig(Config):
"""Configurations for Development"""
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class TestingConfig(Config):
"""Configurations for Testing with a separate test database"""
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://bruce:Inline-360@localhost/test_db"
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
class StagingConfig(Config):
"""Configurations for staging"""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production"""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig
}
|
494884ae3510c0cf23704cb772ad4a024040a9c7 | bitHopper/Website/Worker_Page.py | bitHopper/Website/Worker_Page.py | from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
import logging
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring %s', pool)
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
| from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
import logging
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring a Pool. If no pools apear on /worker please update your version of btcnet_info')
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
| Update the ignoring error message | Update the ignoring error message
| Python | mit | c00w/bitHopper,c00w/bitHopper | from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
import logging
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring %s', pool)
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
Update the ignoring error message | from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
import logging
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring a Pool. If no pools apear on /worker please update your version of btcnet_info')
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
| <commit_before>from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
import logging
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring %s', pool)
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
<commit_msg>Update the ignoring error message<commit_after> | from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
import logging
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring a Pool. If no pools apear on /worker please update your version of btcnet_info')
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
| from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
import logging
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring %s', pool)
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
Update the ignoring error messagefrom bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
import logging
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring a Pool. If no pools apear on /worker please update your version of btcnet_info')
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
| <commit_before>from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
import logging
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring %s', pool)
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
<commit_msg>Update the ignoring error message<commit_after>from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
import logging
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring a Pool. If no pools apear on /worker please update your version of btcnet_info')
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
|
194748bfbc67741275fd36eb2eaafbde55caeabb | django_emarsys/management/commands/emarsys_sync_events.py | django_emarsys/management/commands/emarsys_sync_events.py | # -*- coding: utf-8 -*-
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
| Fix issue with management command log output and non ascii event names | Fix issue with management command log output and non ascii event names
| Python | mit | machtfit/django-emarsys,machtfit/django-emarsys | # -*- coding: utf-8 -*-
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
Fix issue with management command log output and non ascii event names | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
| <commit_before># -*- coding: utf-8 -*-
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
<commit_msg>Fix issue with management command log output and non ascii event names<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
| # -*- coding: utf-8 -*-
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
Fix issue with management command log output and non ascii event names# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
| <commit_before># -*- coding: utf-8 -*-
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
<commit_msg>Fix issue with management command log output and non ascii event names<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
|
a2abc6342162c9158551b810f4d666d6d13dcd15 | client/python/plot_request_times.py | client/python/plot_request_times.py | import requests
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
for monitoring_data in r.json():
print 'URL: ' + monitoring_data['urlToMonitor']['url']
| import requests
from plotly.offline import plot
import plotly.graph_objs as go
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
# build traces for plotting from monitoring data
request_times = list()
timestamps = list()
timestamp = 0
url = r.json()[0]['urlToMonitor']['url']
for monitoring_data in r.json():
request_time = monitoring_data['timeNeededForRequest']
request_times.append(request_time)
timestamps.append(timestamp)
timestamp = timestamp + 1
plot([go.Scatter(x = timestamps, y = request_times, name = 'THE NAME'), go.Scatter(x = timestamps, y = request_times, name =
'THE OTHER NAME')], filename='request_times.html')
| Add prototype for plotting client | Add prototype for plotting client
| Python | mit | gernd/simple-site-mon | import requests
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
for monitoring_data in r.json():
print 'URL: ' + monitoring_data['urlToMonitor']['url']
Add prototype for plotting client | import requests
from plotly.offline import plot
import plotly.graph_objs as go
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
# build traces for plotting from monitoring data
request_times = list()
timestamps = list()
timestamp = 0
url = r.json()[0]['urlToMonitor']['url']
for monitoring_data in r.json():
request_time = monitoring_data['timeNeededForRequest']
request_times.append(request_time)
timestamps.append(timestamp)
timestamp = timestamp + 1
plot([go.Scatter(x = timestamps, y = request_times, name = 'THE NAME'), go.Scatter(x = timestamps, y = request_times, name =
'THE OTHER NAME')], filename='request_times.html')
| <commit_before>import requests
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
for monitoring_data in r.json():
print 'URL: ' + monitoring_data['urlToMonitor']['url']
<commit_msg>Add prototype for plotting client<commit_after> | import requests
from plotly.offline import plot
import plotly.graph_objs as go
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
# build traces for plotting from monitoring data
request_times = list()
timestamps = list()
timestamp = 0
url = r.json()[0]['urlToMonitor']['url']
for monitoring_data in r.json():
request_time = monitoring_data['timeNeededForRequest']
request_times.append(request_time)
timestamps.append(timestamp)
timestamp = timestamp + 1
plot([go.Scatter(x = timestamps, y = request_times, name = 'THE NAME'), go.Scatter(x = timestamps, y = request_times, name =
'THE OTHER NAME')], filename='request_times.html')
| import requests
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
for monitoring_data in r.json():
print 'URL: ' + monitoring_data['urlToMonitor']['url']
Add prototype for plotting clientimport requests
from plotly.offline import plot
import plotly.graph_objs as go
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
# build traces for plotting from monitoring data
request_times = list()
timestamps = list()
timestamp = 0
url = r.json()[0]['urlToMonitor']['url']
for monitoring_data in r.json():
request_time = monitoring_data['timeNeededForRequest']
request_times.append(request_time)
timestamps.append(timestamp)
timestamp = timestamp + 1
plot([go.Scatter(x = timestamps, y = request_times, name = 'THE NAME'), go.Scatter(x = timestamps, y = request_times, name =
'THE OTHER NAME')], filename='request_times.html')
| <commit_before>import requests
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
for monitoring_data in r.json():
print 'URL: ' + monitoring_data['urlToMonitor']['url']
<commit_msg>Add prototype for plotting client<commit_after>import requests
from plotly.offline import plot
import plotly.graph_objs as go
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
# build traces for plotting from monitoring data
request_times = list()
timestamps = list()
timestamp = 0
url = r.json()[0]['urlToMonitor']['url']
for monitoring_data in r.json():
request_time = monitoring_data['timeNeededForRequest']
request_times.append(request_time)
timestamps.append(timestamp)
timestamp = timestamp + 1
plot([go.Scatter(x = timestamps, y = request_times, name = 'THE NAME'), go.Scatter(x = timestamps, y = request_times, name =
'THE OTHER NAME')], filename='request_times.html')
|
cc2f0900b02891e0ab23133778065a6f6768cd5c | setup.py | setup.py | from distutils.core import setup
setup(
name = 'furs_fiscal',
packages = ['furs_fiscal'],
version = '0.1.0',
description = 'Python library for simplified communication with FURS (Financna uprava Republike Slovenije).',
author = 'Boris Savic',
author_email = 'boris70@gmail.com',
url = 'https://github.com/boris-savic/python-furs-fiscal',
download_url = 'https://github.com/boris-savic/python-furs-fiscal/tarball/0.1',
keywords = ['FURS', 'fiscal', 'fiscal register', 'davcne blagajne'],
classifiers = [],
install_requires=[
'requests',
'python-jose',
'pyOpenSSL',
'urllib3',
'pyasn1',
'ndg-httpsclient'
]
) | from distutils.core import setup
setup(
name = 'furs_fiscal',
packages = ['furs_fiscal'],
version = '0.1.3',
description = 'Python library for simplified communication with FURS (Financna uprava Republike Slovenije).',
author = 'Boris Savic',
author_email = 'boris70@gmail.com',
url = 'https://github.com/boris-savic/python-furs-fiscal',
download_url = 'https://github.com/boris-savic/python-furs-fiscal/tarball/0.1.3',
keywords = ['FURS', 'fiscal', 'fiscal register', 'davcne blagajne'],
classifiers = [],
package_data={'furs_fiscal': ['certs/*.pem']},
install_requires=[
'requests',
'python-jose',
'pyOpenSSL',
'urllib3',
'pyasn1',
'ndg-httpsclient'
]
) | Add test_certificate.pem to the release | Add test_certificate.pem to the release
| Python | mit | boris-savic/python-furs-fiscal | from distutils.core import setup
setup(
name = 'furs_fiscal',
packages = ['furs_fiscal'],
version = '0.1.0',
description = 'Python library for simplified communication with FURS (Financna uprava Republike Slovenije).',
author = 'Boris Savic',
author_email = 'boris70@gmail.com',
url = 'https://github.com/boris-savic/python-furs-fiscal',
download_url = 'https://github.com/boris-savic/python-furs-fiscal/tarball/0.1',
keywords = ['FURS', 'fiscal', 'fiscal register', 'davcne blagajne'],
classifiers = [],
install_requires=[
'requests',
'python-jose',
'pyOpenSSL',
'urllib3',
'pyasn1',
'ndg-httpsclient'
]
)Add test_certificate.pem to the release | from distutils.core import setup
setup(
name = 'furs_fiscal',
packages = ['furs_fiscal'],
version = '0.1.3',
description = 'Python library for simplified communication with FURS (Financna uprava Republike Slovenije).',
author = 'Boris Savic',
author_email = 'boris70@gmail.com',
url = 'https://github.com/boris-savic/python-furs-fiscal',
download_url = 'https://github.com/boris-savic/python-furs-fiscal/tarball/0.1.3',
keywords = ['FURS', 'fiscal', 'fiscal register', 'davcne blagajne'],
classifiers = [],
package_data={'furs_fiscal': ['certs/*.pem']},
install_requires=[
'requests',
'python-jose',
'pyOpenSSL',
'urllib3',
'pyasn1',
'ndg-httpsclient'
]
) | <commit_before>from distutils.core import setup
setup(
name = 'furs_fiscal',
packages = ['furs_fiscal'],
version = '0.1.0',
description = 'Python library for simplified communication with FURS (Financna uprava Republike Slovenije).',
author = 'Boris Savic',
author_email = 'boris70@gmail.com',
url = 'https://github.com/boris-savic/python-furs-fiscal',
download_url = 'https://github.com/boris-savic/python-furs-fiscal/tarball/0.1',
keywords = ['FURS', 'fiscal', 'fiscal register', 'davcne blagajne'],
classifiers = [],
install_requires=[
'requests',
'python-jose',
'pyOpenSSL',
'urllib3',
'pyasn1',
'ndg-httpsclient'
]
)<commit_msg>Add test_certificate.pem to the release<commit_after> | from distutils.core import setup
setup(
name = 'furs_fiscal',
packages = ['furs_fiscal'],
version = '0.1.3',
description = 'Python library for simplified communication with FURS (Financna uprava Republike Slovenije).',
author = 'Boris Savic',
author_email = 'boris70@gmail.com',
url = 'https://github.com/boris-savic/python-furs-fiscal',
download_url = 'https://github.com/boris-savic/python-furs-fiscal/tarball/0.1.3',
keywords = ['FURS', 'fiscal', 'fiscal register', 'davcne blagajne'],
classifiers = [],
package_data={'furs_fiscal': ['certs/*.pem']},
install_requires=[
'requests',
'python-jose',
'pyOpenSSL',
'urllib3',
'pyasn1',
'ndg-httpsclient'
]
) | from distutils.core import setup
setup(
name = 'furs_fiscal',
packages = ['furs_fiscal'],
version = '0.1.0',
description = 'Python library for simplified communication with FURS (Financna uprava Republike Slovenije).',
author = 'Boris Savic',
author_email = 'boris70@gmail.com',
url = 'https://github.com/boris-savic/python-furs-fiscal',
download_url = 'https://github.com/boris-savic/python-furs-fiscal/tarball/0.1',
keywords = ['FURS', 'fiscal', 'fiscal register', 'davcne blagajne'],
classifiers = [],
install_requires=[
'requests',
'python-jose',
'pyOpenSSL',
'urllib3',
'pyasn1',
'ndg-httpsclient'
]
)Add test_certificate.pem to the releasefrom distutils.core import setup
setup(
name = 'furs_fiscal',
packages = ['furs_fiscal'],
version = '0.1.3',
description = 'Python library for simplified communication with FURS (Financna uprava Republike Slovenije).',
author = 'Boris Savic',
author_email = 'boris70@gmail.com',
url = 'https://github.com/boris-savic/python-furs-fiscal',
download_url = 'https://github.com/boris-savic/python-furs-fiscal/tarball/0.1.3',
keywords = ['FURS', 'fiscal', 'fiscal register', 'davcne blagajne'],
classifiers = [],
package_data={'furs_fiscal': ['certs/*.pem']},
install_requires=[
'requests',
'python-jose',
'pyOpenSSL',
'urllib3',
'pyasn1',
'ndg-httpsclient'
]
) | <commit_before>from distutils.core import setup
setup(
name = 'furs_fiscal',
packages = ['furs_fiscal'],
version = '0.1.0',
description = 'Python library for simplified communication with FURS (Financna uprava Republike Slovenije).',
author = 'Boris Savic',
author_email = 'boris70@gmail.com',
url = 'https://github.com/boris-savic/python-furs-fiscal',
download_url = 'https://github.com/boris-savic/python-furs-fiscal/tarball/0.1',
keywords = ['FURS', 'fiscal', 'fiscal register', 'davcne blagajne'],
classifiers = [],
install_requires=[
'requests',
'python-jose',
'pyOpenSSL',
'urllib3',
'pyasn1',
'ndg-httpsclient'
]
)<commit_msg>Add test_certificate.pem to the release<commit_after>from distutils.core import setup
setup(
name = 'furs_fiscal',
packages = ['furs_fiscal'],
version = '0.1.3',
description = 'Python library for simplified communication with FURS (Financna uprava Republike Slovenije).',
author = 'Boris Savic',
author_email = 'boris70@gmail.com',
url = 'https://github.com/boris-savic/python-furs-fiscal',
download_url = 'https://github.com/boris-savic/python-furs-fiscal/tarball/0.1.3',
keywords = ['FURS', 'fiscal', 'fiscal register', 'davcne blagajne'],
classifiers = [],
package_data={'furs_fiscal': ['certs/*.pem']},
install_requires=[
'requests',
'python-jose',
'pyOpenSSL',
'urllib3',
'pyasn1',
'ndg-httpsclient'
]
) |
5ea25bc6c72e5c934e56a90c44f8019ad176bb27 | comet/utility/test/test_spawn.py | comet/utility/test/test_spawn.py | import sys
from twisted.trial import unittest
from twisted.python import failure
from ..spawn import SpawnCommand
class DummyEvent(object):
text = ""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
d = spawn(DummyEvent())
d.addErrback(self.assertIsInstance, failure.Failure)
return d
| import sys
from twisted.trial import unittest
from twisted.python import failure
from twisted.python import util
from ..spawn import SpawnCommand
class DummyEvent(object):
def __init__(self, text=None):
self.text = text
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
d = spawn(DummyEvent())
d.addErrback(self.assertIsInstance, failure.Failure)
return d
def test_write_data(self):
TEXT = "Test spawn process"
def read_data(result):
f = open("spawnfile.txt")
try:
self.assertEqual(f.read(), TEXT)
finally:
f.close()
spawn = SpawnCommand(util.sibpath(__file__, "test_spawn.sh"))
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
| Test that spawned process actually writes data | Test that spawned process actually writes data
| Python | bsd-2-clause | jdswinbank/Comet,jdswinbank/Comet | import sys
from twisted.trial import unittest
from twisted.python import failure
from ..spawn import SpawnCommand
class DummyEvent(object):
text = ""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
d = spawn(DummyEvent())
d.addErrback(self.assertIsInstance, failure.Failure)
return d
Test that spawned process actually writes data | import sys
from twisted.trial import unittest
from twisted.python import failure
from twisted.python import util
from ..spawn import SpawnCommand
class DummyEvent(object):
def __init__(self, text=None):
self.text = text
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
d = spawn(DummyEvent())
d.addErrback(self.assertIsInstance, failure.Failure)
return d
def test_write_data(self):
TEXT = "Test spawn process"
def read_data(result):
f = open("spawnfile.txt")
try:
self.assertEqual(f.read(), TEXT)
finally:
f.close()
spawn = SpawnCommand(util.sibpath(__file__, "test_spawn.sh"))
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
| <commit_before>import sys
from twisted.trial import unittest
from twisted.python import failure
from ..spawn import SpawnCommand
class DummyEvent(object):
text = ""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
d = spawn(DummyEvent())
d.addErrback(self.assertIsInstance, failure.Failure)
return d
<commit_msg>Test that spawned process actually writes data<commit_after> | import sys
from twisted.trial import unittest
from twisted.python import failure
from twisted.python import util
from ..spawn import SpawnCommand
class DummyEvent(object):
def __init__(self, text=None):
self.text = text
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
d = spawn(DummyEvent())
d.addErrback(self.assertIsInstance, failure.Failure)
return d
def test_write_data(self):
TEXT = "Test spawn process"
def read_data(result):
f = open("spawnfile.txt")
try:
self.assertEqual(f.read(), TEXT)
finally:
f.close()
spawn = SpawnCommand(util.sibpath(__file__, "test_spawn.sh"))
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
| import sys
from twisted.trial import unittest
from twisted.python import failure
from ..spawn import SpawnCommand
class DummyEvent(object):
text = ""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
d = spawn(DummyEvent())
d.addErrback(self.assertIsInstance, failure.Failure)
return d
Test that spawned process actually writes dataimport sys
from twisted.trial import unittest
from twisted.python import failure
from twisted.python import util
from ..spawn import SpawnCommand
class DummyEvent(object):
def __init__(self, text=None):
self.text = text
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
d = spawn(DummyEvent())
d.addErrback(self.assertIsInstance, failure.Failure)
return d
def test_write_data(self):
TEXT = "Test spawn process"
def read_data(result):
f = open("spawnfile.txt")
try:
self.assertEqual(f.read(), TEXT)
finally:
f.close()
spawn = SpawnCommand(util.sibpath(__file__, "test_spawn.sh"))
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
| <commit_before>import sys
from twisted.trial import unittest
from twisted.python import failure
from ..spawn import SpawnCommand
class DummyEvent(object):
text = ""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
d = spawn(DummyEvent())
d.addErrback(self.assertIsInstance, failure.Failure)
return d
<commit_msg>Test that spawned process actually writes data<commit_after>import sys
from twisted.trial import unittest
from twisted.python import failure
from twisted.python import util
from ..spawn import SpawnCommand
class DummyEvent(object):
def __init__(self, text=None):
self.text = text
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
d = spawn(DummyEvent())
d.addErrback(self.assertIsInstance, failure.Failure)
return d
def test_write_data(self):
TEXT = "Test spawn process"
def read_data(result):
f = open("spawnfile.txt")
try:
self.assertEqual(f.read(), TEXT)
finally:
f.close()
spawn = SpawnCommand(util.sibpath(__file__, "test_spawn.sh"))
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
|
078f00ae743c2e16df76653090298ba56b277caf | pegasus/metrics/__init__.py | pegasus/metrics/__init__.py | import sys
import logging
def init_logging():
logFormat = "%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s"
logFormatter = logging.Formatter(fmt=logFormat)
logHandler = logging.StreamHandler(stream=sys.stderr)
logHandler.setFormatter(logFormatter)
log = logging.getLogger(__name__)
log.addHandler(logHandler)
init_logging()
from flask import Flask
app = Flask(__name__)
import pegasus.metrics.views
| import sys
import logging
def init_logging():
logFormat = "%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s"
logFormatter = logging.Formatter(fmt=logFormat)
logHandler = logging.StreamHandler()
logHandler.setFormatter(logFormatter)
log = logging.getLogger(__name__)
log.addHandler(logHandler)
init_logging()
from flask import Flask
app = Flask(__name__)
import pegasus.metrics.views
| Use default argument for StreamHandler (which is what want, sys.stderr) because they changed the name of the keyword argument in 2.7 | Use default argument for StreamHandler (which is what want, sys.stderr) because they changed the name of the keyword argument in 2.7
| Python | apache-2.0 | pegasus-isi/pegasus-metrics,pegasus-isi/pegasus-metrics,pegasus-isi/pegasus-metrics | import sys
import logging
def init_logging():
logFormat = "%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s"
logFormatter = logging.Formatter(fmt=logFormat)
logHandler = logging.StreamHandler(stream=sys.stderr)
logHandler.setFormatter(logFormatter)
log = logging.getLogger(__name__)
log.addHandler(logHandler)
init_logging()
from flask import Flask
app = Flask(__name__)
import pegasus.metrics.views
Use default argument for StreamHandler (which is what want, sys.stderr) because they changed the name of the keyword argument in 2.7 | import sys
import logging
def init_logging():
logFormat = "%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s"
logFormatter = logging.Formatter(fmt=logFormat)
logHandler = logging.StreamHandler()
logHandler.setFormatter(logFormatter)
log = logging.getLogger(__name__)
log.addHandler(logHandler)
init_logging()
from flask import Flask
app = Flask(__name__)
import pegasus.metrics.views
| <commit_before>import sys
import logging
def init_logging():
logFormat = "%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s"
logFormatter = logging.Formatter(fmt=logFormat)
logHandler = logging.StreamHandler(stream=sys.stderr)
logHandler.setFormatter(logFormatter)
log = logging.getLogger(__name__)
log.addHandler(logHandler)
init_logging()
from flask import Flask
app = Flask(__name__)
import pegasus.metrics.views
<commit_msg>Use default argument for StreamHandler (which is what want, sys.stderr) because they changed the name of the keyword argument in 2.7<commit_after> | import sys
import logging
def init_logging():
logFormat = "%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s"
logFormatter = logging.Formatter(fmt=logFormat)
logHandler = logging.StreamHandler()
logHandler.setFormatter(logFormatter)
log = logging.getLogger(__name__)
log.addHandler(logHandler)
init_logging()
from flask import Flask
app = Flask(__name__)
import pegasus.metrics.views
| import sys
import logging
def init_logging():
logFormat = "%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s"
logFormatter = logging.Formatter(fmt=logFormat)
logHandler = logging.StreamHandler(stream=sys.stderr)
logHandler.setFormatter(logFormatter)
log = logging.getLogger(__name__)
log.addHandler(logHandler)
init_logging()
from flask import Flask
app = Flask(__name__)
import pegasus.metrics.views
Use default argument for StreamHandler (which is what want, sys.stderr) because they changed the name of the keyword argument in 2.7import sys
import logging
def init_logging():
logFormat = "%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s"
logFormatter = logging.Formatter(fmt=logFormat)
logHandler = logging.StreamHandler()
logHandler.setFormatter(logFormatter)
log = logging.getLogger(__name__)
log.addHandler(logHandler)
init_logging()
from flask import Flask
app = Flask(__name__)
import pegasus.metrics.views
| <commit_before>import sys
import logging
def init_logging():
logFormat = "%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s"
logFormatter = logging.Formatter(fmt=logFormat)
logHandler = logging.StreamHandler(stream=sys.stderr)
logHandler.setFormatter(logFormatter)
log = logging.getLogger(__name__)
log.addHandler(logHandler)
init_logging()
from flask import Flask
app = Flask(__name__)
import pegasus.metrics.views
<commit_msg>Use default argument for StreamHandler (which is what want, sys.stderr) because they changed the name of the keyword argument in 2.7<commit_after>import sys
import logging
def init_logging():
logFormat = "%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s"
logFormatter = logging.Formatter(fmt=logFormat)
logHandler = logging.StreamHandler()
logHandler.setFormatter(logFormatter)
log = logging.getLogger(__name__)
log.addHandler(logHandler)
init_logging()
from flask import Flask
app = Flask(__name__)
import pegasus.metrics.views
|
2d52b37e8ed868099f5e808402b6c966987589e5 | nn-patterns/utils/tests/networks/__init__.py | nn-patterns/utils/tests/networks/__init__.py | # Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import theano.tensor as T
from . import mnist
from . import cifar10
from . import imagenet
def iterator():
"""
Iterator over various networks.
"""
default_nonlinearity = T.nnet.relu
def fetch_networks(module_name, module):
ret = [("%s.%s" % (module_name, name),
getattr(module, name)(default_nonlinearity))
for name in module.__all__]
for name, network in ret:
network["name"] = name
return [x[1] for x in sorted(ret)]
networks = (
fetch_networks("mnist", mnist) +
fetch_networks("cifar10", cifar10) +
fetch_networks("imagenet", imagenet)
)
for network in networks:
yield network
| # Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import fnmatch
import os
import theano.tensor as T
from . import mnist
from . import cifar10
from . import imagenet
def iterator():
"""
Iterator over various networks.
"""
default_nonlinearity = T.nnet.relu
# TODO: make this more transparent!
# Default test only for one network. To test all put "*"
name_filter = "mnist.cnn_2convb_2dense"
if "NNPATTERNS_TEST_FILTER" in os.environ:
name_filter = os.environ["NNPATTERNS_TEST_FILTER"]
def fetch_networks(module_name, module):
ret = [
("%s.%s" % (module_name, name),
getattr(module, name)(default_nonlinearity))
for name in module.__all__
if (fnmatch.fnmatch(name, name_filter) or
fnmatch.fnmatch("%s.%s" % (module_name, name), name_filter))
]
for name, network in ret:
network["name"] = name
return [x[1] for x in sorted(ret)]
networks = (
fetch_networks("mnist", mnist) +
fetch_networks("cifar10", cifar10) +
fetch_networks("imagenet", imagenet)
)
for network in networks:
yield network
| Add filter to customize which networks to test. | Add filter to customize which networks to test.
| Python | mit | pikinder/nn-patterns | # Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import theano.tensor as T
from . import mnist
from . import cifar10
from . import imagenet
def iterator():
"""
Iterator over various networks.
"""
default_nonlinearity = T.nnet.relu
def fetch_networks(module_name, module):
ret = [("%s.%s" % (module_name, name),
getattr(module, name)(default_nonlinearity))
for name in module.__all__]
for name, network in ret:
network["name"] = name
return [x[1] for x in sorted(ret)]
networks = (
fetch_networks("mnist", mnist) +
fetch_networks("cifar10", cifar10) +
fetch_networks("imagenet", imagenet)
)
for network in networks:
yield network
Add filter to customize which networks to test. | # Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import fnmatch
import os
import theano.tensor as T
from . import mnist
from . import cifar10
from . import imagenet
def iterator():
"""
Iterator over various networks.
"""
default_nonlinearity = T.nnet.relu
# TODO: make this more transparent!
# Default test only for one network. To test all put "*"
name_filter = "mnist.cnn_2convb_2dense"
if "NNPATTERNS_TEST_FILTER" in os.environ:
name_filter = os.environ["NNPATTERNS_TEST_FILTER"]
def fetch_networks(module_name, module):
ret = [
("%s.%s" % (module_name, name),
getattr(module, name)(default_nonlinearity))
for name in module.__all__
if (fnmatch.fnmatch(name, name_filter) or
fnmatch.fnmatch("%s.%s" % (module_name, name), name_filter))
]
for name, network in ret:
network["name"] = name
return [x[1] for x in sorted(ret)]
networks = (
fetch_networks("mnist", mnist) +
fetch_networks("cifar10", cifar10) +
fetch_networks("imagenet", imagenet)
)
for network in networks:
yield network
| <commit_before># Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import theano.tensor as T
from . import mnist
from . import cifar10
from . import imagenet
def iterator():
"""
Iterator over various networks.
"""
default_nonlinearity = T.nnet.relu
def fetch_networks(module_name, module):
ret = [("%s.%s" % (module_name, name),
getattr(module, name)(default_nonlinearity))
for name in module.__all__]
for name, network in ret:
network["name"] = name
return [x[1] for x in sorted(ret)]
networks = (
fetch_networks("mnist", mnist) +
fetch_networks("cifar10", cifar10) +
fetch_networks("imagenet", imagenet)
)
for network in networks:
yield network
<commit_msg>Add filter to customize which networks to test.<commit_after> | # Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import fnmatch
import os
import theano.tensor as T
from . import mnist
from . import cifar10
from . import imagenet
def iterator():
"""
Iterator over various networks.
"""
default_nonlinearity = T.nnet.relu
# TODO: make this more transparent!
# Default test only for one network. To test all put "*"
name_filter = "mnist.cnn_2convb_2dense"
if "NNPATTERNS_TEST_FILTER" in os.environ:
name_filter = os.environ["NNPATTERNS_TEST_FILTER"]
def fetch_networks(module_name, module):
ret = [
("%s.%s" % (module_name, name),
getattr(module, name)(default_nonlinearity))
for name in module.__all__
if (fnmatch.fnmatch(name, name_filter) or
fnmatch.fnmatch("%s.%s" % (module_name, name), name_filter))
]
for name, network in ret:
network["name"] = name
return [x[1] for x in sorted(ret)]
networks = (
fetch_networks("mnist", mnist) +
fetch_networks("cifar10", cifar10) +
fetch_networks("imagenet", imagenet)
)
for network in networks:
yield network
| # Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import theano.tensor as T
from . import mnist
from . import cifar10
from . import imagenet
def iterator():
"""
Iterator over various networks.
"""
default_nonlinearity = T.nnet.relu
def fetch_networks(module_name, module):
ret = [("%s.%s" % (module_name, name),
getattr(module, name)(default_nonlinearity))
for name in module.__all__]
for name, network in ret:
network["name"] = name
return [x[1] for x in sorted(ret)]
networks = (
fetch_networks("mnist", mnist) +
fetch_networks("cifar10", cifar10) +
fetch_networks("imagenet", imagenet)
)
for network in networks:
yield network
Add filter to customize which networks to test.# Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import fnmatch
import os
import theano.tensor as T
from . import mnist
from . import cifar10
from . import imagenet
def iterator():
"""
Iterator over various networks.
"""
default_nonlinearity = T.nnet.relu
# TODO: make this more transparent!
# Default test only for one network. To test all put "*"
name_filter = "mnist.cnn_2convb_2dense"
if "NNPATTERNS_TEST_FILTER" in os.environ:
name_filter = os.environ["NNPATTERNS_TEST_FILTER"]
def fetch_networks(module_name, module):
ret = [
("%s.%s" % (module_name, name),
getattr(module, name)(default_nonlinearity))
for name in module.__all__
if (fnmatch.fnmatch(name, name_filter) or
fnmatch.fnmatch("%s.%s" % (module_name, name), name_filter))
]
for name, network in ret:
network["name"] = name
return [x[1] for x in sorted(ret)]
networks = (
fetch_networks("mnist", mnist) +
fetch_networks("cifar10", cifar10) +
fetch_networks("imagenet", imagenet)
)
for network in networks:
yield network
| <commit_before># Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import theano.tensor as T
from . import mnist
from . import cifar10
from . import imagenet
def iterator():
"""
Iterator over various networks.
"""
default_nonlinearity = T.nnet.relu
def fetch_networks(module_name, module):
ret = [("%s.%s" % (module_name, name),
getattr(module, name)(default_nonlinearity))
for name in module.__all__]
for name, network in ret:
network["name"] = name
return [x[1] for x in sorted(ret)]
networks = (
fetch_networks("mnist", mnist) +
fetch_networks("cifar10", cifar10) +
fetch_networks("imagenet", imagenet)
)
for network in networks:
yield network
<commit_msg>Add filter to customize which networks to test.<commit_after># Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import fnmatch
import os
import theano.tensor as T
from . import mnist
from . import cifar10
from . import imagenet
def iterator():
"""
Iterator over various networks.
"""
default_nonlinearity = T.nnet.relu
# TODO: make this more transparent!
# Default test only for one network. To test all put "*"
name_filter = "mnist.cnn_2convb_2dense"
if "NNPATTERNS_TEST_FILTER" in os.environ:
name_filter = os.environ["NNPATTERNS_TEST_FILTER"]
def fetch_networks(module_name, module):
ret = [
("%s.%s" % (module_name, name),
getattr(module, name)(default_nonlinearity))
for name in module.__all__
if (fnmatch.fnmatch(name, name_filter) or
fnmatch.fnmatch("%s.%s" % (module_name, name), name_filter))
]
for name, network in ret:
network["name"] = name
return [x[1] for x in sorted(ret)]
networks = (
fetch_networks("mnist", mnist) +
fetch_networks("cifar10", cifar10) +
fetch_networks("imagenet", imagenet)
)
for network in networks:
yield network
|
2cc9f1781691865222acb90fb8bbd5e721cc2549 | csunplugged/utils/errors/InvalidYAMLFileError.py | csunplugged/utils/errors/InvalidYAMLFileError.py | """Custom error for invalid yaml file."""
from .Error import Error
ERROR_MESSAGE = """
Invalid YAML file (.yaml)
Options:
- Does the file match the expected layout?
- Does the file contain at least one key:value pair?
- Is the syntax correct? (are you missing a colon somewhere?)
"""
class InvalidYAMLFileError(Error):
"""custom error for invalid yaml file."""
def __init__(self, yaml_file_path):
"""Create error for invalid yaml file."""
super().__init__()
self.yaml_file_path = yaml_file_path
def __str__(self):
"""Override default error string.
Returns:
Error message for invalid yaml file.
"""
return self.base_message.format(filename=self.yaml_file_path) + ERROR_MESSAGE
| """Custom error for invalid yaml file."""
from .Error import Error
ERROR_MESSAGE = """
Invalid YAML file (.yaml).
Options:
- Does the file match the expected layout?
- Does the file contain at least one key:value pair?
- Is the syntax correct? (are you missing a colon somewhere?)
"""
class InvalidYAMLFileError(Error):
"""custom error for invalid yaml file."""
def __init__(self, yaml_file_path):
"""Create error for invalid yaml file."""
super().__init__()
self.yaml_file_path = yaml_file_path
def __str__(self):
"""Override default error string.
Returns:
Error message for invalid yaml file.
"""
return self.base_message.format(filename=self.yaml_file_path) + ERROR_MESSAGE
| Fix typo in error message | Fix typo in error message
| Python | mit | uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged | """Custom error for invalid yaml file."""
from .Error import Error
ERROR_MESSAGE = """
Invalid YAML file (.yaml)
Options:
- Does the file match the expected layout?
- Does the file contain at least one key:value pair?
- Is the syntax correct? (are you missing a colon somewhere?)
"""
class InvalidYAMLFileError(Error):
"""custom error for invalid yaml file."""
def __init__(self, yaml_file_path):
"""Create error for invalid yaml file."""
super().__init__()
self.yaml_file_path = yaml_file_path
def __str__(self):
"""Override default error string.
Returns:
Error message for invalid yaml file.
"""
return self.base_message.format(filename=self.yaml_file_path) + ERROR_MESSAGE
Fix typo in error message | """Custom error for invalid yaml file."""
from .Error import Error
ERROR_MESSAGE = """
Invalid YAML file (.yaml).
Options:
- Does the file match the expected layout?
- Does the file contain at least one key:value pair?
- Is the syntax correct? (are you missing a colon somewhere?)
"""
class InvalidYAMLFileError(Error):
"""custom error for invalid yaml file."""
def __init__(self, yaml_file_path):
"""Create error for invalid yaml file."""
super().__init__()
self.yaml_file_path = yaml_file_path
def __str__(self):
"""Override default error string.
Returns:
Error message for invalid yaml file.
"""
return self.base_message.format(filename=self.yaml_file_path) + ERROR_MESSAGE
| <commit_before>"""Custom error for invalid yaml file."""
from .Error import Error
ERROR_MESSAGE = """
Invalid YAML file (.yaml)
Options:
- Does the file match the expected layout?
- Does the file contain at least one key:value pair?
- Is the syntax correct? (are you missing a colon somewhere?)
"""
class InvalidYAMLFileError(Error):
"""custom error for invalid yaml file."""
def __init__(self, yaml_file_path):
"""Create error for invalid yaml file."""
super().__init__()
self.yaml_file_path = yaml_file_path
def __str__(self):
"""Override default error string.
Returns:
Error message for invalid yaml file.
"""
return self.base_message.format(filename=self.yaml_file_path) + ERROR_MESSAGE
<commit_msg>Fix typo in error message<commit_after> | """Custom error for invalid yaml file."""
from .Error import Error
ERROR_MESSAGE = """
Invalid YAML file (.yaml).
Options:
- Does the file match the expected layout?
- Does the file contain at least one key:value pair?
- Is the syntax correct? (are you missing a colon somewhere?)
"""
class InvalidYAMLFileError(Error):
"""custom error for invalid yaml file."""
def __init__(self, yaml_file_path):
"""Create error for invalid yaml file."""
super().__init__()
self.yaml_file_path = yaml_file_path
def __str__(self):
"""Override default error string.
Returns:
Error message for invalid yaml file.
"""
return self.base_message.format(filename=self.yaml_file_path) + ERROR_MESSAGE
| """Custom error for invalid yaml file."""
from .Error import Error
ERROR_MESSAGE = """
Invalid YAML file (.yaml)
Options:
- Does the file match the expected layout?
- Does the file contain at least one key:value pair?
- Is the syntax correct? (are you missing a colon somewhere?)
"""
class InvalidYAMLFileError(Error):
"""custom error for invalid yaml file."""
def __init__(self, yaml_file_path):
"""Create error for invalid yaml file."""
super().__init__()
self.yaml_file_path = yaml_file_path
def __str__(self):
"""Override default error string.
Returns:
Error message for invalid yaml file.
"""
return self.base_message.format(filename=self.yaml_file_path) + ERROR_MESSAGE
Fix typo in error message"""Custom error for invalid yaml file."""
from .Error import Error
ERROR_MESSAGE = """
Invalid YAML file (.yaml).
Options:
- Does the file match the expected layout?
- Does the file contain at least one key:value pair?
- Is the syntax correct? (are you missing a colon somewhere?)
"""
class InvalidYAMLFileError(Error):
"""custom error for invalid yaml file."""
def __init__(self, yaml_file_path):
"""Create error for invalid yaml file."""
super().__init__()
self.yaml_file_path = yaml_file_path
def __str__(self):
"""Override default error string.
Returns:
Error message for invalid yaml file.
"""
return self.base_message.format(filename=self.yaml_file_path) + ERROR_MESSAGE
| <commit_before>"""Custom error for invalid yaml file."""
from .Error import Error
ERROR_MESSAGE = """
Invalid YAML file (.yaml)
Options:
- Does the file match the expected layout?
- Does the file contain at least one key:value pair?
- Is the syntax correct? (are you missing a colon somewhere?)
"""
class InvalidYAMLFileError(Error):
"""custom error for invalid yaml file."""
def __init__(self, yaml_file_path):
"""Create error for invalid yaml file."""
super().__init__()
self.yaml_file_path = yaml_file_path
def __str__(self):
"""Override default error string.
Returns:
Error message for invalid yaml file.
"""
return self.base_message.format(filename=self.yaml_file_path) + ERROR_MESSAGE
<commit_msg>Fix typo in error message<commit_after>"""Custom error for invalid yaml file."""
from .Error import Error
ERROR_MESSAGE = """
Invalid YAML file (.yaml).
Options:
- Does the file match the expected layout?
- Does the file contain at least one key:value pair?
- Is the syntax correct? (are you missing a colon somewhere?)
"""
class InvalidYAMLFileError(Error):
"""custom error for invalid yaml file."""
def __init__(self, yaml_file_path):
"""Create error for invalid yaml file."""
super().__init__()
self.yaml_file_path = yaml_file_path
def __str__(self):
"""Override default error string.
Returns:
Error message for invalid yaml file.
"""
return self.base_message.format(filename=self.yaml_file_path) + ERROR_MESSAGE
|
cb39de495da5256e6e44773036f78d704f0d563d | tapioca_instagram/tapioca_instagram.py | tapioca_instagram/tapioca_instagram.py | # coding: utf-8
from tapioca import (JSONAdapterMixin, TapiocaAdapter,
generate_wrapper_from_adapter)
from .resource_mapping import RESOURCE_MAPPING
class InstagramClientAdapter(JSONAdapterMixin, TapiocaAdapter):
api_root = 'https://api.instagram.com/v1/'
resource_mapping = RESOURCE_MAPPING
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(InstagramClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
params['params'].update({
'access_token': api_params.get('access_token', '')
})
return params
def get_iterator_list(self, response_data):
return response_data['data']
def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
response_data, response):
paging = response_data.get('pagination')
if not paging:
return
url = paging.get('next_url')
if url:
return {'url': url}
Instagram = generate_wrapper_from_adapter(InstagramClientAdapter)
| # coding: utf-8
from tapioca import (JSONAdapterMixin, TapiocaAdapter,
generate_wrapper_from_adapter)
from .resource_mapping import RESOURCE_MAPPING
class InstagramClientAdapter(JSONAdapterMixin, TapiocaAdapter):
api_root = 'https://api.instagram.com/v1/'
resource_mapping = RESOURCE_MAPPING
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(InstagramClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
params.setdefault('params', {}).update(
{'access_token': api_params.get('access_token', '')}
)
return params
def get_iterator_list(self, response_data):
return response_data['data']
def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
response_data, response):
paging = response_data.get('pagination')
if not paging:
return
url = paging.get('next_url')
if url:
return {'url': url}
Instagram = generate_wrapper_from_adapter(InstagramClientAdapter)
| Fix KeyError exception when called with no parameters | Fix KeyError exception when called with no parameters
| Python | mit | vintasoftware/tapioca-instagram | # coding: utf-8
from tapioca import (JSONAdapterMixin, TapiocaAdapter,
generate_wrapper_from_adapter)
from .resource_mapping import RESOURCE_MAPPING
class InstagramClientAdapter(JSONAdapterMixin, TapiocaAdapter):
api_root = 'https://api.instagram.com/v1/'
resource_mapping = RESOURCE_MAPPING
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(InstagramClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
params['params'].update({
'access_token': api_params.get('access_token', '')
})
return params
def get_iterator_list(self, response_data):
return response_data['data']
def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
response_data, response):
paging = response_data.get('pagination')
if not paging:
return
url = paging.get('next_url')
if url:
return {'url': url}
Instagram = generate_wrapper_from_adapter(InstagramClientAdapter)
Fix KeyError exception when called with no parameters | # coding: utf-8
from tapioca import (JSONAdapterMixin, TapiocaAdapter,
generate_wrapper_from_adapter)
from .resource_mapping import RESOURCE_MAPPING
class InstagramClientAdapter(JSONAdapterMixin, TapiocaAdapter):
api_root = 'https://api.instagram.com/v1/'
resource_mapping = RESOURCE_MAPPING
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(InstagramClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
params.setdefault('params', {}).update(
{'access_token': api_params.get('access_token', '')}
)
return params
def get_iterator_list(self, response_data):
return response_data['data']
def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
response_data, response):
paging = response_data.get('pagination')
if not paging:
return
url = paging.get('next_url')
if url:
return {'url': url}
Instagram = generate_wrapper_from_adapter(InstagramClientAdapter)
| <commit_before># coding: utf-8
from tapioca import (JSONAdapterMixin, TapiocaAdapter,
generate_wrapper_from_adapter)
from .resource_mapping import RESOURCE_MAPPING
class InstagramClientAdapter(JSONAdapterMixin, TapiocaAdapter):
api_root = 'https://api.instagram.com/v1/'
resource_mapping = RESOURCE_MAPPING
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(InstagramClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
params['params'].update({
'access_token': api_params.get('access_token', '')
})
return params
def get_iterator_list(self, response_data):
return response_data['data']
def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
response_data, response):
paging = response_data.get('pagination')
if not paging:
return
url = paging.get('next_url')
if url:
return {'url': url}
Instagram = generate_wrapper_from_adapter(InstagramClientAdapter)
<commit_msg>Fix KeyError exception when called with no parameters<commit_after> | # coding: utf-8
from tapioca import (JSONAdapterMixin, TapiocaAdapter,
generate_wrapper_from_adapter)
from .resource_mapping import RESOURCE_MAPPING
class InstagramClientAdapter(JSONAdapterMixin, TapiocaAdapter):
api_root = 'https://api.instagram.com/v1/'
resource_mapping = RESOURCE_MAPPING
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(InstagramClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
params.setdefault('params', {}).update(
{'access_token': api_params.get('access_token', '')}
)
return params
def get_iterator_list(self, response_data):
return response_data['data']
def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
response_data, response):
paging = response_data.get('pagination')
if not paging:
return
url = paging.get('next_url')
if url:
return {'url': url}
Instagram = generate_wrapper_from_adapter(InstagramClientAdapter)
| # coding: utf-8
from tapioca import (JSONAdapterMixin, TapiocaAdapter,
generate_wrapper_from_adapter)
from .resource_mapping import RESOURCE_MAPPING
class InstagramClientAdapter(JSONAdapterMixin, TapiocaAdapter):
api_root = 'https://api.instagram.com/v1/'
resource_mapping = RESOURCE_MAPPING
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(InstagramClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
params['params'].update({
'access_token': api_params.get('access_token', '')
})
return params
def get_iterator_list(self, response_data):
return response_data['data']
def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
response_data, response):
paging = response_data.get('pagination')
if not paging:
return
url = paging.get('next_url')
if url:
return {'url': url}
Instagram = generate_wrapper_from_adapter(InstagramClientAdapter)
Fix KeyError exception when called with no parameters# coding: utf-8
from tapioca import (JSONAdapterMixin, TapiocaAdapter,
generate_wrapper_from_adapter)
from .resource_mapping import RESOURCE_MAPPING
class InstagramClientAdapter(JSONAdapterMixin, TapiocaAdapter):
    """Tapioca adapter for the Instagram REST API (v1).

    Wires the shared resource mapping to the API root and injects the
    OAuth access token into every outgoing request.
    """

    api_root = 'https://api.instagram.com/v1/'
    resource_mapping = RESOURCE_MAPPING

    def get_request_kwargs(self, api_params, *args, **kwargs):
        # Merge the access token into the query-string parameters.
        # setdefault guards against a missing 'params' key when the
        # caller supplied no request parameters at all.
        params = super(InstagramClientAdapter, self).get_request_kwargs(
            api_params, *args, **kwargs)
        params.setdefault('params', {}).update(
            {'access_token': api_params.get('access_token', '')}
        )
        return params

    def get_iterator_list(self, response_data):
        # Instagram wraps the result collection under the 'data' key.
        return response_data['data']

    def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
                                         response_data, response):
        # Cursor pagination: return request kwargs pointing at the next
        # page, or fall through to None when there is no further page.
        paging = response_data.get('pagination')
        if not paging:
            return
        url = paging.get('next_url')
        if url:
            return {'url': url}
Instagram = generate_wrapper_from_adapter(InstagramClientAdapter)
| <commit_before># coding: utf-8
from tapioca import (JSONAdapterMixin, TapiocaAdapter,
generate_wrapper_from_adapter)
from .resource_mapping import RESOURCE_MAPPING
class InstagramClientAdapter(JSONAdapterMixin, TapiocaAdapter):
    """Tapioca adapter for the Instagram REST API (v1).

    Wires the shared resource mapping to the API root and injects the
    OAuth access token into every outgoing request.
    """

    api_root = 'https://api.instagram.com/v1/'
    resource_mapping = RESOURCE_MAPPING

    def get_request_kwargs(self, api_params, *args, **kwargs):
        params = super(InstagramClientAdapter, self).get_request_kwargs(
            api_params, *args, **kwargs)
        # BUG FIX: the base class only includes a 'params' key when the
        # caller passed request parameters, so indexing params['params']
        # raised KeyError on parameterless calls.  setdefault creates
        # the dict on demand before merging in the access token.
        params.setdefault('params', {}).update(
            {'access_token': api_params.get('access_token', '')}
        )
        return params

    def get_iterator_list(self, response_data):
        # Instagram wraps the result collection under the 'data' key.
        return response_data['data']

    def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
                                         response_data, response):
        # Cursor pagination: return request kwargs pointing at the next
        # page, or fall through to None when there is no further page.
        paging = response_data.get('pagination')
        if not paging:
            return
        url = paging.get('next_url')
        if url:
            return {'url': url}
Instagram = generate_wrapper_from_adapter(InstagramClientAdapter)
<commit_msg>Fix KeyError exception when called with no parameters<commit_after># coding: utf-8
from tapioca import (JSONAdapterMixin, TapiocaAdapter,
generate_wrapper_from_adapter)
from .resource_mapping import RESOURCE_MAPPING
class InstagramClientAdapter(JSONAdapterMixin, TapiocaAdapter):
api_root = 'https://api.instagram.com/v1/'
resource_mapping = RESOURCE_MAPPING
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(InstagramClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
params.setdefault('params', {}).update(
{'access_token': api_params.get('access_token', '')}
)
return params
def get_iterator_list(self, response_data):
return response_data['data']
def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
response_data, response):
paging = response_data.get('pagination')
if not paging:
return
url = paging.get('next_url')
if url:
return {'url': url}
Instagram = generate_wrapper_from_adapter(InstagramClientAdapter)
|
93ed6f7db7060893214571bf5ec8a633fffa48ab | python/completers/cpp/clang_helpers.py | python/completers/cpp/clang_helpers.py | #!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
# Given an iterable object that produces strings (flags for Clang), removes the
# '-c' and '-o' options that Clang does not like to see when it's producing
# completions for a file.
def PrepareClangFlags( flags, filename ):
  """Return a copy of *flags* safe to hand to libclang for completions.

  Drops the leading compiler executable, the '-c' flag, '-o' together
  with its output-file argument, and any flag naming the file being
  completed (compared both literally and via realpath).
  """
  new_flags = []
  # The first entry of a compile command is the compiler binary itself,
  # so start with skip=True to drop it.
  skip = True
  for flag in flags:
    if skip:
      skip = False
      continue
    if flag == '-c':
      # BUG FIX: '-c' takes no argument.  The old code set skip = True
      # here, which wrongly discarded the flag that followed '-c'.
      continue
    if flag == '-o':
      # '-o' does take an argument (the output file); drop both.
      skip = True
      continue
    if flag == filename or os.path.realpath(flag) == filename:
      continue
    new_flags.append( flag )
  return new_flags
| #!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
# Given an iterable object that produces strings (flags for Clang), removes the
# '-c' and '-o' options that Clang does not like to see when it's producing
# completions for a file.
def PrepareClangFlags( flags, filename ):
  """Filter a compile command down to flags usable for Clang completion.

  The compiler executable (first entry), the '-c' flag, '-o' plus its
  argument, and any reference to *filename* itself are all removed.
  """
  kept = []
  drop_next = True  # the leading entry is the compiler executable
  for current in flags:
    if drop_next:
      drop_next = False
    elif current == '-c':
      pass  # flag only; it has no argument to discard
    elif current == '-o':
      drop_next = True  # also discard the output file that follows
    elif current == filename or os.path.realpath(current) == filename:
      pass  # never feed the file being completed back in as a flag
    else:
      kept.append(current)
  return kept
| Fix bug with removing flag after "-c" | Fix bug with removing flag after "-c"
-c does not take an argument. Why did I think it did?
| Python | mit | nikmartin/dotfiles | #!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
# Given an iterable object that produces strings (flags for Clang), removes the
# '-c' and '-o' options that Clang does not like to see when it's producing
# completions for a file.
def PrepareClangFlags( flags, filename ):
new_flags = []
skip = True
for flag in flags:
if skip:
skip = False
continue
if flag == '-c':
skip = True;
continue
if flag == '-o':
skip = True;
continue
if flag == filename or os.path.realpath(flag) == filename:
continue
new_flags.append( flag )
return new_flags
Fix bug with removing flag after "-c"
-c does not take an argument. Why did I think it did? | #!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
# Given an iterable object that produces strings (flags for Clang), removes the
# '-c' and '-o' options that Clang does not like to see when it's producing
# completions for a file.
def PrepareClangFlags( flags, filename ):
new_flags = []
skip = True
for flag in flags:
if skip:
skip = False
continue
if flag == '-c':
continue
if flag == '-o':
skip = True;
continue
if flag == filename or os.path.realpath(flag) == filename:
continue
new_flags.append( flag )
return new_flags
| <commit_before>#!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
# Given an iterable object that produces strings (flags for Clang), removes the
# '-c' and '-o' options that Clang does not like to see when it's producing
# completions for a file.
def PrepareClangFlags( flags, filename ):
new_flags = []
skip = True
for flag in flags:
if skip:
skip = False
continue
if flag == '-c':
skip = True;
continue
if flag == '-o':
skip = True;
continue
if flag == filename or os.path.realpath(flag) == filename:
continue
new_flags.append( flag )
return new_flags
<commit_msg>Fix bug with removing flag after "-c"
-c does not take an argument. Why did I think it did?<commit_after> | #!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
# Given an iterable object that produces strings (flags for Clang), removes the
# '-c' and '-o' options that Clang does not like to see when it's producing
# completions for a file.
def PrepareClangFlags( flags, filename ):
new_flags = []
skip = True
for flag in flags:
if skip:
skip = False
continue
if flag == '-c':
continue
if flag == '-o':
skip = True;
continue
if flag == filename or os.path.realpath(flag) == filename:
continue
new_flags.append( flag )
return new_flags
| #!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
# Given an iterable object that produces strings (flags for Clang), removes the
# '-c' and '-o' options that Clang does not like to see when it's producing
# completions for a file.
def PrepareClangFlags( flags, filename ):
new_flags = []
skip = True
for flag in flags:
if skip:
skip = False
continue
if flag == '-c':
skip = True;
continue
if flag == '-o':
skip = True;
continue
if flag == filename or os.path.realpath(flag) == filename:
continue
new_flags.append( flag )
return new_flags
Fix bug with removing flag after "-c"
-c does not take an argument. Why did I think it did?#!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
# Given an iterable object that produces strings (flags for Clang), removes the
# '-c' and '-o' options that Clang does not like to see when it's producing
# completions for a file.
def PrepareClangFlags( flags, filename ):
new_flags = []
skip = True
for flag in flags:
if skip:
skip = False
continue
if flag == '-c':
continue
if flag == '-o':
skip = True;
continue
if flag == filename or os.path.realpath(flag) == filename:
continue
new_flags.append( flag )
return new_flags
| <commit_before>#!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
# Given an iterable object that produces strings (flags for Clang), removes the
# '-c' and '-o' options that Clang does not like to see when it's producing
# completions for a file.
def PrepareClangFlags( flags, filename ):
new_flags = []
skip = True
for flag in flags:
if skip:
skip = False
continue
if flag == '-c':
skip = True;
continue
if flag == '-o':
skip = True;
continue
if flag == filename or os.path.realpath(flag) == filename:
continue
new_flags.append( flag )
return new_flags
<commit_msg>Fix bug with removing flag after "-c"
-c does not take an argument. Why did I think it did?<commit_after>#!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
# Given an iterable object that produces strings (flags for Clang), removes the
# '-c' and '-o' options that Clang does not like to see when it's producing
# completions for a file.
def PrepareClangFlags( flags, filename ):
new_flags = []
skip = True
for flag in flags:
if skip:
skip = False
continue
if flag == '-c':
continue
if flag == '-o':
skip = True;
continue
if flag == filename or os.path.realpath(flag) == filename:
continue
new_flags.append( flag )
return new_flags
|
c7fb70585b0c488d523a4ff173e3b0675029e90b | cmsplugin_filer_link/cms_plugins.py | cmsplugin_filer_link/cms_plugins.py | from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
raw_id_fields = ('page_link', )
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
| from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
| Revert "Add "page_link" to "raw_id_fields" to prevent the run of "decompress"" | Revert "Add "page_link" to "raw_id_fields" to prevent the run of "decompress""
This reverts commit 4a85ecaaae1452e74acc485d032f00e8bedace47.
| Python | bsd-3-clause | yvess/cmsplugin-filer,divio/cmsplugin-filer,alsoicode/cmsplugin-filer,creimers/cmsplugin-filer,jschneier/cmsplugin-filer,stefanfoulis/cmsplugin-filer,brightinteractive/cmsplugin-filer,nephila/cmsplugin-filer,creimers/cmsplugin-filer,jschneier/cmsplugin-filer,brightinteractive/cmsplugin-filer,skirsdeda/cmsplugin-filer,divio/cmsplugin-filer,creimers/cmsplugin-filer,skirsdeda/cmsplugin-filer,divio/cmsplugin-filer,brightinteractive/cmsplugin-filer,wlanslovenija/cmsplugin-filer,skirsdeda/cmsplugin-filer,stefanfoulis/cmsplugin-filer,nephila/cmsplugin-filer,divio/cmsplugin-filer,douwevandermeij/cmsplugin-filer,wlanslovenija/cmsplugin-filer,sephii/cmsplugin-filer,nephila/cmsplugin-filer,douwevandermeij/cmsplugin-filer,alsoicode/cmsplugin-filer,stefanfoulis/cmsplugin-filer,wlanslovenija/cmsplugin-filer,sephii/cmsplugin-filer,yvess/cmsplugin-filer,sephii/cmsplugin-filer,alsoicode/cmsplugin-filer,stefanfoulis/cmsplugin-filer,douwevandermeij/cmsplugin-filer,yvess/cmsplugin-filer,jschneier/cmsplugin-filer,yvess/cmsplugin-filer | from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
raw_id_fields = ('page_link', )
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
Revert "Add "page_link" to "raw_id_fields" to prevent the run of "decompress""
This reverts commit 4a85ecaaae1452e74acc485d032f00e8bedace47. | from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
    """django CMS plugin rendering a link to a filer file, a mailto
    address, a plain URL or a CMS page (checked in that precedence
    order inside render()).
    """

    module = 'Filer'
    # NOTE: 'model' is the FilerLinkPlugin imported from .models; this
    # plugin class definition shadows that name afterwards.
    model = FilerLinkPlugin
    name = _("Link")
    text_enabled = True
    render_template = "cmsplugin_filer_link/link.html"

    def render(self, context, instance, placeholder):
        # Resolve the link target; the first non-empty source wins,
        # falling back to an empty href when nothing is configured.
        if instance.file:
            link = instance.file.url
        elif instance.mailto:
            link = "mailto:%s" % _(instance.mailto)
        elif instance.url:
            link = _(instance.url)
        elif instance.page_link:
            link = instance.page_link.get_absolute_url()
        else:
            link = ""
        context.update({
            'link': link,
            'style': instance.link_style,
            'name': instance.name,
            'new_window': instance.new_window,
        })
        return context

    def icon_src(self, instance):
        # Icon shown for this plugin inside the rich-text editor.
        return settings.STATIC_URL + "cms/images/plugins/link.png"


plugin_pool.register_plugin(FilerLinkPlugin)
| <commit_before>from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
raw_id_fields = ('page_link', )
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
<commit_msg>Revert "Add "page_link" to "raw_id_fields" to prevent the run of "decompress""
This reverts commit 4a85ecaaae1452e74acc485d032f00e8bedace47.<commit_after> | from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
| from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
raw_id_fields = ('page_link', )
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
Revert "Add "page_link" to "raw_id_fields" to prevent the run of "decompress""
This reverts commit 4a85ecaaae1452e74acc485d032f00e8bedace47.from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
| <commit_before>from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
raw_id_fields = ('page_link', )
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
<commit_msg>Revert "Add "page_link" to "raw_id_fields" to prevent the run of "decompress""
This reverts commit 4a85ecaaae1452e74acc485d032f00e8bedace47.<commit_after>from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
|
dafa014fa5c4788affd2712b68ef5bee56b5e600 | engine/game.py | engine/game.py | from .gobject import GObject
from . import signals
class Game(GObject):
def __init__(self):
self.maps = {}
self.player = None
def run(self):
pass
def handle_signals(self):
signals.handle_signals(self)
@staticmethod
def reg_signal(*args):
signals.reg_signal(*args)
@staticmethod
def have_signals():
return signals.have_signals()
| from .gobject import GObject
from . import signals
import time
class Game(GObject):
    """Top-level game object: owns the maps and the player and drives
    the signal-processing loop.
    """

    def __init__(self):
        # Mapping of map identifiers to map objects; empty until loaded.
        self.maps = {}
        # The controlled player object; None until one is attached.
        self.player = None

    def run(self):
        # Main loop: process queued signals, then sleep briefly so the
        # loop does not busy-wait.  Runs forever -- there is no exit
        # condition here; termination is external (e.g. interrupt).
        while True:
            self.handle_signals()
            time.sleep(0.3)

    def handle_signals(self):
        # Delegate to the signals module, passing this game as receiver.
        signals.handle_signals(self)

    @staticmethod
    def reg_signal(*args):
        # Thin wrapper over signals.reg_signal.
        signals.reg_signal(*args)

    @staticmethod
    def have_signals():
        # Thin wrapper over signals.have_signals -- presumably reports
        # whether signals are pending; confirm in the signals module.
        return signals.have_signals()
| Handle signals with default interface | Handle signals with default interface
| Python | bsd-3-clause | entwanne/NAGM | from .gobject import GObject
from . import signals
class Game(GObject):
def __init__(self):
self.maps = {}
self.player = None
def run(self):
pass
def handle_signals(self):
signals.handle_signals(self)
@staticmethod
def reg_signal(*args):
signals.reg_signal(*args)
@staticmethod
def have_signals():
return signals.have_signals()
Handle signals with default interface | from .gobject import GObject
from . import signals
import time
class Game(GObject):
def __init__(self):
self.maps = {}
self.player = None
def run(self):
while True:
self.handle_signals()
time.sleep(0.3)
def handle_signals(self):
signals.handle_signals(self)
@staticmethod
def reg_signal(*args):
signals.reg_signal(*args)
@staticmethod
def have_signals():
return signals.have_signals()
| <commit_before>from .gobject import GObject
from . import signals
class Game(GObject):
def __init__(self):
self.maps = {}
self.player = None
def run(self):
pass
def handle_signals(self):
signals.handle_signals(self)
@staticmethod
def reg_signal(*args):
signals.reg_signal(*args)
@staticmethod
def have_signals():
return signals.have_signals()
<commit_msg>Handle signals with default interface<commit_after> | from .gobject import GObject
from . import signals
import time
class Game(GObject):
def __init__(self):
self.maps = {}
self.player = None
def run(self):
while True:
self.handle_signals()
time.sleep(0.3)
def handle_signals(self):
signals.handle_signals(self)
@staticmethod
def reg_signal(*args):
signals.reg_signal(*args)
@staticmethod
def have_signals():
return signals.have_signals()
| from .gobject import GObject
from . import signals
class Game(GObject):
def __init__(self):
self.maps = {}
self.player = None
def run(self):
pass
def handle_signals(self):
signals.handle_signals(self)
@staticmethod
def reg_signal(*args):
signals.reg_signal(*args)
@staticmethod
def have_signals():
return signals.have_signals()
Handle signals with default interfacefrom .gobject import GObject
from . import signals
import time
class Game(GObject):
def __init__(self):
self.maps = {}
self.player = None
def run(self):
while True:
self.handle_signals()
time.sleep(0.3)
def handle_signals(self):
signals.handle_signals(self)
@staticmethod
def reg_signal(*args):
signals.reg_signal(*args)
@staticmethod
def have_signals():
return signals.have_signals()
| <commit_before>from .gobject import GObject
from . import signals
class Game(GObject):
def __init__(self):
self.maps = {}
self.player = None
def run(self):
pass
def handle_signals(self):
signals.handle_signals(self)
@staticmethod
def reg_signal(*args):
signals.reg_signal(*args)
@staticmethod
def have_signals():
return signals.have_signals()
<commit_msg>Handle signals with default interface<commit_after>from .gobject import GObject
from . import signals
import time
class Game(GObject):
def __init__(self):
self.maps = {}
self.player = None
def run(self):
while True:
self.handle_signals()
time.sleep(0.3)
def handle_signals(self):
signals.handle_signals(self)
@staticmethod
def reg_signal(*args):
signals.reg_signal(*args)
@staticmethod
def have_signals():
return signals.have_signals()
|
9858837add5105f2f4e78abe84930c4e164071b9 | examples/me.py | examples/me.py | from buffer.api import API
from buffer.user import User
token = '1/714ebdb617705ef9491a81fb21c1da42'
api = API(client_id='51cc6dd5f882a8ba18000055', client_secret='83b019d154cae4d2c734d813b33e5e53')
r = api.get('user.json?')
print r.content
#user = User(api=api)
| from buffer.api import API
from buffer.user import User
token = '1/714ebdb617705ef9491a81fb21c1da42'
api = API(client_id='51cc6dd5f882a8ba18000055', client_secret='83b019d154cae4d2c734d813b33e5e53', access_token=token)
user = User(api=api)
print user.id
| Create first examples that used the actual API | Create first examples that used the actual API
| Python | mit | vtemian/buffpy,bufferapp/buffer-python | from buffer.api import API
from buffer.user import User
token = '1/714ebdb617705ef9491a81fb21c1da42'
api = API(client_id='51cc6dd5f882a8ba18000055', client_secret='83b019d154cae4d2c734d813b33e5e53')
r = api.get('user.json?')
print r.content
#user = User(api=api)
Create first examples that used the actual API | from buffer.api import API
from buffer.user import User
token = '1/714ebdb617705ef9491a81fb21c1da42'
api = API(client_id='51cc6dd5f882a8ba18000055', client_secret='83b019d154cae4d2c734d813b33e5e53', access_token=token)
user = User(api=api)
print user.id
| <commit_before>from buffer.api import API
from buffer.user import User
token = '1/714ebdb617705ef9491a81fb21c1da42'
api = API(client_id='51cc6dd5f882a8ba18000055', client_secret='83b019d154cae4d2c734d813b33e5e53')
r = api.get('user.json?')
print r.content
#user = User(api=api)
<commit_msg>Create first examples that used the actual API<commit_after> | from buffer.api import API
from buffer.user import User
token = '1/714ebdb617705ef9491a81fb21c1da42'
api = API(client_id='51cc6dd5f882a8ba18000055', client_secret='83b019d154cae4d2c734d813b33e5e53', access_token=token)
user = User(api=api)
print user.id
| from buffer.api import API
from buffer.user import User
token = '1/714ebdb617705ef9491a81fb21c1da42'
api = API(client_id='51cc6dd5f882a8ba18000055', client_secret='83b019d154cae4d2c734d813b33e5e53')
r = api.get('user.json?')
print r.content
#user = User(api=api)
Create first examples that used the actual APIfrom buffer.api import API
from buffer.user import User
token = '1/714ebdb617705ef9491a81fb21c1da42'
api = API(client_id='51cc6dd5f882a8ba18000055', client_secret='83b019d154cae4d2c734d813b33e5e53', access_token=token)
user = User(api=api)
print user.id
| <commit_before>from buffer.api import API
from buffer.user import User
token = '1/714ebdb617705ef9491a81fb21c1da42'
api = API(client_id='51cc6dd5f882a8ba18000055', client_secret='83b019d154cae4d2c734d813b33e5e53')
r = api.get('user.json?')
print r.content
#user = User(api=api)
<commit_msg>Create first examples that used the actual API<commit_after>from buffer.api import API
from buffer.user import User
token = '1/714ebdb617705ef9491a81fb21c1da42'
api = API(client_id='51cc6dd5f882a8ba18000055', client_secret='83b019d154cae4d2c734d813b33e5e53', access_token=token)
user = User(api=api)
print user.id
|
30e10449205763363fe8663765645796b0dc8fd5 | IPython/nbconvert/preprocessors/clearoutput.py | IPython/nbconvert/preprocessors/clearoutput.py | """Module containing a preprocessor that removes the outputs from code cells"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ClearOutputPreprocessor(Preprocessor):
"""
Removes the output from all code cells in a notebook.
"""
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell. See base.py for details.
"""
if cell.cell_type == 'code':
cell.outputs = []
cell['prompt_number'] = None
return cell, resources
| """Module containing a preprocessor that removes the outputs from code cells"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ClearOutputPreprocessor(Preprocessor):
"""
Removes the output from all code cells in a notebook.
"""
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell. See base.py for details.
"""
if cell.cell_type == 'code':
cell.outputs = []
cell.prompt_number = None
return cell, resources
| Use cell.prompt_number rather than cell['prompt_number'] | Use cell.prompt_number rather than cell['prompt_number']
| Python | bsd-3-clause | ipython/ipython,ipython/ipython | """Module containing a preprocessor that removes the outputs from code cells"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ClearOutputPreprocessor(Preprocessor):
"""
Removes the output from all code cells in a notebook.
"""
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell. See base.py for details.
"""
if cell.cell_type == 'code':
cell.outputs = []
cell['prompt_number'] = None
return cell, resources
Use cell.prompt_number rather than cell['prompt_number'] | """Module containing a preprocessor that removes the outputs from code cells"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ClearOutputPreprocessor(Preprocessor):
"""
Removes the output from all code cells in a notebook.
"""
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell. See base.py for details.
"""
if cell.cell_type == 'code':
cell.outputs = []
cell.prompt_number = None
return cell, resources
| <commit_before>"""Module containing a preprocessor that removes the outputs from code cells"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ClearOutputPreprocessor(Preprocessor):
"""
Removes the output from all code cells in a notebook.
"""
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell. See base.py for details.
"""
if cell.cell_type == 'code':
cell.outputs = []
cell['prompt_number'] = None
return cell, resources
<commit_msg>Use cell.prompt_number rather than cell['prompt_number']<commit_after> | """Module containing a preprocessor that removes the outputs from code cells"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ClearOutputPreprocessor(Preprocessor):
"""
Removes the output from all code cells in a notebook.
"""
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell. See base.py for details.
"""
if cell.cell_type == 'code':
cell.outputs = []
cell.prompt_number = None
return cell, resources
| """Module containing a preprocessor that removes the outputs from code cells"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ClearOutputPreprocessor(Preprocessor):
"""
Removes the output from all code cells in a notebook.
"""
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell. See base.py for details.
"""
if cell.cell_type == 'code':
cell.outputs = []
cell['prompt_number'] = None
return cell, resources
Use cell.prompt_number rather than cell['prompt_number']"""Module containing a preprocessor that removes the outputs from code cells"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ClearOutputPreprocessor(Preprocessor):
"""
Removes the output from all code cells in a notebook.
"""
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell. See base.py for details.
"""
if cell.cell_type == 'code':
cell.outputs = []
cell.prompt_number = None
return cell, resources
| <commit_before>"""Module containing a preprocessor that removes the outputs from code cells"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ClearOutputPreprocessor(Preprocessor):
"""
Removes the output from all code cells in a notebook.
"""
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell. See base.py for details.
"""
if cell.cell_type == 'code':
cell.outputs = []
cell['prompt_number'] = None
return cell, resources
<commit_msg>Use cell.prompt_number rather than cell['prompt_number']<commit_after>"""Module containing a preprocessor that removes the outputs from code cells"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .base import Preprocessor
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class ClearOutputPreprocessor(Preprocessor):
"""
Removes the output from all code cells in a notebook.
"""
def preprocess_cell(self, cell, resources, cell_index):
"""
Apply a transformation on each cell. See base.py for details.
"""
if cell.cell_type == 'code':
cell.outputs = []
cell.prompt_number = None
return cell, resources
|
dac3c4f163c694b9b083247e72189996e5e2125c | just/json_.py | just/json_.py | import json
def read(fn):
if fn.endswith(".jsonl"):
raise TypeError("JSON Newline format can only be read by iread")
with open(fn) as f:
return json.load(f)
def append(obj, fn):
with open(fn, "a+") as f:
f.write(json.dumps(obj) + "\n")
def write(obj, fn):
with open(fn, "w") as f:
json.dump(obj, f, indent=4)
def iread(fn):
with open(fn) as f:
for line in f:
yield json.loads(line)
def iwrite(obj, fn):
with open(fn, "w") as f:
for chunk in obj:
f.write(json.dumps(chunk) + "\n")
| import json
def read(fn):
if fn.endswith(".jsonl"):
raise TypeError("JSON Newline format can only be read by iread")
with open(fn) as f:
return json.load(f)
def append(obj, fn):
with open(fn, "a+") as f:
f.write(json.dumps(obj) + "\n")
def write(obj, fn):
with open(fn, "w") as f:
json.dump(obj, f, indent=4)
def iread(fn):
with open(fn) as f:
for i, line in enumerate(f):
try:
yield json.loads(line)
except json.decoder.JSONDecodeError as e:
raise json.decoder.JSONDecodeError(
"JSON-L parsing error in line number {} in the jsonl file".format(i),
line, e.pos)
def iwrite(obj, fn):
with open(fn, "w") as f:
for chunk in obj:
f.write(json.dumps(chunk) + "\n")
| Add detailed error message to jsonl parsing | Add detailed error message to jsonl parsing | Python | agpl-3.0 | kootenpv/just | import json
def read(fn):
if fn.endswith(".jsonl"):
raise TypeError("JSON Newline format can only be read by iread")
with open(fn) as f:
return json.load(f)
def append(obj, fn):
with open(fn, "a+") as f:
f.write(json.dumps(obj) + "\n")
def write(obj, fn):
with open(fn, "w") as f:
json.dump(obj, f, indent=4)
def iread(fn):
with open(fn) as f:
for line in f:
yield json.loads(line)
def iwrite(obj, fn):
with open(fn, "w") as f:
for chunk in obj:
f.write(json.dumps(chunk) + "\n")
Add detailed error message to jsonl parsing | import json
def read(fn):
if fn.endswith(".jsonl"):
raise TypeError("JSON Newline format can only be read by iread")
with open(fn) as f:
return json.load(f)
def append(obj, fn):
with open(fn, "a+") as f:
f.write(json.dumps(obj) + "\n")
def write(obj, fn):
with open(fn, "w") as f:
json.dump(obj, f, indent=4)
def iread(fn):
with open(fn) as f:
for i, line in enumerate(f):
try:
yield json.loads(line)
except json.decoder.JSONDecodeError as e:
raise json.decoder.JSONDecodeError(
"JSON-L parsing error in line number {} in the jsonl file".format(i),
line, e.pos)
def iwrite(obj, fn):
with open(fn, "w") as f:
for chunk in obj:
f.write(json.dumps(chunk) + "\n")
| <commit_before>import json
def read(fn):
if fn.endswith(".jsonl"):
raise TypeError("JSON Newline format can only be read by iread")
with open(fn) as f:
return json.load(f)
def append(obj, fn):
with open(fn, "a+") as f:
f.write(json.dumps(obj) + "\n")
def write(obj, fn):
with open(fn, "w") as f:
json.dump(obj, f, indent=4)
def iread(fn):
with open(fn) as f:
for line in f:
yield json.loads(line)
def iwrite(obj, fn):
with open(fn, "w") as f:
for chunk in obj:
f.write(json.dumps(chunk) + "\n")
<commit_msg>Add detailed error message to jsonl parsing<commit_after> | import json
def read(fn):
if fn.endswith(".jsonl"):
raise TypeError("JSON Newline format can only be read by iread")
with open(fn) as f:
return json.load(f)
def append(obj, fn):
with open(fn, "a+") as f:
f.write(json.dumps(obj) + "\n")
def write(obj, fn):
with open(fn, "w") as f:
json.dump(obj, f, indent=4)
def iread(fn):
with open(fn) as f:
for i, line in enumerate(f):
try:
yield json.loads(line)
except json.decoder.JSONDecodeError as e:
raise json.decoder.JSONDecodeError(
"JSON-L parsing error in line number {} in the jsonl file".format(i),
line, e.pos)
def iwrite(obj, fn):
with open(fn, "w") as f:
for chunk in obj:
f.write(json.dumps(chunk) + "\n")
| import json
def read(fn):
if fn.endswith(".jsonl"):
raise TypeError("JSON Newline format can only be read by iread")
with open(fn) as f:
return json.load(f)
def append(obj, fn):
with open(fn, "a+") as f:
f.write(json.dumps(obj) + "\n")
def write(obj, fn):
with open(fn, "w") as f:
json.dump(obj, f, indent=4)
def iread(fn):
with open(fn) as f:
for line in f:
yield json.loads(line)
def iwrite(obj, fn):
with open(fn, "w") as f:
for chunk in obj:
f.write(json.dumps(chunk) + "\n")
Add detailed error message to jsonl parsingimport json
def read(fn):
if fn.endswith(".jsonl"):
raise TypeError("JSON Newline format can only be read by iread")
with open(fn) as f:
return json.load(f)
def append(obj, fn):
with open(fn, "a+") as f:
f.write(json.dumps(obj) + "\n")
def write(obj, fn):
with open(fn, "w") as f:
json.dump(obj, f, indent=4)
def iread(fn):
with open(fn) as f:
for i, line in enumerate(f):
try:
yield json.loads(line)
except json.decoder.JSONDecodeError as e:
raise json.decoder.JSONDecodeError(
"JSON-L parsing error in line number {} in the jsonl file".format(i),
line, e.pos)
def iwrite(obj, fn):
with open(fn, "w") as f:
for chunk in obj:
f.write(json.dumps(chunk) + "\n")
| <commit_before>import json
def read(fn):
if fn.endswith(".jsonl"):
raise TypeError("JSON Newline format can only be read by iread")
with open(fn) as f:
return json.load(f)
def append(obj, fn):
with open(fn, "a+") as f:
f.write(json.dumps(obj) + "\n")
def write(obj, fn):
with open(fn, "w") as f:
json.dump(obj, f, indent=4)
def iread(fn):
with open(fn) as f:
for line in f:
yield json.loads(line)
def iwrite(obj, fn):
with open(fn, "w") as f:
for chunk in obj:
f.write(json.dumps(chunk) + "\n")
<commit_msg>Add detailed error message to jsonl parsing<commit_after>import json
def read(fn):
if fn.endswith(".jsonl"):
raise TypeError("JSON Newline format can only be read by iread")
with open(fn) as f:
return json.load(f)
def append(obj, fn):
with open(fn, "a+") as f:
f.write(json.dumps(obj) + "\n")
def write(obj, fn):
with open(fn, "w") as f:
json.dump(obj, f, indent=4)
def iread(fn):
with open(fn) as f:
for i, line in enumerate(f):
try:
yield json.loads(line)
except json.decoder.JSONDecodeError as e:
raise json.decoder.JSONDecodeError(
"JSON-L parsing error in line number {} in the jsonl file".format(i),
line, e.pos)
def iwrite(obj, fn):
with open(fn, "w") as f:
for chunk in obj:
f.write(json.dumps(chunk) + "\n")
|
60cbe21d95cc6e079979022a505dcc2099bd30c1 | cla_public/libs/call_centre_availability.py | cla_public/libs/call_centre_availability.py | import datetime
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix(d):
if 11 <= d <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th"))
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
| import datetime
from cla_public.libs.utils import get_locale
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix_welsh(day):
ordinals = {
"1": "af",
"2": "il",
"3": "ydd",
"4": "ydd",
"5": "ed",
"6": "ed",
"7": "fed",
"8": "fed",
"9": "fed",
"10": "fed",
"11": "eg",
"12": "fed",
"13": "eg",
"14": "eg",
"15": "fed",
"16": "eg",
"17": "eg",
"18": "fed",
"19": "eg",
"20": "fed",
}
return ordinals.get(str(day), "ain")
def suffix_english(day):
if 11 <= day <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(day % 10, _("th"))
def suffix(day):
if get_locale()[:2] == "cy":
return suffix_welsh(day)
return suffix_english(day)
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
| Add welsh days ordinal suffix | Add welsh days ordinal suffix
| Python | mit | ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public | import datetime
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix(d):
if 11 <= d <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th"))
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
Add welsh days ordinal suffix | import datetime
from cla_public.libs.utils import get_locale
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix_welsh(day):
ordinals = {
"1": "af",
"2": "il",
"3": "ydd",
"4": "ydd",
"5": "ed",
"6": "ed",
"7": "fed",
"8": "fed",
"9": "fed",
"10": "fed",
"11": "eg",
"12": "fed",
"13": "eg",
"14": "eg",
"15": "fed",
"16": "eg",
"17": "eg",
"18": "fed",
"19": "eg",
"20": "fed",
}
return ordinals.get(str(day), "ain")
def suffix_english(day):
if 11 <= day <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(day % 10, _("th"))
def suffix(day):
if get_locale()[:2] == "cy":
return suffix_welsh(day)
return suffix_english(day)
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
| <commit_before>import datetime
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix(d):
if 11 <= d <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th"))
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
<commit_msg>Add welsh days ordinal suffix<commit_after> | import datetime
from cla_public.libs.utils import get_locale
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix_welsh(day):
ordinals = {
"1": "af",
"2": "il",
"3": "ydd",
"4": "ydd",
"5": "ed",
"6": "ed",
"7": "fed",
"8": "fed",
"9": "fed",
"10": "fed",
"11": "eg",
"12": "fed",
"13": "eg",
"14": "eg",
"15": "fed",
"16": "eg",
"17": "eg",
"18": "fed",
"19": "eg",
"20": "fed",
}
return ordinals.get(str(day), "ain")
def suffix_english(day):
if 11 <= day <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(day % 10, _("th"))
def suffix(day):
if get_locale()[:2] == "cy":
return suffix_welsh(day)
return suffix_english(day)
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
| import datetime
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix(d):
if 11 <= d <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th"))
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
Add welsh days ordinal suffiximport datetime
from cla_public.libs.utils import get_locale
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix_welsh(day):
ordinals = {
"1": "af",
"2": "il",
"3": "ydd",
"4": "ydd",
"5": "ed",
"6": "ed",
"7": "fed",
"8": "fed",
"9": "fed",
"10": "fed",
"11": "eg",
"12": "fed",
"13": "eg",
"14": "eg",
"15": "fed",
"16": "eg",
"17": "eg",
"18": "fed",
"19": "eg",
"20": "fed",
}
return ordinals.get(str(day), "ain")
def suffix_english(day):
if 11 <= day <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(day % 10, _("th"))
def suffix(day):
if get_locale()[:2] == "cy":
return suffix_welsh(day)
return suffix_english(day)
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
| <commit_before>import datetime
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix(d):
if 11 <= d <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(d % 10, _("th"))
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
<commit_msg>Add welsh days ordinal suffix<commit_after>import datetime
from cla_public.libs.utils import get_locale
from flask.ext.babel import lazy_gettext as _
def time_choice(time):
display_format = "%I:%M %p"
end = time + datetime.timedelta(minutes=30)
display_string = time.strftime(display_format).lstrip("0") + " - " + end.strftime(display_format).lstrip("0")
return time.strftime("%H%M"), display_string
def suffix_welsh(day):
ordinals = {
"1": "af",
"2": "il",
"3": "ydd",
"4": "ydd",
"5": "ed",
"6": "ed",
"7": "fed",
"8": "fed",
"9": "fed",
"10": "fed",
"11": "eg",
"12": "fed",
"13": "eg",
"14": "eg",
"15": "fed",
"16": "eg",
"17": "eg",
"18": "fed",
"19": "eg",
"20": "fed",
}
return ordinals.get(str(day), "ain")
def suffix_english(day):
if 11 <= day <= 13:
return _("th")
return {1: _("st"), 2: _("nd"), 3: _("rd")}.get(day % 10, _("th"))
def suffix(day):
if get_locale()[:2] == "cy":
return suffix_welsh(day)
return suffix_english(day)
def day_choice(day):
return day.strftime("%Y%m%d"), "%s %s%s" % (_(day.strftime("%A")), day.strftime("%d").lstrip("0"), suffix(day.day))
|
c33bbe44708ccf60f4a0cf759b3f38b739b7fb5d | PartyUPLambda/purge.py | PartyUPLambda/purge.py | """
Scan through the Samples table for oldish entries and remove them.
"""
import json
import boto3
import time
import decimal
from boto3.dynamodb.conditions import Key, Attr
def purge_item(item, batch):
response = batch.delete_item(
Key={
'event' : item['event'],
'id': item['id']
}
)
def purge_handler(event, context):
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Samples')
filter = Attr('time').lte(decimal.Decimal(time.time()-172800))
with table.batch_writer() as batch:
response = table.scan(
FilterExpression=filter
)
for item in response['Items']:
purge_item(item, batch)
while 'LastEvaluatedKey' in response:
response = scan(
FilterExpression=filter,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for item in response['Items']:
purge_item(item, batch)
| """
Scan through the Samples table for oldish entries and remove them.
"""
import logging
import json
import boto3
import time
import decimal
from boto3.dynamodb.conditions import Key, Attr
logger = logging.getLogger()
logger.setLevel(logging.ERROR)
def purge_item(item, batch):
response = batch.delete_item(
Key={
'event' : item['event'],
'id': item['id']
}
)
def purge_handler(event, context):
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Samples')
filter = Attr('time').lte(decimal.Decimal(time.time()-172800))
with table.batch_writer() as batch:
response = table.scan(
FilterExpression=filter
)
for item in response['Items']:
purge_item(item, batch)
while 'LastEvaluatedKey' in response:
response = scan(
FilterExpression=filter,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for item in response['Items']:
purge_item(item, batch)
| Add error logging to aid tracking down future issues. | Add error logging to aid tracking down future issues.
| Python | mit | SandcastleApps/partyup,SandcastleApps/partyup,SandcastleApps/partyup | """
Scan through the Samples table for oldish entries and remove them.
"""
import json
import boto3
import time
import decimal
from boto3.dynamodb.conditions import Key, Attr
def purge_item(item, batch):
response = batch.delete_item(
Key={
'event' : item['event'],
'id': item['id']
}
)
def purge_handler(event, context):
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Samples')
filter = Attr('time').lte(decimal.Decimal(time.time()-172800))
with table.batch_writer() as batch:
response = table.scan(
FilterExpression=filter
)
for item in response['Items']:
purge_item(item, batch)
while 'LastEvaluatedKey' in response:
response = scan(
FilterExpression=filter,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for item in response['Items']:
purge_item(item, batch)
Add error logging to aid tracking down future issues. | """
Scan through the Samples table for oldish entries and remove them.
"""
import logging
import json
import boto3
import time
import decimal
from boto3.dynamodb.conditions import Key, Attr
logger = logging.getLogger()
logger.setLevel(logging.ERROR)
def purge_item(item, batch):
response = batch.delete_item(
Key={
'event' : item['event'],
'id': item['id']
}
)
def purge_handler(event, context):
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Samples')
filter = Attr('time').lte(decimal.Decimal(time.time()-172800))
with table.batch_writer() as batch:
response = table.scan(
FilterExpression=filter
)
for item in response['Items']:
purge_item(item, batch)
while 'LastEvaluatedKey' in response:
response = scan(
FilterExpression=filter,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for item in response['Items']:
purge_item(item, batch)
| <commit_before>"""
Scan through the Samples table for oldish entries and remove them.
"""
import json
import boto3
import time
import decimal
from boto3.dynamodb.conditions import Key, Attr
def purge_item(item, batch):
response = batch.delete_item(
Key={
'event' : item['event'],
'id': item['id']
}
)
def purge_handler(event, context):
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Samples')
filter = Attr('time').lte(decimal.Decimal(time.time()-172800))
with table.batch_writer() as batch:
response = table.scan(
FilterExpression=filter
)
for item in response['Items']:
purge_item(item, batch)
while 'LastEvaluatedKey' in response:
response = scan(
FilterExpression=filter,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for item in response['Items']:
purge_item(item, batch)
<commit_msg>Add error logging to aid tracking down future issues.<commit_after> | """
Scan through the Samples table for oldish entries and remove them.
"""
import logging
import json
import boto3
import time
import decimal
from boto3.dynamodb.conditions import Key, Attr
logger = logging.getLogger()
logger.setLevel(logging.ERROR)
def purge_item(item, batch):
response = batch.delete_item(
Key={
'event' : item['event'],
'id': item['id']
}
)
def purge_handler(event, context):
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Samples')
filter = Attr('time').lte(decimal.Decimal(time.time()-172800))
with table.batch_writer() as batch:
response = table.scan(
FilterExpression=filter
)
for item in response['Items']:
purge_item(item, batch)
while 'LastEvaluatedKey' in response:
response = scan(
FilterExpression=filter,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for item in response['Items']:
purge_item(item, batch)
| """
Scan through the Samples table for oldish entries and remove them.
"""
import json
import boto3
import time
import decimal
from boto3.dynamodb.conditions import Key, Attr
def purge_item(item, batch):
response = batch.delete_item(
Key={
'event' : item['event'],
'id': item['id']
}
)
def purge_handler(event, context):
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Samples')
filter = Attr('time').lte(decimal.Decimal(time.time()-172800))
with table.batch_writer() as batch:
response = table.scan(
FilterExpression=filter
)
for item in response['Items']:
purge_item(item, batch)
while 'LastEvaluatedKey' in response:
response = scan(
FilterExpression=filter,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for item in response['Items']:
purge_item(item, batch)
Add error logging to aid tracking down future issues."""
Scan through the Samples table for oldish entries and remove them.
"""
import logging
import json
import boto3
import time
import decimal
from boto3.dynamodb.conditions import Key, Attr
logger = logging.getLogger()
logger.setLevel(logging.ERROR)
def purge_item(item, batch):
response = batch.delete_item(
Key={
'event' : item['event'],
'id': item['id']
}
)
def purge_handler(event, context):
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Samples')
filter = Attr('time').lte(decimal.Decimal(time.time()-172800))
with table.batch_writer() as batch:
response = table.scan(
FilterExpression=filter
)
for item in response['Items']:
purge_item(item, batch)
while 'LastEvaluatedKey' in response:
response = scan(
FilterExpression=filter,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for item in response['Items']:
purge_item(item, batch)
| <commit_before>"""
Scan through the Samples table for oldish entries and remove them.
"""
import json
import boto3
import time
import decimal
from boto3.dynamodb.conditions import Key, Attr
def purge_item(item, batch):
response = batch.delete_item(
Key={
'event' : item['event'],
'id': item['id']
}
)
def purge_handler(event, context):
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Samples')
filter = Attr('time').lte(decimal.Decimal(time.time()-172800))
with table.batch_writer() as batch:
response = table.scan(
FilterExpression=filter
)
for item in response['Items']:
purge_item(item, batch)
while 'LastEvaluatedKey' in response:
response = scan(
FilterExpression=filter,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for item in response['Items']:
purge_item(item, batch)
<commit_msg>Add error logging to aid tracking down future issues.<commit_after>"""
Scan through the Samples table for oldish entries and remove them.
"""
import logging
import json
import boto3
import time
import decimal
from boto3.dynamodb.conditions import Key, Attr
logger = logging.getLogger()
logger.setLevel(logging.ERROR)
def purge_item(item, batch):
response = batch.delete_item(
Key={
'event' : item['event'],
'id': item['id']
}
)
def purge_handler(event, context):
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('Samples')
filter = Attr('time').lte(decimal.Decimal(time.time()-172800))
with table.batch_writer() as batch:
response = table.scan(
FilterExpression=filter
)
for item in response['Items']:
purge_item(item, batch)
while 'LastEvaluatedKey' in response:
response = scan(
FilterExpression=filter,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for item in response['Items']:
purge_item(item, batch)
|
3f750865762e7751ce0cbd4a171d68e9d1d5a8a6 | contrib/tempest/tempest/cli/manilaclient.py | contrib/tempest/tempest/cli/manilaclient.py | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.cli import base # noqa
from tempest import config_share as config
from tempest import test
CONF = config.CONF
class ClientTestBase(base.ClientTestBase, test.BaseTestCase):
def manila(self, action, flags='', params='', fail_ok=False,
endpoint_type='publicURL', merge_stderr=False):
"""Executes manila command for the given action."""
flags += ' --endpoint-type %s' % endpoint_type
return self.clients.cmd_with_auth(
'manila', action, flags, params, fail_ok, merge_stderr)
def _get_clients(self):
clients = base.CLIClient(
CONF.identity.admin_username,
CONF.identity.admin_password,
CONF.identity.admin_tenant_name,
CONF.identity.uri,
CONF.cli.cli_dir,
)
return clients
| # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import cli
class ClientTestBase(cli.ClientTestBase):
def manila(self, action, flags='', params='', fail_ok=False,
endpoint_type='publicURL', merge_stderr=False):
"""Executes manila command for the given action."""
flags += ' --endpoint-type %s' % endpoint_type
return self.clients.cmd_with_auth(
'manila', action, flags, params, fail_ok, merge_stderr)
| Fix tempest compatibility for cli tests | Fix tempest compatibility for cli tests
Commit https://github.com/openstack/tempest/commit/2474f41f made changes to
tempest project that are inconsistent with our plugin.
Make our plugin use latest changes to keep compatibility.
Change-Id: I08d28b40fdd9ad54a0bcce30647d796943332116
| Python | apache-2.0 | bswartz/manila,jcsp/manila,bswartz/manila,weiting-chen/manila,sajuptpm/manila,openstack/manila,NetApp/manila,redhat-openstack/manila,sajuptpm/manila,vponomaryov/manila,weiting-chen/manila,NetApp/manila,jcsp/manila,vponomaryov/manila,redhat-openstack/manila,openstack/manila,scality/manila,scality/manila | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.cli import base # noqa
from tempest import config_share as config
from tempest import test
CONF = config.CONF
class ClientTestBase(base.ClientTestBase, test.BaseTestCase):
def manila(self, action, flags='', params='', fail_ok=False,
endpoint_type='publicURL', merge_stderr=False):
"""Executes manila command for the given action."""
flags += ' --endpoint-type %s' % endpoint_type
return self.clients.cmd_with_auth(
'manila', action, flags, params, fail_ok, merge_stderr)
def _get_clients(self):
clients = base.CLIClient(
CONF.identity.admin_username,
CONF.identity.admin_password,
CONF.identity.admin_tenant_name,
CONF.identity.uri,
CONF.cli.cli_dir,
)
return clients
Fix tempest compatibility for cli tests
Commit https://github.com/openstack/tempest/commit/2474f41f made changes to
tempest project that are inconsistent with our plugin.
Make our plugin use latest changes to keep compatibility.
Change-Id: I08d28b40fdd9ad54a0bcce30647d796943332116 | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import cli
class ClientTestBase(cli.ClientTestBase):
def manila(self, action, flags='', params='', fail_ok=False,
endpoint_type='publicURL', merge_stderr=False):
"""Executes manila command for the given action."""
flags += ' --endpoint-type %s' % endpoint_type
return self.clients.cmd_with_auth(
'manila', action, flags, params, fail_ok, merge_stderr)
| <commit_before># Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.cli import base # noqa
from tempest import config_share as config
from tempest import test
CONF = config.CONF
class ClientTestBase(base.ClientTestBase, test.BaseTestCase):
def manila(self, action, flags='', params='', fail_ok=False,
endpoint_type='publicURL', merge_stderr=False):
"""Executes manila command for the given action."""
flags += ' --endpoint-type %s' % endpoint_type
return self.clients.cmd_with_auth(
'manila', action, flags, params, fail_ok, merge_stderr)
def _get_clients(self):
clients = base.CLIClient(
CONF.identity.admin_username,
CONF.identity.admin_password,
CONF.identity.admin_tenant_name,
CONF.identity.uri,
CONF.cli.cli_dir,
)
return clients
<commit_msg>Fix tempest compatibility for cli tests
Commit https://github.com/openstack/tempest/commit/2474f41f made changes to
tempest project that are inconsistent with our plugin.
Make our plugin use latest changes to keep compatibility.
Change-Id: I08d28b40fdd9ad54a0bcce30647d796943332116<commit_after> | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import cli
class ClientTestBase(cli.ClientTestBase):
def manila(self, action, flags='', params='', fail_ok=False,
endpoint_type='publicURL', merge_stderr=False):
"""Executes manila command for the given action."""
flags += ' --endpoint-type %s' % endpoint_type
return self.clients.cmd_with_auth(
'manila', action, flags, params, fail_ok, merge_stderr)
| # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.cli import base # noqa
from tempest import config_share as config
from tempest import test
CONF = config.CONF
class ClientTestBase(base.ClientTestBase, test.BaseTestCase):
def manila(self, action, flags='', params='', fail_ok=False,
endpoint_type='publicURL', merge_stderr=False):
"""Executes manila command for the given action."""
flags += ' --endpoint-type %s' % endpoint_type
return self.clients.cmd_with_auth(
'manila', action, flags, params, fail_ok, merge_stderr)
def _get_clients(self):
clients = base.CLIClient(
CONF.identity.admin_username,
CONF.identity.admin_password,
CONF.identity.admin_tenant_name,
CONF.identity.uri,
CONF.cli.cli_dir,
)
return clients
Fix tempest compatibility for cli tests
Commit https://github.com/openstack/tempest/commit/2474f41f made changes to
tempest project that are inconsistent with our plugin.
Make our plugin use latest changes to keep compatibility.
Change-Id: I08d28b40fdd9ad54a0bcce30647d796943332116# Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import cli
class ClientTestBase(cli.ClientTestBase):
def manila(self, action, flags='', params='', fail_ok=False,
endpoint_type='publicURL', merge_stderr=False):
"""Executes manila command for the given action."""
flags += ' --endpoint-type %s' % endpoint_type
return self.clients.cmd_with_auth(
'manila', action, flags, params, fail_ok, merge_stderr)
| <commit_before># Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.cli import base # noqa
from tempest import config_share as config
from tempest import test
CONF = config.CONF
class ClientTestBase(base.ClientTestBase, test.BaseTestCase):
def manila(self, action, flags='', params='', fail_ok=False,
endpoint_type='publicURL', merge_stderr=False):
"""Executes manila command for the given action."""
flags += ' --endpoint-type %s' % endpoint_type
return self.clients.cmd_with_auth(
'manila', action, flags, params, fail_ok, merge_stderr)
def _get_clients(self):
clients = base.CLIClient(
CONF.identity.admin_username,
CONF.identity.admin_password,
CONF.identity.admin_tenant_name,
CONF.identity.uri,
CONF.cli.cli_dir,
)
return clients
<commit_msg>Fix tempest compatibility for cli tests
Commit https://github.com/openstack/tempest/commit/2474f41f made changes to
tempest project that are inconsistent with our plugin.
Make our plugin use latest changes to keep compatibility.
Change-Id: I08d28b40fdd9ad54a0bcce30647d796943332116<commit_after># Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import cli
class ClientTestBase(cli.ClientTestBase):
def manila(self, action, flags='', params='', fail_ok=False,
endpoint_type='publicURL', merge_stderr=False):
"""Executes manila command for the given action."""
flags += ' --endpoint-type %s' % endpoint_type
return self.clients.cmd_with_auth(
'manila', action, flags, params, fail_ok, merge_stderr)
|
0a9aed9427b0b36d71a2e8fee74db74690727b15 | hardware/gpio/LEDblink_gpiozero.py | hardware/gpio/LEDblink_gpiozero.py | from gpiozero import LED
import time
# GPIO 24 ... LED ... 470 ohm resistor ... GND
led = LED(24)
try:
led.blink()
time.sleep(20)
except KeyboardInterrupt:
led.off()
| from gpiozero import LED
import time
# GPIO 24 ... LED ... 470 ohm resistor ... GND
led = LED(24)
try:
led.blink()
time.sleep(20)
except KeyboardInterrupt:
led.off()
print "done"
| Print message on completion, to aid debugging | Print message on completion, to aid debugging | Python | mit | claremacrae/raspi_code,claremacrae/raspi_code,claremacrae/raspi_code | from gpiozero import LED
import time
# GPIO 24 ... LED ... 470 ohm resistor ... GND
led = LED(24)
try:
led.blink()
time.sleep(20)
except KeyboardInterrupt:
led.off()
Print message on completion, to aid debugging | from gpiozero import LED
import time
# GPIO 24 ... LED ... 470 ohm resistor ... GND
led = LED(24)
try:
led.blink()
time.sleep(20)
except KeyboardInterrupt:
led.off()
print "done"
| <commit_before>from gpiozero import LED
import time
# GPIO 24 ... LED ... 470 ohm resistor ... GND
led = LED(24)
try:
led.blink()
time.sleep(20)
except KeyboardInterrupt:
led.off()
<commit_msg>Print message on completion, to aid debugging<commit_after> | from gpiozero import LED
import time
# GPIO 24 ... LED ... 470 ohm resistor ... GND
led = LED(24)
try:
led.blink()
time.sleep(20)
except KeyboardInterrupt:
led.off()
print "done"
| from gpiozero import LED
import time
# GPIO 24 ... LED ... 470 ohm resistor ... GND
led = LED(24)
try:
led.blink()
time.sleep(20)
except KeyboardInterrupt:
led.off()
Print message on completion, to aid debuggingfrom gpiozero import LED
import time
# GPIO 24 ... LED ... 470 ohm resistor ... GND
led = LED(24)
try:
led.blink()
time.sleep(20)
except KeyboardInterrupt:
led.off()
print "done"
| <commit_before>from gpiozero import LED
import time
# GPIO 24 ... LED ... 470 ohm resistor ... GND
led = LED(24)
try:
led.blink()
time.sleep(20)
except KeyboardInterrupt:
led.off()
<commit_msg>Print message on completion, to aid debugging<commit_after>from gpiozero import LED
import time
# GPIO 24 ... LED ... 470 ohm resistor ... GND
led = LED(24)
try:
led.blink()
time.sleep(20)
except KeyboardInterrupt:
led.off()
print "done"
|
f375fc031640c24dd7761283b931b402d8366421 | django_compat_patcher/patcher.py | django_compat_patcher/patcher.py | from __future__ import absolute_import, print_function, unicode_literals
from . import fixers, utilities, deprecation, registry
# TODO make it idempotent with registry of applied fixes, just log if double applications of same fixers !!!!!
def patch(settings=None):
"""Patches the Django package with relevant fixers.
A settings dict/objects can be provided, to REPLACE lookups in Django settings.
Returns a list of ids of fixers applied.
"""
print("Fixers are:", registry.FIXERS_REGISTRY)
django_version = utilities.get_django_version()
selected_fixers = registry.get_relevant_fixers(current_django_version=django_version, settings=settings)
for fixer in selected_fixers:
#print("Applying fixer", fixer)
# TODO - create custom injected "utils" object with context information, logging, warnings, etc.
fixer["fixer_callable"](utilities)
return selected_fixers
| from __future__ import absolute_import, print_function, unicode_literals
from . import fixers, utilities, deprecation, registry
__APPLIED_FIXERS = set()
def patch(settings=None):
"""Patches the Django package with relevant fixers.
A settings dict/objects can be provided, to REPLACE lookups in Django settings.
Returns a list of ids of fixers applied.
"""
print("Fixers are:", registry.FIXERS_REGISTRY)
django_version = utilities.get_django_version()
selected_fixers = registry.get_relevant_fixers(current_django_version=django_version, settings=settings)
for fixer in selected_fixers:
#print("Applying fixer", fixer)
# TODO - create custom injected "utils" object with context information, logging, warnings, etc.
if fixer['fixer_id'] not in __APPLIED_FIXERS:
fixer["fixer_callable"](utilities)
__APPLIED_FIXERS.add(fixer['fixer_id'])
else:
utilities.logger.warning("Fixer '{}' was already applied".format(fixer['fixer_id']))
return selected_fixers | Add __APPLIED_FIXERS set to make sure we are idempotent | Add __APPLIED_FIXERS set to make sure we are idempotent
| Python | mit | pakal/django-compat-patcher,pakal/django-compat-patcher | from __future__ import absolute_import, print_function, unicode_literals
from . import fixers, utilities, deprecation, registry
# TODO make it idempotent with registry of applied fixes, just log if double applications of same fixers !!!!!
def patch(settings=None):
"""Patches the Django package with relevant fixers.
A settings dict/objects can be provided, to REPLACE lookups in Django settings.
Returns a list of ids of fixers applied.
"""
print("Fixers are:", registry.FIXERS_REGISTRY)
django_version = utilities.get_django_version()
selected_fixers = registry.get_relevant_fixers(current_django_version=django_version, settings=settings)
for fixer in selected_fixers:
#print("Applying fixer", fixer)
# TODO - create custom injected "utils" object with context information, logging, warnings, etc.
fixer["fixer_callable"](utilities)
return selected_fixers
Add __APPLIED_FIXERS set to make sure we are idempotent | from __future__ import absolute_import, print_function, unicode_literals
from . import fixers, utilities, deprecation, registry
__APPLIED_FIXERS = set()
def patch(settings=None):
"""Patches the Django package with relevant fixers.
A settings dict/objects can be provided, to REPLACE lookups in Django settings.
Returns a list of ids of fixers applied.
"""
print("Fixers are:", registry.FIXERS_REGISTRY)
django_version = utilities.get_django_version()
selected_fixers = registry.get_relevant_fixers(current_django_version=django_version, settings=settings)
for fixer in selected_fixers:
#print("Applying fixer", fixer)
# TODO - create custom injected "utils" object with context information, logging, warnings, etc.
if fixer['fixer_id'] not in __APPLIED_FIXERS:
fixer["fixer_callable"](utilities)
__APPLIED_FIXERS.add(fixer['fixer_id'])
else:
utilities.logger.warning("Fixer '{}' was already applied".format(fixer['fixer_id']))
return selected_fixers | <commit_before>from __future__ import absolute_import, print_function, unicode_literals
from . import fixers, utilities, deprecation, registry
# TODO make it idempotent with registry of applied fixes, just log if double applications of same fixers !!!!!
def patch(settings=None):
"""Patches the Django package with relevant fixers.
A settings dict/objects can be provided, to REPLACE lookups in Django settings.
Returns a list of ids of fixers applied.
"""
print("Fixers are:", registry.FIXERS_REGISTRY)
django_version = utilities.get_django_version()
selected_fixers = registry.get_relevant_fixers(current_django_version=django_version, settings=settings)
for fixer in selected_fixers:
#print("Applying fixer", fixer)
# TODO - create custom injected "utils" object with context information, logging, warnings, etc.
fixer["fixer_callable"](utilities)
return selected_fixers
<commit_msg>Add __APPLIED_FIXERS set to make sure we are idempotent<commit_after> | from __future__ import absolute_import, print_function, unicode_literals
from . import fixers, utilities, deprecation, registry
__APPLIED_FIXERS = set()
def patch(settings=None):
"""Patches the Django package with relevant fixers.
A settings dict/objects can be provided, to REPLACE lookups in Django settings.
Returns a list of ids of fixers applied.
"""
print("Fixers are:", registry.FIXERS_REGISTRY)
django_version = utilities.get_django_version()
selected_fixers = registry.get_relevant_fixers(current_django_version=django_version, settings=settings)
for fixer in selected_fixers:
#print("Applying fixer", fixer)
# TODO - create custom injected "utils" object with context information, logging, warnings, etc.
if fixer['fixer_id'] not in __APPLIED_FIXERS:
fixer["fixer_callable"](utilities)
__APPLIED_FIXERS.add(fixer['fixer_id'])
else:
utilities.logger.warning("Fixer '{}' was already applied".format(fixer['fixer_id']))
return selected_fixers | from __future__ import absolute_import, print_function, unicode_literals
from . import fixers, utilities, deprecation, registry
# TODO make it idempotent with registry of applied fixes, just log if double applications of same fixers !!!!!
def patch(settings=None):
"""Patches the Django package with relevant fixers.
A settings dict/objects can be provided, to REPLACE lookups in Django settings.
Returns a list of ids of fixers applied.
"""
print("Fixers are:", registry.FIXERS_REGISTRY)
django_version = utilities.get_django_version()
selected_fixers = registry.get_relevant_fixers(current_django_version=django_version, settings=settings)
for fixer in selected_fixers:
#print("Applying fixer", fixer)
# TODO - create custom injected "utils" object with context information, logging, warnings, etc.
fixer["fixer_callable"](utilities)
return selected_fixers
Add __APPLIED_FIXERS set to make sure we are idempotentfrom __future__ import absolute_import, print_function, unicode_literals
from . import fixers, utilities, deprecation, registry
__APPLIED_FIXERS = set()
def patch(settings=None):
"""Patches the Django package with relevant fixers.
A settings dict/objects can be provided, to REPLACE lookups in Django settings.
Returns a list of ids of fixers applied.
"""
print("Fixers are:", registry.FIXERS_REGISTRY)
django_version = utilities.get_django_version()
selected_fixers = registry.get_relevant_fixers(current_django_version=django_version, settings=settings)
for fixer in selected_fixers:
#print("Applying fixer", fixer)
# TODO - create custom injected "utils" object with context information, logging, warnings, etc.
if fixer['fixer_id'] not in __APPLIED_FIXERS:
fixer["fixer_callable"](utilities)
__APPLIED_FIXERS.add(fixer['fixer_id'])
else:
utilities.logger.warning("Fixer '{}' was already applied".format(fixer['fixer_id']))
return selected_fixers | <commit_before>from __future__ import absolute_import, print_function, unicode_literals
from . import fixers, utilities, deprecation, registry
# TODO make it idempotent with registry of applied fixes, just log if double applications of same fixers !!!!!
def patch(settings=None):
"""Patches the Django package with relevant fixers.
A settings dict/objects can be provided, to REPLACE lookups in Django settings.
Returns a list of ids of fixers applied.
"""
print("Fixers are:", registry.FIXERS_REGISTRY)
django_version = utilities.get_django_version()
selected_fixers = registry.get_relevant_fixers(current_django_version=django_version, settings=settings)
for fixer in selected_fixers:
#print("Applying fixer", fixer)
# TODO - create custom injected "utils" object with context information, logging, warnings, etc.
fixer["fixer_callable"](utilities)
return selected_fixers
<commit_msg>Add __APPLIED_FIXERS set to make sure we are idempotent<commit_after>from __future__ import absolute_import, print_function, unicode_literals
from . import fixers, utilities, deprecation, registry
__APPLIED_FIXERS = set()
def patch(settings=None):
"""Patches the Django package with relevant fixers.
A settings dict/objects can be provided, to REPLACE lookups in Django settings.
Returns a list of ids of fixers applied.
"""
print("Fixers are:", registry.FIXERS_REGISTRY)
django_version = utilities.get_django_version()
selected_fixers = registry.get_relevant_fixers(current_django_version=django_version, settings=settings)
for fixer in selected_fixers:
#print("Applying fixer", fixer)
# TODO - create custom injected "utils" object with context information, logging, warnings, etc.
if fixer['fixer_id'] not in __APPLIED_FIXERS:
fixer["fixer_callable"](utilities)
__APPLIED_FIXERS.add(fixer['fixer_id'])
else:
utilities.logger.warning("Fixer '{}' was already applied".format(fixer['fixer_id']))
return selected_fixers |
e87017a724b274ad3289f4af674ae88bf8f21b8a | devbin/benchmark_proc_get_all.py | devbin/benchmark_proc_get_all.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Benchmark listing all processes
Usage:
benchmark_proc_get_all.py
"""
import os
MYDIR = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, os.path.join(MYDIR, ".."))
import time
from px import px_process
LAPS=20
def main():
t0 = time.time()
for iteration in range(LAPS):
px_process.get_all()
t1 = time.time()
dt_seconds = t1 - t0
print("Getting all processes takes {:.3f}s".format(dt_seconds/LAPS))
if __name__ == "__main__":
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Benchmark listing all processes
Usage:
benchmark_proc_get_all.py
"""
import os
MYDIR = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, os.path.join(MYDIR, ".."))
import time
from px import px_process
LAPS=20
def main():
t0 = time.time()
for iteration in range(LAPS):
px_process.get_all()
t1 = time.time()
dt_seconds = t1 - t0
print("Getting all processes takes {:.0f}ms".format(1000*dt_seconds/LAPS))
if __name__ == "__main__":
main()
| Use ms rather than s for process getting benchmark | Use ms rather than s for process getting benchmark
| Python | mit | walles/px,walles/px | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Benchmark listing all processes
Usage:
benchmark_proc_get_all.py
"""
import os
MYDIR = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, os.path.join(MYDIR, ".."))
import time
from px import px_process
LAPS=20
def main():
t0 = time.time()
for iteration in range(LAPS):
px_process.get_all()
t1 = time.time()
dt_seconds = t1 - t0
print("Getting all processes takes {:.3f}s".format(dt_seconds/LAPS))
if __name__ == "__main__":
main()
Use ms rather than s for process getting benchmark | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Benchmark listing all processes
Usage:
benchmark_proc_get_all.py
"""
import os
MYDIR = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, os.path.join(MYDIR, ".."))
import time
from px import px_process
LAPS=20
def main():
t0 = time.time()
for iteration in range(LAPS):
px_process.get_all()
t1 = time.time()
dt_seconds = t1 - t0
print("Getting all processes takes {:.0f}ms".format(1000*dt_seconds/LAPS))
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Benchmark listing all processes
Usage:
benchmark_proc_get_all.py
"""
import os
MYDIR = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, os.path.join(MYDIR, ".."))
import time
from px import px_process
LAPS=20
def main():
t0 = time.time()
for iteration in range(LAPS):
px_process.get_all()
t1 = time.time()
dt_seconds = t1 - t0
print("Getting all processes takes {:.3f}s".format(dt_seconds/LAPS))
if __name__ == "__main__":
main()
<commit_msg>Use ms rather than s for process getting benchmark<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Benchmark listing all processes
Usage:
benchmark_proc_get_all.py
"""
import os
MYDIR = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, os.path.join(MYDIR, ".."))
import time
from px import px_process
LAPS=20
def main():
t0 = time.time()
for iteration in range(LAPS):
px_process.get_all()
t1 = time.time()
dt_seconds = t1 - t0
print("Getting all processes takes {:.0f}ms".format(1000*dt_seconds/LAPS))
if __name__ == "__main__":
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Benchmark listing all processes
Usage:
benchmark_proc_get_all.py
"""
import os
MYDIR = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, os.path.join(MYDIR, ".."))
import time
from px import px_process
LAPS=20
def main():
t0 = time.time()
for iteration in range(LAPS):
px_process.get_all()
t1 = time.time()
dt_seconds = t1 - t0
print("Getting all processes takes {:.3f}s".format(dt_seconds/LAPS))
if __name__ == "__main__":
main()
Use ms rather than s for process getting benchmark#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Benchmark listing all processes
Usage:
benchmark_proc_get_all.py
"""
import os
MYDIR = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, os.path.join(MYDIR, ".."))
import time
from px import px_process
LAPS=20
def main():
t0 = time.time()
for iteration in range(LAPS):
px_process.get_all()
t1 = time.time()
dt_seconds = t1 - t0
print("Getting all processes takes {:.0f}ms".format(1000*dt_seconds/LAPS))
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Benchmark listing all processes
Usage:
benchmark_proc_get_all.py
"""
import os
MYDIR = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, os.path.join(MYDIR, ".."))
import time
from px import px_process
LAPS=20
def main():
    """Benchmark px_process.get_all(): run it LAPS times and print the mean seconds per call."""
    start = time.time()
    for _ in range(LAPS):
        px_process.get_all()
    per_lap = (time.time() - start) / LAPS
    print("Getting all processes takes {:.3f}s".format(per_lap))
if __name__ == "__main__":
main()
<commit_msg>Use ms rather than s for process getting benchmark<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Benchmark listing all processes
Usage:
benchmark_proc_get_all.py
"""
import os
MYDIR = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, os.path.join(MYDIR, ".."))
import time
from px import px_process
LAPS=20
def main():
    """Benchmark px_process.get_all(): run it LAPS times and print the mean milliseconds per call."""
    start = time.time()
    for _ in range(LAPS):
        px_process.get_all()
    elapsed = time.time() - start
    print("Getting all processes takes {:.0f}ms".format(1000 * elapsed / LAPS))
if __name__ == "__main__":
main()
|
b49844f2c6da136d4b4c350b2b176e5f4cbdcd88 | reversebinary.py | reversebinary.py | #!/bin/python
"""
reversebinary puzzle for Spotify.com
v1
Jose Antonio Navarrete
You can find me at janavarretecristino@gmail.com
Follow me on twitter @joseanavarrete
"""
import unittest
MAX_VALUE = 1000000000
def reverse_binary(n):
"""
Receives an integer (n), converts it to its reverse binary
"""
if not 1 <= n <= MAX_VALUE:
raise ValueError
binary_str = bin(n) # '0bXXXX' where XXXX is n in binary
return int(binary_str[::-1][:-2], 2)
class ReverseBinaryTest(unittest.TestCase):
def test_reverse_binary(self):
self.assertEqual(reverse_binary(1), 1)
self.assertEqual(reverse_binary(13), 11)
self.assertEqual(reverse_binary(47), 61)
def test_wrong_input(self):
with self.assertRaises(ValueError):
reverse_binary(0)
reverse_binary(MAX_VALUE)
def main():
unittest.main()
if __name__ == '__main__':
main()
| #!/bin/python
"""
reversebinary puzzle for Spotify.com
v1
Jose Antonio Navarrete
You can find me at janavarretecristino@gmail.com
Follow me on twitter @joseanavarrete
"""
import unittest
MAX_VALUE = 1000000000
def reverse_binary(n):
"""
Receives an integer (n), converts it to its reverse binary
"""
if not 1 <= n <= MAX_VALUE:
raise ValueError
binary_str = bin(n) # '0bXXXX' where XXXX is n in binary
return int(binary_str[::-1][:-2], 2)
class ReverseBinaryTest(unittest.TestCase):
def test_reverse_binary(self):
self.assertEqual(reverse_binary(1), 1)
self.assertEqual(reverse_binary(13), 11)
self.assertEqual(reverse_binary(47), 61)
def test_wrong_input(self):
with self.assertRaises(ValueError):
reverse_binary(0)
reverse_binary(MAX_VALUE)
def main():
unittest.main()
if __name__ == '__main__':
main()
| Add breakline to better reading | Add breakline to better reading
| Python | mit | josenava/spotify_puzzle | #!/bin/python
"""
reversebinary puzzle for Spotify.com
v1
Jose Antonio Navarrete
You can find me at janavarretecristino@gmail.com
Follow me on twitter @joseanavarrete
"""
import unittest
MAX_VALUE = 1000000000
def reverse_binary(n):
"""
Receives an integer (n), converts it to its reverse binary
"""
if not 1 <= n <= MAX_VALUE:
raise ValueError
binary_str = bin(n) # '0bXXXX' where XXXX is n in binary
return int(binary_str[::-1][:-2], 2)
class ReverseBinaryTest(unittest.TestCase):
def test_reverse_binary(self):
self.assertEqual(reverse_binary(1), 1)
self.assertEqual(reverse_binary(13), 11)
self.assertEqual(reverse_binary(47), 61)
def test_wrong_input(self):
with self.assertRaises(ValueError):
reverse_binary(0)
reverse_binary(MAX_VALUE)
def main():
unittest.main()
if __name__ == '__main__':
main()
Add breakline to better reading | #!/bin/python
"""
reversebinary puzzle for Spotify.com
v1
Jose Antonio Navarrete
You can find me at janavarretecristino@gmail.com
Follow me on twitter @joseanavarrete
"""
import unittest
MAX_VALUE = 1000000000


def reverse_binary(n):
    """Return the integer whose binary digits are those of n, reversed.

    Args:
        n: integer in the inclusive range [1, MAX_VALUE].

    Returns:
        The value obtained by reversing n's binary representation
        (e.g. 13 -> 0b1101 -> 0b1011 -> 11).

    Raises:
        ValueError: if n lies outside [1, MAX_VALUE].
    """
    if not 1 <= n <= MAX_VALUE:
        raise ValueError(
            "n must be between 1 and {}, got {}".format(MAX_VALUE, n))
    # format(n, 'b') yields the binary digits without the '0b' prefix,
    # so we can reverse and re-parse in base 2 directly -- no need to
    # slice the reversed prefix off as the old '[::-1][:-2]' trick did.
    return int(format(n, 'b')[::-1], 2)
class ReverseBinaryTest(unittest.TestCase):
    """Unit tests for reverse_binary."""

    def test_reverse_binary(self):
        # Known pairs from the puzzle statement.
        self.assertEqual(reverse_binary(1), 1)
        self.assertEqual(reverse_binary(13), 11)
        self.assertEqual(reverse_binary(47), 61)

    def test_wrong_input(self):
        # Each failing value needs its own assertRaises context: in the old
        # version the second call sat after the first inside one context and
        # was never executed, which hid the fact that reverse_binary(MAX_VALUE)
        # is actually a *valid* input (the range check is inclusive).
        with self.assertRaises(ValueError):
            reverse_binary(0)
        with self.assertRaises(ValueError):
            reverse_binary(MAX_VALUE + 1)
        # The upper boundary itself must be accepted without raising.
        reverse_binary(MAX_VALUE)
def main():
unittest.main()
if __name__ == '__main__':
main()
| <commit_before>#!/bin/python
"""
reversebinary puzzle for Spotify.com
v1
Jose Antonio Navarrete
You can find me at janavarretecristino@gmail.com
Follow me on twitter @joseanavarrete
"""
import unittest
MAX_VALUE = 1000000000
def reverse_binary(n):
"""
Receives an integer (n), converts it to its reverse binary
"""
if not 1 <= n <= MAX_VALUE:
raise ValueError
binary_str = bin(n) # '0bXXXX' where XXXX is n in binary
return int(binary_str[::-1][:-2], 2)
class ReverseBinaryTest(unittest.TestCase):
def test_reverse_binary(self):
self.assertEqual(reverse_binary(1), 1)
self.assertEqual(reverse_binary(13), 11)
self.assertEqual(reverse_binary(47), 61)
def test_wrong_input(self):
with self.assertRaises(ValueError):
reverse_binary(0)
reverse_binary(MAX_VALUE)
def main():
unittest.main()
if __name__ == '__main__':
main()
<commit_msg>Add breakline to better reading<commit_after> | #!/bin/python
"""
reversebinary puzzle for Spotify.com
v1
Jose Antonio Navarrete
You can find me at janavarretecristino@gmail.com
Follow me on twitter @joseanavarrete
"""
import unittest
MAX_VALUE = 1000000000
def reverse_binary(n):
"""
Receives an integer (n), converts it to its reverse binary
"""
if not 1 <= n <= MAX_VALUE:
raise ValueError
binary_str = bin(n) # '0bXXXX' where XXXX is n in binary
return int(binary_str[::-1][:-2], 2)
class ReverseBinaryTest(unittest.TestCase):
def test_reverse_binary(self):
self.assertEqual(reverse_binary(1), 1)
self.assertEqual(reverse_binary(13), 11)
self.assertEqual(reverse_binary(47), 61)
def test_wrong_input(self):
with self.assertRaises(ValueError):
reverse_binary(0)
reverse_binary(MAX_VALUE)
def main():
unittest.main()
if __name__ == '__main__':
main()
| #!/bin/python
"""
reversebinary puzzle for Spotify.com
v1
Jose Antonio Navarrete
You can find me at janavarretecristino@gmail.com
Follow me on twitter @joseanavarrete
"""
import unittest
MAX_VALUE = 1000000000
def reverse_binary(n):
"""
Receives an integer (n), converts it to its reverse binary
"""
if not 1 <= n <= MAX_VALUE:
raise ValueError
binary_str = bin(n) # '0bXXXX' where XXXX is n in binary
return int(binary_str[::-1][:-2], 2)
class ReverseBinaryTest(unittest.TestCase):
def test_reverse_binary(self):
self.assertEqual(reverse_binary(1), 1)
self.assertEqual(reverse_binary(13), 11)
self.assertEqual(reverse_binary(47), 61)
def test_wrong_input(self):
with self.assertRaises(ValueError):
reverse_binary(0)
reverse_binary(MAX_VALUE)
def main():
unittest.main()
if __name__ == '__main__':
main()
Add breakline to better reading#!/bin/python
"""
reversebinary puzzle for Spotify.com
v1
Jose Antonio Navarrete
You can find me at janavarretecristino@gmail.com
Follow me on twitter @joseanavarrete
"""
import unittest
MAX_VALUE = 1000000000
def reverse_binary(n):
"""
Receives an integer (n), converts it to its reverse binary
"""
if not 1 <= n <= MAX_VALUE:
raise ValueError
binary_str = bin(n) # '0bXXXX' where XXXX is n in binary
return int(binary_str[::-1][:-2], 2)
class ReverseBinaryTest(unittest.TestCase):
def test_reverse_binary(self):
self.assertEqual(reverse_binary(1), 1)
self.assertEqual(reverse_binary(13), 11)
self.assertEqual(reverse_binary(47), 61)
def test_wrong_input(self):
with self.assertRaises(ValueError):
reverse_binary(0)
reverse_binary(MAX_VALUE)
def main():
unittest.main()
if __name__ == '__main__':
main()
| <commit_before>#!/bin/python
"""
reversebinary puzzle for Spotify.com
v1
Jose Antonio Navarrete
You can find me at janavarretecristino@gmail.com
Follow me on twitter @joseanavarrete
"""
import unittest
MAX_VALUE = 1000000000
def reverse_binary(n):
"""
Receives an integer (n), converts it to its reverse binary
"""
if not 1 <= n <= MAX_VALUE:
raise ValueError
binary_str = bin(n) # '0bXXXX' where XXXX is n in binary
return int(binary_str[::-1][:-2], 2)
class ReverseBinaryTest(unittest.TestCase):
def test_reverse_binary(self):
self.assertEqual(reverse_binary(1), 1)
self.assertEqual(reverse_binary(13), 11)
self.assertEqual(reverse_binary(47), 61)
def test_wrong_input(self):
with self.assertRaises(ValueError):
reverse_binary(0)
reverse_binary(MAX_VALUE)
def main():
unittest.main()
if __name__ == '__main__':
main()
<commit_msg>Add breakline to better reading<commit_after>#!/bin/python
"""
reversebinary puzzle for Spotify.com
v1
Jose Antonio Navarrete
You can find me at janavarretecristino@gmail.com
Follow me on twitter @joseanavarrete
"""
import unittest
MAX_VALUE = 1000000000
def reverse_binary(n):
"""
Receives an integer (n), converts it to its reverse binary
"""
if not 1 <= n <= MAX_VALUE:
raise ValueError
binary_str = bin(n) # '0bXXXX' where XXXX is n in binary
return int(binary_str[::-1][:-2], 2)
class ReverseBinaryTest(unittest.TestCase):
def test_reverse_binary(self):
self.assertEqual(reverse_binary(1), 1)
self.assertEqual(reverse_binary(13), 11)
self.assertEqual(reverse_binary(47), 61)
def test_wrong_input(self):
with self.assertRaises(ValueError):
reverse_binary(0)
reverse_binary(MAX_VALUE)
def main():
unittest.main()
if __name__ == '__main__':
main()
|
4f35af8502b8d091cd88ab61c0aca343918ac9f1 | ec2api/cmd/__init__.py | ec2api/cmd/__init__.py | # Copyright 2014
# The Cloudscaling Group, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import eventlet
eventlet.monkey_patch(os=False)
| Enable greenthreading for ec2api services | Enable greenthreading for ec2api services
Since ec2api and metadata services based on greenthreading, it have to
be initialized properly. This patch initializes it for cmd modules,
which allows to debug services from IDE.
Change-Id: Ic7ae69fbf5b58cfa4df822cd5d42f7c2cf45d848
| Python | apache-2.0 | openstack/ec2-api,stackforge/ec2-api,hayderimran7/ec2-api,stackforge/ec2-api,hayderimran7/ec2-api,openstack/ec2-api | Enable greenthreading for ec2api services
Since ec2api and metadata services based on greenthreading, it have to
be initialized properly. This patch initializes it for cmd modules,
which allows to debug services from IDE.
Change-Id: Ic7ae69fbf5b58cfa4df822cd5d42f7c2cf45d848 | # Copyright 2014
# The Cloudscaling Group, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import eventlet
eventlet.monkey_patch(os=False)
| <commit_before><commit_msg>Enable greenthreading for ec2api services
Since ec2api and metadata services based on greenthreading, it have to
be initialized properly. This patch initializes it for cmd modules,
which allows to debug services from IDE.
Change-Id: Ic7ae69fbf5b58cfa4df822cd5d42f7c2cf45d848<commit_after> | # Copyright 2014
# The Cloudscaling Group, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import eventlet
eventlet.monkey_patch(os=False)
| Enable greenthreading for ec2api services
Since ec2api and metadata services based on greenthreading, it have to
be initialized properly. This patch initializes it for cmd modules,
which allows to debug services from IDE.
Change-Id: Ic7ae69fbf5b58cfa4df822cd5d42f7c2cf45d848# Copyright 2014
# The Cloudscaling Group, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import eventlet
eventlet.monkey_patch(os=False)
| <commit_before><commit_msg>Enable greenthreading for ec2api services
Since ec2api and metadata services based on greenthreading, it have to
be initialized properly. This patch initializes it for cmd modules,
which allows to debug services from IDE.
Change-Id: Ic7ae69fbf5b58cfa4df822cd5d42f7c2cf45d848<commit_after># Copyright 2014
# The Cloudscaling Group, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import eventlet
eventlet.monkey_patch(os=False)
| |
92cb843c1b6ada9b63038ed1ce22f83ee6146aff | jazzy/scope.py | jazzy/scope.py | import uuid
class Scope:
def __init__(self):
self.pc = 0;
self.variables = {}
self.lvalue = self
self.rvalue = self
self.stack = [1,2,3]
self.name = uuid.uuid1()
def GetVar(self, name):
if name in self.variables:
return self.variables[name]
else:
return None
def SetVar(self, name, value):
self.variables[name] = value
def Step(self):
self.pc = self.pc+1
def PC(self,number=None):
if number is not None:
self.pc = number
return self.pc
def GetStackTop(self):
return self.stack[-1]
def LValue(self, newScope = None ):
if newScope is not None:
self.lvalue = newScope
return self.lvalue
def RValue(self, newScope = None ):
if newScope is not None:
self.rvalue = newScope
return self.rvalue
| import uuid
class Scope:
def __init__(self):
self.pc = 0;
self.variables = {}
self.lvalue = self
self.rvalue = self
self.stack = [1,2,3]
self.name = uuid.uuid1()
def GetVar(self, name):
if name in self.variables:
return self.variables[name]
else:
return None
def SetVar(self, name, value):
self.variables[name] = value
def GetAddress(self, name):
if name in self.variables:
return list(self.variables.keys()).index(name)
else:
return None
def GetVarFromAddress(self, addr):
_vars = list(self.variables.keys())
if addr < len(_vars):
name = list(self.variables.keys())[addr]
return (name, self.variables[name])
else:
return None
def Step(self):
self.pc = self.pc+1
def PC(self,number=None):
if number is not None:
self.pc = number
return self.pc
def GetStackTop(self):
return self.stack[-1]
def LValue(self, newScope = None ):
if newScope is not None:
self.lvalue = newScope
return self.lvalue
def RValue(self, newScope = None ):
if newScope is not None:
self.rvalue = newScope
return self.rvalue
| Add functions to get/set variables addresses | Add functions to get/set variables addresses
Since some of the jaz commands depend on the address of an variable,
made function to obtain it.
| Python | mit | joewashear007/jazzy | import uuid
class Scope:
def __init__(self):
self.pc = 0;
self.variables = {}
self.lvalue = self
self.rvalue = self
self.stack = [1,2,3]
self.name = uuid.uuid1()
def GetVar(self, name):
if name in self.variables:
return self.variables[name]
else:
return None
def SetVar(self, name, value):
self.variables[name] = value
def Step(self):
self.pc = self.pc+1
def PC(self,number=None):
if number is not None:
self.pc = number
return self.pc
def GetStackTop(self):
return self.stack[-1]
def LValue(self, newScope = None ):
if newScope is not None:
self.lvalue = newScope
return self.lvalue
def RValue(self, newScope = None ):
if newScope is not None:
self.rvalue = newScope
return self.rvalue
Add functions to get/set variables addresses
Since some of the jaz commands depend on the address of an variable,
made function to obtain it. | import uuid
class Scope:
def __init__(self):
self.pc = 0;
self.variables = {}
self.lvalue = self
self.rvalue = self
self.stack = [1,2,3]
self.name = uuid.uuid1()
def GetVar(self, name):
if name in self.variables:
return self.variables[name]
else:
return None
def SetVar(self, name, value):
self.variables[name] = value
def GetAddress(self, name):
if name in self.variables:
return list(self.variables.keys()).index(name)
else:
return None
def GetVarFromAddress(self, addr):
_vars = list(self.variables.keys())
if addr < len(_vars):
name = list(self.variables.keys())[addr]
return (name, self.variables[name])
else:
return None
def Step(self):
self.pc = self.pc+1
def PC(self,number=None):
if number is not None:
self.pc = number
return self.pc
def GetStackTop(self):
return self.stack[-1]
def LValue(self, newScope = None ):
if newScope is not None:
self.lvalue = newScope
return self.lvalue
def RValue(self, newScope = None ):
if newScope is not None:
self.rvalue = newScope
return self.rvalue
| <commit_before>import uuid
class Scope:
def __init__(self):
self.pc = 0;
self.variables = {}
self.lvalue = self
self.rvalue = self
self.stack = [1,2,3]
self.name = uuid.uuid1()
def GetVar(self, name):
if name in self.variables:
return self.variables[name]
else:
return None
def SetVar(self, name, value):
self.variables[name] = value
def Step(self):
self.pc = self.pc+1
def PC(self,number=None):
if number is not None:
self.pc = number
return self.pc
def GetStackTop(self):
return self.stack[-1]
def LValue(self, newScope = None ):
if newScope is not None:
self.lvalue = newScope
return self.lvalue
def RValue(self, newScope = None ):
if newScope is not None:
self.rvalue = newScope
return self.rvalue
<commit_msg>Add functions to get/set variables addresses
Since some of the jaz commands depend on the address of an variable,
made function to obtain it.<commit_after> | import uuid
class Scope:
def __init__(self):
self.pc = 0;
self.variables = {}
self.lvalue = self
self.rvalue = self
self.stack = [1,2,3]
self.name = uuid.uuid1()
def GetVar(self, name):
if name in self.variables:
return self.variables[name]
else:
return None
def SetVar(self, name, value):
self.variables[name] = value
def GetAddress(self, name):
if name in self.variables:
return list(self.variables.keys()).index(name)
else:
return None
def GetVarFromAddress(self, addr):
_vars = list(self.variables.keys())
if addr < len(_vars):
name = list(self.variables.keys())[addr]
return (name, self.variables[name])
else:
return None
def Step(self):
self.pc = self.pc+1
def PC(self,number=None):
if number is not None:
self.pc = number
return self.pc
def GetStackTop(self):
return self.stack[-1]
def LValue(self, newScope = None ):
if newScope is not None:
self.lvalue = newScope
return self.lvalue
def RValue(self, newScope = None ):
if newScope is not None:
self.rvalue = newScope
return self.rvalue
| import uuid
class Scope:
def __init__(self):
self.pc = 0;
self.variables = {}
self.lvalue = self
self.rvalue = self
self.stack = [1,2,3]
self.name = uuid.uuid1()
def GetVar(self, name):
if name in self.variables:
return self.variables[name]
else:
return None
def SetVar(self, name, value):
self.variables[name] = value
def Step(self):
self.pc = self.pc+1
def PC(self,number=None):
if number is not None:
self.pc = number
return self.pc
def GetStackTop(self):
return self.stack[-1]
def LValue(self, newScope = None ):
if newScope is not None:
self.lvalue = newScope
return self.lvalue
def RValue(self, newScope = None ):
if newScope is not None:
self.rvalue = newScope
return self.rvalue
Add functions to get/set variables addresses
Since some of the jaz commands depend on the address of an variable,
made function to obtain it.import uuid
class Scope:
def __init__(self):
self.pc = 0;
self.variables = {}
self.lvalue = self
self.rvalue = self
self.stack = [1,2,3]
self.name = uuid.uuid1()
def GetVar(self, name):
if name in self.variables:
return self.variables[name]
else:
return None
def SetVar(self, name, value):
self.variables[name] = value
def GetAddress(self, name):
if name in self.variables:
return list(self.variables.keys()).index(name)
else:
return None
def GetVarFromAddress(self, addr):
_vars = list(self.variables.keys())
if addr < len(_vars):
name = list(self.variables.keys())[addr]
return (name, self.variables[name])
else:
return None
def Step(self):
self.pc = self.pc+1
def PC(self,number=None):
if number is not None:
self.pc = number
return self.pc
def GetStackTop(self):
return self.stack[-1]
def LValue(self, newScope = None ):
if newScope is not None:
self.lvalue = newScope
return self.lvalue
def RValue(self, newScope = None ):
if newScope is not None:
self.rvalue = newScope
return self.rvalue
| <commit_before>import uuid
class Scope:
def __init__(self):
self.pc = 0;
self.variables = {}
self.lvalue = self
self.rvalue = self
self.stack = [1,2,3]
self.name = uuid.uuid1()
def GetVar(self, name):
if name in self.variables:
return self.variables[name]
else:
return None
def SetVar(self, name, value):
self.variables[name] = value
def Step(self):
self.pc = self.pc+1
def PC(self,number=None):
if number is not None:
self.pc = number
return self.pc
def GetStackTop(self):
return self.stack[-1]
def LValue(self, newScope = None ):
if newScope is not None:
self.lvalue = newScope
return self.lvalue
def RValue(self, newScope = None ):
if newScope is not None:
self.rvalue = newScope
return self.rvalue
<commit_msg>Add functions to get/set variables addresses
Since some of the jaz commands depend on the address of an variable,
made function to obtain it.<commit_after>import uuid
class Scope:
def __init__(self):
self.pc = 0;
self.variables = {}
self.lvalue = self
self.rvalue = self
self.stack = [1,2,3]
self.name = uuid.uuid1()
def GetVar(self, name):
if name in self.variables:
return self.variables[name]
else:
return None
def SetVar(self, name, value):
self.variables[name] = value
def GetAddress(self, name):
if name in self.variables:
return list(self.variables.keys()).index(name)
else:
return None
def GetVarFromAddress(self, addr):
_vars = list(self.variables.keys())
if addr < len(_vars):
name = list(self.variables.keys())[addr]
return (name, self.variables[name])
else:
return None
def Step(self):
self.pc = self.pc+1
def PC(self,number=None):
if number is not None:
self.pc = number
return self.pc
def GetStackTop(self):
return self.stack[-1]
def LValue(self, newScope = None ):
if newScope is not None:
self.lvalue = newScope
return self.lvalue
def RValue(self, newScope = None ):
if newScope is not None:
self.rvalue = newScope
return self.rvalue
|
cab8fc7ae9c7e162d555c107ed415f18782512e7 | dsub/_dsub_version.py | dsub/_dsub_version.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5.dev0'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5'
| Update dsub version to 0.4.5 | Update dsub version to 0.4.5
PiperOrigin-RevId: 393155372
| Python | apache-2.0 | DataBiosphere/dsub,DataBiosphere/dsub | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5.dev0'
Update dsub version to 0.4.5
PiperOrigin-RevId: 393155372 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5'
| <commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5.dev0'
<commit_msg>Update dsub version to 0.4.5
PiperOrigin-RevId: 393155372<commit_after> | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5.dev0'
Update dsub version to 0.4.5
PiperOrigin-RevId: 393155372# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5'
| <commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5.dev0'
<commit_msg>Update dsub version to 0.4.5
PiperOrigin-RevId: 393155372<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5'
|
c407c067495d76ebed7c36ef005861c80fdcfdce | textx/__init__.py | textx/__init__.py | __version__ = "1.6.dev"
| from textx.metamodel import metamodel_from_file, metamodel_from_str # noqa
from textx.langapi import get_language, iter_languages # noqa
__version__ = "1.6.dev"
| Make metamodel factory methods and lang API available in textx package. | Make metamodel factory methods and lang API available in textx package.
| Python | mit | igordejanovic/textX,igordejanovic/textX,igordejanovic/textX | __version__ = "1.6.dev"
Make metamodel factory methods and lang API available in textx package. | from textx.metamodel import metamodel_from_file, metamodel_from_str # noqa
from textx.langapi import get_language, iter_languages # noqa
__version__ = "1.6.dev"
| <commit_before>__version__ = "1.6.dev"
<commit_msg>Make metamodel factory methods and lang API available in textx package.<commit_after> | from textx.metamodel import metamodel_from_file, metamodel_from_str # noqa
from textx.langapi import get_language, iter_languages # noqa
__version__ = "1.6.dev"
| __version__ = "1.6.dev"
Make metamodel factory methods and lang API available in textx package.from textx.metamodel import metamodel_from_file, metamodel_from_str # noqa
from textx.langapi import get_language, iter_languages # noqa
__version__ = "1.6.dev"
| <commit_before>__version__ = "1.6.dev"
<commit_msg>Make metamodel factory methods and lang API available in textx package.<commit_after>from textx.metamodel import metamodel_from_file, metamodel_from_str # noqa
from textx.langapi import get_language, iter_languages # noqa
__version__ = "1.6.dev"
|
0e044d1ad8b6fb2b0ac2126bb0fccfa05de9da14 | file_transfer/datamover/__init__.py | file_transfer/datamover/__init__.py |
from .connectors import (GithubConnector, S3Connector,
FTPConnector, LocalConnector)
from .transporters import (BaltradToS3, LocalToS3)
from .s3enram import S3EnramHandler
from .utils import (parse_filename, extract_month_updates,
parse_coverage_month, coverage_to_csv)
|
from .connectors import (GithubConnector, S3Connector,
FTPConnector, LocalConnector)
from .transporters import (BaltradToS3, LocalToS3)
from .s3enram import S3EnramHandler
from .utils import (parse_filename, extract_month_updates,
parse_coverage_month, coverage_to_csv,
most_recent_to_csv)
| Add csv handling to module | Add csv handling to module
| Python | mit | enram/data-repository,enram/data-repository,enram/data-repository,enram/infrastructure,enram/data-repository,enram/infrastructure |
from .connectors import (GithubConnector, S3Connector,
FTPConnector, LocalConnector)
from .transporters import (BaltradToS3, LocalToS3)
from .s3enram import S3EnramHandler
from .utils import (parse_filename, extract_month_updates,
parse_coverage_month, coverage_to_csv)
Add csv handling to module |
from .connectors import (GithubConnector, S3Connector,
FTPConnector, LocalConnector)
from .transporters import (BaltradToS3, LocalToS3)
from .s3enram import S3EnramHandler
from .utils import (parse_filename, extract_month_updates,
parse_coverage_month, coverage_to_csv,
most_recent_to_csv)
| <commit_before>
from .connectors import (GithubConnector, S3Connector,
FTPConnector, LocalConnector)
from .transporters import (BaltradToS3, LocalToS3)
from .s3enram import S3EnramHandler
from .utils import (parse_filename, extract_month_updates,
parse_coverage_month, coverage_to_csv)
<commit_msg>Add csv handling to module<commit_after> |
from .connectors import (GithubConnector, S3Connector,
FTPConnector, LocalConnector)
from .transporters import (BaltradToS3, LocalToS3)
from .s3enram import S3EnramHandler
from .utils import (parse_filename, extract_month_updates,
parse_coverage_month, coverage_to_csv,
most_recent_to_csv)
|
from .connectors import (GithubConnector, S3Connector,
FTPConnector, LocalConnector)
from .transporters import (BaltradToS3, LocalToS3)
from .s3enram import S3EnramHandler
from .utils import (parse_filename, extract_month_updates,
parse_coverage_month, coverage_to_csv)
Add csv handling to module
from .connectors import (GithubConnector, S3Connector,
FTPConnector, LocalConnector)
from .transporters import (BaltradToS3, LocalToS3)
from .s3enram import S3EnramHandler
from .utils import (parse_filename, extract_month_updates,
parse_coverage_month, coverage_to_csv,
most_recent_to_csv)
| <commit_before>
from .connectors import (GithubConnector, S3Connector,
FTPConnector, LocalConnector)
from .transporters import (BaltradToS3, LocalToS3)
from .s3enram import S3EnramHandler
from .utils import (parse_filename, extract_month_updates,
parse_coverage_month, coverage_to_csv)
<commit_msg>Add csv handling to module<commit_after>
from .connectors import (GithubConnector, S3Connector,
FTPConnector, LocalConnector)
from .transporters import (BaltradToS3, LocalToS3)
from .s3enram import S3EnramHandler
from .utils import (parse_filename, extract_month_updates,
parse_coverage_month, coverage_to_csv,
most_recent_to_csv)
|
abc1d2095f18f6c7ff129f3b8bf9eae2d7a6239a | skimage/io/tests/test_io.py | skimage/io/tests/test_io.py | import os
from numpy.testing import *
import numpy as np
import skimage.io as io
from skimage import data_dir
def test_stack_basic():
x = np.arange(12).reshape(3, 4)
io.push(x)
assert_array_equal(io.pop(), x)
@raises(ValueError)
def test_stack_non_array():
io.push([[1, 2, 3]])
def test_imread_url():
image_url = 'file:{0}{0}{1}{0}camera.png'.format(os.path.sep, data_dir)
image = io.imread(image_url)
assert image.shape == (512, 512)
if __name__ == "__main__":
run_module_suite()
| import os
from numpy.testing import *
import numpy as np
import skimage.io as io
from skimage import data_dir
def test_stack_basic():
x = np.arange(12).reshape(3, 4)
io.push(x)
assert_array_equal(io.pop(), x)
@raises(ValueError)
def test_stack_non_array():
io.push([[1, 2, 3]])
def test_imread_url():
# tweak data path so that file URI works on both unix and windows.
data_path = data_dir.lstrip(os.path.sep)
data_path = data_path.replace(os.path.sep, '/')
image_url = 'file:///{0}/camera.png'.format(data_path)
image = io.imread(image_url)
assert image.shape == (512, 512)
if __name__ == "__main__":
run_module_suite()
| Fix file URI in test (2nd attempt) | BUG: Fix file URI in test (2nd attempt)
| Python | bsd-3-clause | WarrenWeckesser/scikits-image,SamHames/scikit-image,chintak/scikit-image,chintak/scikit-image,juliusbierk/scikit-image,oew1v07/scikit-image,blink1073/scikit-image,bsipocz/scikit-image,bennlich/scikit-image,youprofit/scikit-image,dpshelio/scikit-image,newville/scikit-image,dpshelio/scikit-image,GaZ3ll3/scikit-image,ofgulban/scikit-image,robintw/scikit-image,ClinicalGraphics/scikit-image,Hiyorimi/scikit-image,SamHames/scikit-image,warmspringwinds/scikit-image,almarklein/scikit-image,newville/scikit-image,ajaybhat/scikit-image,keflavich/scikit-image,keflavich/scikit-image,warmspringwinds/scikit-image,robintw/scikit-image,GaZ3ll3/scikit-image,paalge/scikit-image,ClinicalGraphics/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,paalge/scikit-image,bennlich/scikit-image,blink1073/scikit-image,SamHames/scikit-image,chintak/scikit-image,almarklein/scikit-image,michaelpacer/scikit-image,almarklein/scikit-image,oew1v07/scikit-image,bsipocz/scikit-image,Britefury/scikit-image,michaelpacer/scikit-image,rjeli/scikit-image,michaelaye/scikit-image,vighneshbirodkar/scikit-image,juliusbierk/scikit-image,SamHames/scikit-image,rjeli/scikit-image,chintak/scikit-image,Hiyorimi/scikit-image,Britefury/scikit-image,chriscrosscutler/scikit-image,jwiggins/scikit-image,emon10005/scikit-image,chriscrosscutler/scikit-image,michaelaye/scikit-image,Midafi/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,ajaybhat/scikit-image,paalge/scikit-image,emon10005/scikit-image,rjeli/scikit-image,youprofit/scikit-image,pratapvardhan/scikit-image,almarklein/scikit-image,Midafi/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image | import os
from numpy.testing import *
import numpy as np
import skimage.io as io
from skimage import data_dir
def test_stack_basic():
x = np.arange(12).reshape(3, 4)
io.push(x)
assert_array_equal(io.pop(), x)
@raises(ValueError)
def test_stack_non_array():
io.push([[1, 2, 3]])
def test_imread_url():
image_url = 'file:{0}{0}{1}{0}camera.png'.format(os.path.sep, data_dir)
image = io.imread(image_url)
assert image.shape == (512, 512)
if __name__ == "__main__":
run_module_suite()
BUG: Fix file URI in test (2nd attempt) | import os
from numpy.testing import *
import numpy as np
import skimage.io as io
from skimage import data_dir
def test_stack_basic():
x = np.arange(12).reshape(3, 4)
io.push(x)
assert_array_equal(io.pop(), x)
@raises(ValueError)
def test_stack_non_array():
io.push([[1, 2, 3]])
def test_imread_url():
# tweak data path so that file URI works on both unix and windows.
data_path = data_dir.lstrip(os.path.sep)
data_path = data_path.replace(os.path.sep, '/')
image_url = 'file:///{0}/camera.png'.format(data_path)
image = io.imread(image_url)
assert image.shape == (512, 512)
if __name__ == "__main__":
run_module_suite()
| <commit_before>import os
from numpy.testing import *
import numpy as np
import skimage.io as io
from skimage import data_dir
def test_stack_basic():
x = np.arange(12).reshape(3, 4)
io.push(x)
assert_array_equal(io.pop(), x)
@raises(ValueError)
def test_stack_non_array():
io.push([[1, 2, 3]])
def test_imread_url():
image_url = 'file:{0}{0}{1}{0}camera.png'.format(os.path.sep, data_dir)
image = io.imread(image_url)
assert image.shape == (512, 512)
if __name__ == "__main__":
run_module_suite()
<commit_msg>BUG: Fix file URI in test (2nd attempt)<commit_after> | import os
from numpy.testing import *
import numpy as np
import skimage.io as io
from skimage import data_dir
def test_stack_basic():
x = np.arange(12).reshape(3, 4)
io.push(x)
assert_array_equal(io.pop(), x)
@raises(ValueError)
def test_stack_non_array():
io.push([[1, 2, 3]])
def test_imread_url():
# tweak data path so that file URI works on both unix and windows.
data_path = data_dir.lstrip(os.path.sep)
data_path = data_path.replace(os.path.sep, '/')
image_url = 'file:///{0}/camera.png'.format(data_path)
image = io.imread(image_url)
assert image.shape == (512, 512)
if __name__ == "__main__":
run_module_suite()
| import os
from numpy.testing import *
import numpy as np
import skimage.io as io
from skimage import data_dir
def test_stack_basic():
x = np.arange(12).reshape(3, 4)
io.push(x)
assert_array_equal(io.pop(), x)
@raises(ValueError)
def test_stack_non_array():
io.push([[1, 2, 3]])
def test_imread_url():
image_url = 'file:{0}{0}{1}{0}camera.png'.format(os.path.sep, data_dir)
image = io.imread(image_url)
assert image.shape == (512, 512)
if __name__ == "__main__":
run_module_suite()
BUG: Fix file URI in test (2nd attempt)import os
from numpy.testing import *
import numpy as np
import skimage.io as io
from skimage import data_dir
def test_stack_basic():
x = np.arange(12).reshape(3, 4)
io.push(x)
assert_array_equal(io.pop(), x)
@raises(ValueError)
def test_stack_non_array():
io.push([[1, 2, 3]])
def test_imread_url():
# tweak data path so that file URI works on both unix and windows.
data_path = data_dir.lstrip(os.path.sep)
data_path = data_path.replace(os.path.sep, '/')
image_url = 'file:///{0}/camera.png'.format(data_path)
image = io.imread(image_url)
assert image.shape == (512, 512)
if __name__ == "__main__":
run_module_suite()
| <commit_before>import os
from numpy.testing import *
import numpy as np
import skimage.io as io
from skimage import data_dir
def test_stack_basic():
x = np.arange(12).reshape(3, 4)
io.push(x)
assert_array_equal(io.pop(), x)
@raises(ValueError)
def test_stack_non_array():
io.push([[1, 2, 3]])
def test_imread_url():
image_url = 'file:{0}{0}{1}{0}camera.png'.format(os.path.sep, data_dir)
image = io.imread(image_url)
assert image.shape == (512, 512)
if __name__ == "__main__":
run_module_suite()
<commit_msg>BUG: Fix file URI in test (2nd attempt)<commit_after>import os
from numpy.testing import *
import numpy as np
import skimage.io as io
from skimage import data_dir
def test_stack_basic():
x = np.arange(12).reshape(3, 4)
io.push(x)
assert_array_equal(io.pop(), x)
@raises(ValueError)
def test_stack_non_array():
io.push([[1, 2, 3]])
def test_imread_url():
# tweak data path so that file URI works on both unix and windows.
data_path = data_dir.lstrip(os.path.sep)
data_path = data_path.replace(os.path.sep, '/')
image_url = 'file:///{0}/camera.png'.format(data_path)
image = io.imread(image_url)
assert image.shape == (512, 512)
if __name__ == "__main__":
run_module_suite()
|
f87cbadbbcfc9d67aa3e5d0662236c18f23ba63b | DataBase.py | DataBase.py |
''' Copyright 2015 RTeam (Edgar Kaziahmedov, Klim Kireev, Artem Yashuhin)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
|
'''
Copyright 2015 OneRTeam (Edgar Kaziahmedov, Klim Kireev, Artem Yashuhin)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
| Use source file with license 2 | Use source file with license 2
| Python | apache-2.0 | proffK/CourseManager |
''' Copyright 2015 RTeam (Edgar Kaziahmedov, Klim Kireev, Artem Yashuhin)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
Use source file with license 2 |
'''
Copyright 2015 OneRTeam (Edgar Kaziahmedov, Klim Kireev, Artem Yashuhin)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
| <commit_before>
''' Copyright 2015 RTeam (Edgar Kaziahmedov, Klim Kireev, Artem Yashuhin)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
<commit_msg>Use source file with license 2<commit_after> |
'''
Copyright 2015 OneRTeam (Edgar Kaziahmedov, Klim Kireev, Artem Yashuhin)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
|
''' Copyright 2015 RTeam (Edgar Kaziahmedov, Klim Kireev, Artem Yashuhin)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
Use source file with license 2
'''
Copyright 2015 OneRTeam (Edgar Kaziahmedov, Klim Kireev, Artem Yashuhin)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
| <commit_before>
''' Copyright 2015 RTeam (Edgar Kaziahmedov, Klim Kireev, Artem Yashuhin)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
<commit_msg>Use source file with license 2<commit_after>
'''
Copyright 2015 OneRTeam (Edgar Kaziahmedov, Klim Kireev, Artem Yashuhin)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
|
847ffbfaeb2c7ade9a6c74efad18cdc283b46fe7 | fedorasummerofhardware/models.py | fedorasummerofhardware/models.py | from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, Text, Boolean, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class Application(Base):
__tablename__ = 'applications'
id = Column(Integer, primary_key=True)
realname = Column(Text)
username = Column(Text, unique=True)
country = Column(Text)
state = Column(Text)
hardware = Column(Text)
shield = Column(Text)
date = Column(DateTime, default=datetime.now)
text = Column(Text)
approved = Column(Boolean, default=False)
address = Column(Text)
phone = Column(Date)
def __repr__(self):
return "<Application %s %s>" % (self.username, self.hardware)
| from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, Text, Boolean, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class Application(Base):
__tablename__ = 'applications'
id = Column(Integer, primary_key=True)
realname = Column(Text)
username = Column(Text, unique=True)
country = Column(Text)
state = Column(Text)
hardware = Column(Text)
shield = Column(Text)
date = Column(DateTime, default=datetime.now)
text = Column(Text)
approved = Column(Boolean, default=False)
address = Column(Text)
phone = Column(Text)
def __repr__(self):
return "<Application %s %s>" % (self.username, self.hardware)
| Fix the phone Column type | Fix the phone Column type
| Python | agpl-3.0 | fedora-infra/fedora-openhw2012,fedora-infra/fedora-openhw2012,fedora-infra/fedora-openhw2012 | from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, Text, Boolean, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class Application(Base):
__tablename__ = 'applications'
id = Column(Integer, primary_key=True)
realname = Column(Text)
username = Column(Text, unique=True)
country = Column(Text)
state = Column(Text)
hardware = Column(Text)
shield = Column(Text)
date = Column(DateTime, default=datetime.now)
text = Column(Text)
approved = Column(Boolean, default=False)
address = Column(Text)
phone = Column(Date)
def __repr__(self):
return "<Application %s %s>" % (self.username, self.hardware)
Fix the phone Column type | from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, Text, Boolean, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class Application(Base):
__tablename__ = 'applications'
id = Column(Integer, primary_key=True)
realname = Column(Text)
username = Column(Text, unique=True)
country = Column(Text)
state = Column(Text)
hardware = Column(Text)
shield = Column(Text)
date = Column(DateTime, default=datetime.now)
text = Column(Text)
approved = Column(Boolean, default=False)
address = Column(Text)
phone = Column(Text)
def __repr__(self):
return "<Application %s %s>" % (self.username, self.hardware)
| <commit_before>from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, Text, Boolean, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class Application(Base):
__tablename__ = 'applications'
id = Column(Integer, primary_key=True)
realname = Column(Text)
username = Column(Text, unique=True)
country = Column(Text)
state = Column(Text)
hardware = Column(Text)
shield = Column(Text)
date = Column(DateTime, default=datetime.now)
text = Column(Text)
approved = Column(Boolean, default=False)
address = Column(Text)
phone = Column(Date)
def __repr__(self):
return "<Application %s %s>" % (self.username, self.hardware)
<commit_msg>Fix the phone Column type<commit_after> | from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, Text, Boolean, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class Application(Base):
__tablename__ = 'applications'
id = Column(Integer, primary_key=True)
realname = Column(Text)
username = Column(Text, unique=True)
country = Column(Text)
state = Column(Text)
hardware = Column(Text)
shield = Column(Text)
date = Column(DateTime, default=datetime.now)
text = Column(Text)
approved = Column(Boolean, default=False)
address = Column(Text)
phone = Column(Text)
def __repr__(self):
return "<Application %s %s>" % (self.username, self.hardware)
| from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, Text, Boolean, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class Application(Base):
__tablename__ = 'applications'
id = Column(Integer, primary_key=True)
realname = Column(Text)
username = Column(Text, unique=True)
country = Column(Text)
state = Column(Text)
hardware = Column(Text)
shield = Column(Text)
date = Column(DateTime, default=datetime.now)
text = Column(Text)
approved = Column(Boolean, default=False)
address = Column(Text)
phone = Column(Date)
def __repr__(self):
return "<Application %s %s>" % (self.username, self.hardware)
Fix the phone Column typefrom datetime import datetime
from sqlalchemy import Column, DateTime, Integer, Text, Boolean, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class Application(Base):
__tablename__ = 'applications'
id = Column(Integer, primary_key=True)
realname = Column(Text)
username = Column(Text, unique=True)
country = Column(Text)
state = Column(Text)
hardware = Column(Text)
shield = Column(Text)
date = Column(DateTime, default=datetime.now)
text = Column(Text)
approved = Column(Boolean, default=False)
address = Column(Text)
phone = Column(Text)
def __repr__(self):
return "<Application %s %s>" % (self.username, self.hardware)
| <commit_before>from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, Text, Boolean, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class Application(Base):
__tablename__ = 'applications'
id = Column(Integer, primary_key=True)
realname = Column(Text)
username = Column(Text, unique=True)
country = Column(Text)
state = Column(Text)
hardware = Column(Text)
shield = Column(Text)
date = Column(DateTime, default=datetime.now)
text = Column(Text)
approved = Column(Boolean, default=False)
address = Column(Text)
phone = Column(Date)
def __repr__(self):
return "<Application %s %s>" % (self.username, self.hardware)
<commit_msg>Fix the phone Column type<commit_after>from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, Text, Boolean, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class Application(Base):
__tablename__ = 'applications'
id = Column(Integer, primary_key=True)
realname = Column(Text)
username = Column(Text, unique=True)
country = Column(Text)
state = Column(Text)
hardware = Column(Text)
shield = Column(Text)
date = Column(DateTime, default=datetime.now)
text = Column(Text)
approved = Column(Boolean, default=False)
address = Column(Text)
phone = Column(Text)
def __repr__(self):
return "<Application %s %s>" % (self.username, self.hardware)
|
4e699d94c84f1123f36a331926ca77af3f86b474 | tensorflow/python/profiler/traceme.py | tensorflow/python/profiler/traceme.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace python events.
Usage:
with profiler.TraceMe('name'):
...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.util.tf_export import tf_export
@tf_export('profiler.TraceMe')
class TraceMe(object):
"""Context manager that generates a trace event in the profiler."""
def __init__(self, name):
self._traceme = pywrap_tensorflow.PythonTraceMe(name)
def __enter__(self):
self._traceme.Enter()
def __exit__(self, exc_type, exc_val, exc_tb):
self._traceme.Exit()
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace python events.
Usage:
with profiler.TraceMe('name'):
...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
class TraceMe(object):
"""Context manager that generates a trace event in the profiler."""
def __init__(self, name):
self._traceme = pywrap_tensorflow.PythonTraceMe(name)
def __enter__(self):
self._traceme.Enter()
def __exit__(self, exc_type, exc_val, exc_tb):
self._traceme.Exit()
| Remove tf_export from TraceMe Python API. | Remove tf_export from TraceMe Python API.
PiperOrigin-RevId: 262247599
| Python | apache-2.0 | cxxgtxy/tensorflow,karllessard/tensorflow,jhseu/tensorflow,tensorflow/tensorflow,jhseu/tensorflow,yongtang/tensorflow,renyi533/tensorflow,petewarden/tensorflow,renyi533/tensorflow,freedomtan/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,petewarden/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow,annarev/tensorflow,gunan/tensorflow,aam-at/tensorflow,Intel-tensorflow/tensorflow,gunan/tensorflow,gautam1858/tensorflow,sarvex/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,sarvex/tensorflow,cxxgtxy/tensorflow,gunan/tensorflow,petewarden/tensorflow,aam-at/tensorflow,arborh/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,xzturn/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,petewarden/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,yongtang/tensorflow,ppwwyyxx/tensorflow,karllessard/tensorflow,adit-chandra/tensorflow,davidzchen/tensorflow,DavidNorman/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,paolodedios/tensorflow,jhseu/tensorflow,paolodedios/tensorflow,gunan/tensorflow,davidzchen/tensorflow,Intel-Corporation/tensorflow,ppwwyyxx/tensorflow,cxxgtxy/tensorflow,arborh/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,jhseu/tensorflow,paolodedios/tensorflow,petewarden/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,annarev/tensorflow,yongtang/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-pywrap_saved_model,arborh/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,xzturn/tensorflow,Intel-tensorflow/tensorflow,chemelnucfin/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gunan/tensorflow,frreiss/tensorflow-fred,davidz
chen/tensorflow,aldian/tensorflow,adit-chandra/tensorflow,DavidNorman/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,DavidNorman/tensorflow,renyi533/tensorflow,petewarden/tensorflow,renyi533/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,arborh/tensorflow,adit-chandra/tensorflow,gautam1858/tensorflow,renyi533/tensorflow,DavidNorman/tensorflow,annarev/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,chemelnucfin/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,xzturn/tensorflow,adit-chandra/tensorflow,aldian/tensorflow,freedomtan/tensorflow,yongtang/tensorflow,ppwwyyxx/tensorflow,gunan/tensorflow,renyi533/tensorflow,aldian/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow,chemelnucfin/tensorflow,cxxgtxy/tensorflow,arborh/tensorflow,xzturn/tensorflow,freedomtan/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,adit-chandra/tensorflow,Intel-tensorflow/tensorflow,xzturn/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,Intel-Corporation/tensorflow,petewarden/tensorflow,davidzchen/tensorflow,arborh/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,renyi533/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,arborh/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jhseu/tensorflow,gunan/tensorflow,yongtang/tensorflow,xzturn/tensorflow,aldian/tensorflow,Intel-Corporation/tensorflow,jhseu/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aldian/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,davidzchen/ten
sorflow,ppwwyyxx/tensorflow,arborh/tensorflow,frreiss/tensorflow-fred,freedomtan/tensorflow,chemelnucfin/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,sarvex/tensorflow,petewarden/tensorflow,ppwwyyxx/tensorflow,yongtang/tensorflow,jhseu/tensorflow,DavidNorman/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,gunan/tensorflow,arborh/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,chemelnucfin/tensorflow,chemelnucfin/tensorflow,ppwwyyxx/tensorflow,petewarden/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,aam-at/tensorflow,Intel-tensorflow/tensorflow,annarev/tensorflow,renyi533/tensorflow,yongtang/tensorflow,jhseu/tensorflow,xzturn/tensorflow,adit-chandra/tensorflow,yongtang/tensorflow,aam-at/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,arborh/tensorflow,sarvex/tensorflow,renyi533/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,arborh/tensorflow,adit-chandra/tensorflow,xzturn/tensorflow,sarvex/tensorflow,arborh/tensorflow,freedomtan/tensorflow,DavidNorman/tensorflow,DavidNorman/tensorflow,aam-at/tensorflow,petewarden/tensorflow,chemelnucfin/tensorflow,freedomtan/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,renyi533/tensorflow,aldian/tensorflow,aam-at/tensorflow,cxxgtxy/tensorflow,annarev/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,adit-chandra/tensorflow,gautam1858/tensorflow,annarev/tensorflow,tensorflow/tensorflow-pywrap_saved_model,davidzchen/tensorflow,DavidNorman/tensorflow,chemelnucfin/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,gautam1858/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,chemelnucfin/tensorflow,chemelnucfin/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,gautam1858/tensorflow,jh
seu/tensorflow,freedomtan/tensorflow,gunan/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,freedomtan/tensorflow,annarev/tensorflow,aldian/tensorflow,gautam1858/tensorflow,chemelnucfin/tensorflow,annarev/tensorflow,tensorflow/tensorflow,gunan/tensorflow,frreiss/tensorflow-fred,adit-chandra/tensorflow,sarvex/tensorflow,frreiss/tensorflow-fred,aam-at/tensorflow,frreiss/tensorflow-fred,ppwwyyxx/tensorflow,tensorflow/tensorflow,DavidNorman/tensorflow,gautam1858/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,jhseu/tensorflow,adit-chandra/tensorflow,DavidNorman/tensorflow,sarvex/tensorflow,aam-at/tensorflow,karllessard/tensorflow,cxxgtxy/tensorflow,freedomtan/tensorflow,ppwwyyxx/tensorflow,cxxgtxy/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,ppwwyyxx/tensorflow,DavidNorman/tensorflow,cxxgtxy/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhseu/tensorflow,annarev/tensorflow,xzturn/tensorflow,paolodedios/tensorflow,davidzchen/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,aldian/tensorflow,Intel-Corporation/tensorflow,davidzchen/tensorflow,gunan/tensorflow,aam-at/tensorflow,paolodedios/tensorflow,davidzchen/tensorflow,Intel-Corporation/tensorflow,chemelnucfin/tensorflow,davidzchen/tensorflow,annarev/tensorflow,Intel-Corporation/tensorflow,jhseu/tensorflow,gunan/tensorflow,karllessard/tensorflow,petewarden/tensorflow | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace python events.
Usage:
with profiler.TraceMe('name'):
...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.util.tf_export import tf_export
@tf_export('profiler.TraceMe')
class TraceMe(object):
"""Context manager that generates a trace event in the profiler."""
def __init__(self, name):
self._traceme = pywrap_tensorflow.PythonTraceMe(name)
def __enter__(self):
self._traceme.Enter()
def __exit__(self, exc_type, exc_val, exc_tb):
self._traceme.Exit()
Remove tf_export from TraceMe Python API.
PiperOrigin-RevId: 262247599 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace python events.
Usage:
with profiler.TraceMe('name'):
...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
class TraceMe(object):
"""Context manager that generates a trace event in the profiler."""
def __init__(self, name):
self._traceme = pywrap_tensorflow.PythonTraceMe(name)
def __enter__(self):
self._traceme.Enter()
def __exit__(self, exc_type, exc_val, exc_tb):
self._traceme.Exit()
| <commit_before># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace python events.
Usage:
with profiler.TraceMe('name'):
...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.util.tf_export import tf_export
@tf_export('profiler.TraceMe')
class TraceMe(object):
"""Context manager that generates a trace event in the profiler."""
def __init__(self, name):
self._traceme = pywrap_tensorflow.PythonTraceMe(name)
def __enter__(self):
self._traceme.Enter()
def __exit__(self, exc_type, exc_val, exc_tb):
self._traceme.Exit()
<commit_msg>Remove tf_export from TraceMe Python API.
PiperOrigin-RevId: 262247599<commit_after> | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace python events.
Usage:
with profiler.TraceMe('name'):
...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
class TraceMe(object):
"""Context manager that generates a trace event in the profiler."""
def __init__(self, name):
self._traceme = pywrap_tensorflow.PythonTraceMe(name)
def __enter__(self):
self._traceme.Enter()
def __exit__(self, exc_type, exc_val, exc_tb):
self._traceme.Exit()
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace python events.
Usage:
with profiler.TraceMe('name'):
...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.util.tf_export import tf_export
@tf_export('profiler.TraceMe')
class TraceMe(object):
"""Context manager that generates a trace event in the profiler."""
def __init__(self, name):
self._traceme = pywrap_tensorflow.PythonTraceMe(name)
def __enter__(self):
self._traceme.Enter()
def __exit__(self, exc_type, exc_val, exc_tb):
self._traceme.Exit()
Remove tf_export from TraceMe Python API.
PiperOrigin-RevId: 262247599# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace python events.
Usage:
with profiler.TraceMe('name'):
...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
class TraceMe(object):
"""Context manager that generates a trace event in the profiler."""
def __init__(self, name):
self._traceme = pywrap_tensorflow.PythonTraceMe(name)
def __enter__(self):
self._traceme.Enter()
def __exit__(self, exc_type, exc_val, exc_tb):
self._traceme.Exit()
| <commit_before># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace python events.
Usage:
with profiler.TraceMe('name'):
...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.util.tf_export import tf_export
@tf_export('profiler.TraceMe')
class TraceMe(object):
"""Context manager that generates a trace event in the profiler."""
def __init__(self, name):
self._traceme = pywrap_tensorflow.PythonTraceMe(name)
def __enter__(self):
self._traceme.Enter()
def __exit__(self, exc_type, exc_val, exc_tb):
self._traceme.Exit()
<commit_msg>Remove tf_export from TraceMe Python API.
PiperOrigin-RevId: 262247599<commit_after># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace python events.
Usage:
with profiler.TraceMe('name'):
...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow
class TraceMe(object):
"""Context manager that generates a trace event in the profiler."""
def __init__(self, name):
self._traceme = pywrap_tensorflow.PythonTraceMe(name)
def __enter__(self):
self._traceme.Enter()
def __exit__(self, exc_type, exc_val, exc_tb):
self._traceme.Exit()
|
759f6a2e4ced9ce9beeda01e638f109d946050b1 | server/migrations/0006_auto_20150811_0811.py | server/migrations/0006_auto_20150811_0811.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404
from django.db import models, migrations
def add_initial_date(apps, schema_editor):
Machine = apps.get_model("server", "Machine")
for machine in Machine.objects.all():
if not machine.first_checkin:
machine.first_checkin = machine.last_checkin
machine.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
migrations.RunPython(add_initial_date),
]
| Add in the first checkin date if it doesn't exist | Add in the first checkin date if it doesn't exist
| Python | apache-2.0 | sheagcraig/sal,salopensource/sal,salopensource/sal,macjustice/sal,chasetb/sal,salopensource/sal,sheagcraig/sal,erikng/sal,macjustice/sal,erikng/sal,chasetb/sal,chasetb/sal,erikng/sal,macjustice/sal,macjustice/sal,sheagcraig/sal,erikng/sal,sheagcraig/sal,chasetb/sal,salopensource/sal | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
]
Add in the first checkin date if it doesn't exist | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404
from django.db import models, migrations
def add_initial_date(apps, schema_editor):
Machine = apps.get_model("server", "Machine")
for machine in Machine.objects.all():
if not machine.first_checkin:
machine.first_checkin = machine.last_checkin
machine.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
migrations.RunPython(add_initial_date),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
]
<commit_msg>Add in the first checkin date if it doesn't exist<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404
from django.db import models, migrations
def add_initial_date(apps, schema_editor):
Machine = apps.get_model("server", "Machine")
for machine in Machine.objects.all():
if not machine.first_checkin:
machine.first_checkin = machine.last_checkin
machine.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
migrations.RunPython(add_initial_date),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
]
Add in the first checkin date if it doesn't exist# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404
from django.db import models, migrations
def add_initial_date(apps, schema_editor):
Machine = apps.get_model("server", "Machine")
for machine in Machine.objects.all():
if not machine.first_checkin:
machine.first_checkin = machine.last_checkin
machine.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
migrations.RunPython(add_initial_date),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
]
<commit_msg>Add in the first checkin date if it doesn't exist<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404
from django.db import models, migrations
def add_initial_date(apps, schema_editor):
Machine = apps.get_model("server", "Machine")
for machine in Machine.objects.all():
if not machine.first_checkin:
machine.first_checkin = machine.last_checkin
machine.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
migrations.RunPython(add_initial_date),
]
|
343d0bcdeb6981ca90673de342eb6064ca62c24e | pip_deploy.py | pip_deploy.py | import subprocess
subprocess.call('python setup.py sdist')
subprocess.call('python setup.py sdist bdist_wheel upload')
| import subprocess
subprocess.call('python setup.py sdist')
subprocess.call('python setup.py bdist_wheel --universal')
subprocess.call('twine upload dist/*')
| Update pip deploy script to use twine | Update pip deploy script to use twine
| Python | mit | partrita/Gooey,chriskiehl/Gooey,codingsnippets/Gooey | import subprocess
subprocess.call('python setup.py sdist')
subprocess.call('python setup.py sdist bdist_wheel upload')
Update pip deploy script to use twine | import subprocess
subprocess.call('python setup.py sdist')
subprocess.call('python setup.py bdist_wheel --universal')
subprocess.call('twine upload dist/*')
| <commit_before>import subprocess
subprocess.call('python setup.py sdist')
subprocess.call('python setup.py sdist bdist_wheel upload')
<commit_msg>Update pip deploy script to use twine<commit_after> | import subprocess
subprocess.call('python setup.py sdist')
subprocess.call('python setup.py bdist_wheel --universal')
subprocess.call('twine upload dist/*')
| import subprocess
subprocess.call('python setup.py sdist')
subprocess.call('python setup.py sdist bdist_wheel upload')
Update pip deploy script to use twineimport subprocess
subprocess.call('python setup.py sdist')
subprocess.call('python setup.py bdist_wheel --universal')
subprocess.call('twine upload dist/*')
| <commit_before>import subprocess
subprocess.call('python setup.py sdist')
subprocess.call('python setup.py sdist bdist_wheel upload')
<commit_msg>Update pip deploy script to use twine<commit_after>import subprocess
subprocess.call('python setup.py sdist')
subprocess.call('python setup.py bdist_wheel --universal')
subprocess.call('twine upload dist/*')
|
b916e8dbb08b6c4ebb2ff37e8515a41f05261f5b | scripts/setup.py | scripts/setup.py | import setuptools
from imgtool import imgtool_version
setuptools.setup(
name="imgtool",
version=imgtool_version,
author="The MCUboot commiters",
description=("MCUboot's image signing and key management"),
license="Apache Software License",
url="http://github.com/JuulLabs-OSS/mcuboot",
packages=setuptools.find_packages(),
install_requires=[
'cryptography>=2.4.2',
'intelhex>=2.2.1',
'click',
],
entry_points={
"console_scripts": ["imgtool=imgtool.main:imgtool"]
},
classifiers=[
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: Apache Software License",
],
)
| import setuptools
from imgtool import imgtool_version
setuptools.setup(
name="imgtool",
version=imgtool_version,
author="The MCUboot commiters",
author_email="None",
description=("MCUboot's image signing and key management"),
license="Apache Software License",
url="http://github.com/JuulLabs-OSS/mcuboot",
packages=setuptools.find_packages(),
install_requires=[
'cryptography>=2.4.2',
'intelhex>=2.2.1',
'click',
],
entry_points={
"console_scripts": ["imgtool=imgtool.main:imgtool"]
},
classifiers=[
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: Apache Software License",
],
)
| Fix author_email UNKNOWN in pip show | Fix author_email UNKNOWN in pip show
Signed-off-by: Fabio Utzig <66676b1ceaf93296e098708c09494361103aa635@apache.org>
| Python | apache-2.0 | utzig/mcuboot,utzig/mcuboot,ATmobica/mcuboot,runtimeco/mcuboot,runtimeco/mcuboot,runtimeco/mcuboot,runtimeco/mcuboot,tamban01/mcuboot,ATmobica/mcuboot,utzig/mcuboot,utzig/mcuboot,tamban01/mcuboot,tamban01/mcuboot,ATmobica/mcuboot,ATmobica/mcuboot,ATmobica/mcuboot,runtimeco/mcuboot,tamban01/mcuboot,tamban01/mcuboot,utzig/mcuboot | import setuptools
from imgtool import imgtool_version
setuptools.setup(
name="imgtool",
version=imgtool_version,
author="The MCUboot commiters",
description=("MCUboot's image signing and key management"),
license="Apache Software License",
url="http://github.com/JuulLabs-OSS/mcuboot",
packages=setuptools.find_packages(),
install_requires=[
'cryptography>=2.4.2',
'intelhex>=2.2.1',
'click',
],
entry_points={
"console_scripts": ["imgtool=imgtool.main:imgtool"]
},
classifiers=[
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: Apache Software License",
],
)
Fix author_email UNKNOWN in pip show
Signed-off-by: Fabio Utzig <66676b1ceaf93296e098708c09494361103aa635@apache.org> | import setuptools
from imgtool import imgtool_version
setuptools.setup(
name="imgtool",
version=imgtool_version,
author="The MCUboot commiters",
author_email="None",
description=("MCUboot's image signing and key management"),
license="Apache Software License",
url="http://github.com/JuulLabs-OSS/mcuboot",
packages=setuptools.find_packages(),
install_requires=[
'cryptography>=2.4.2',
'intelhex>=2.2.1',
'click',
],
entry_points={
"console_scripts": ["imgtool=imgtool.main:imgtool"]
},
classifiers=[
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: Apache Software License",
],
)
| <commit_before>import setuptools
from imgtool import imgtool_version
setuptools.setup(
name="imgtool",
version=imgtool_version,
author="The MCUboot commiters",
description=("MCUboot's image signing and key management"),
license="Apache Software License",
url="http://github.com/JuulLabs-OSS/mcuboot",
packages=setuptools.find_packages(),
install_requires=[
'cryptography>=2.4.2',
'intelhex>=2.2.1',
'click',
],
entry_points={
"console_scripts": ["imgtool=imgtool.main:imgtool"]
},
classifiers=[
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: Apache Software License",
],
)
<commit_msg>Fix author_email UNKNOWN in pip show
Signed-off-by: Fabio Utzig <66676b1ceaf93296e098708c09494361103aa635@apache.org><commit_after> | import setuptools
from imgtool import imgtool_version
setuptools.setup(
name="imgtool",
version=imgtool_version,
author="The MCUboot commiters",
author_email="None",
description=("MCUboot's image signing and key management"),
license="Apache Software License",
url="http://github.com/JuulLabs-OSS/mcuboot",
packages=setuptools.find_packages(),
install_requires=[
'cryptography>=2.4.2',
'intelhex>=2.2.1',
'click',
],
entry_points={
"console_scripts": ["imgtool=imgtool.main:imgtool"]
},
classifiers=[
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: Apache Software License",
],
)
| import setuptools
from imgtool import imgtool_version
setuptools.setup(
name="imgtool",
version=imgtool_version,
author="The MCUboot commiters",
description=("MCUboot's image signing and key management"),
license="Apache Software License",
url="http://github.com/JuulLabs-OSS/mcuboot",
packages=setuptools.find_packages(),
install_requires=[
'cryptography>=2.4.2',
'intelhex>=2.2.1',
'click',
],
entry_points={
"console_scripts": ["imgtool=imgtool.main:imgtool"]
},
classifiers=[
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: Apache Software License",
],
)
Fix author_email UNKNOWN in pip show
Signed-off-by: Fabio Utzig <66676b1ceaf93296e098708c09494361103aa635@apache.org>import setuptools
from imgtool import imgtool_version
setuptools.setup(
name="imgtool",
version=imgtool_version,
author="The MCUboot commiters",
author_email="None",
description=("MCUboot's image signing and key management"),
license="Apache Software License",
url="http://github.com/JuulLabs-OSS/mcuboot",
packages=setuptools.find_packages(),
install_requires=[
'cryptography>=2.4.2',
'intelhex>=2.2.1',
'click',
],
entry_points={
"console_scripts": ["imgtool=imgtool.main:imgtool"]
},
classifiers=[
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: Apache Software License",
],
)
| <commit_before>import setuptools
from imgtool import imgtool_version
setuptools.setup(
name="imgtool",
version=imgtool_version,
author="The MCUboot commiters",
description=("MCUboot's image signing and key management"),
license="Apache Software License",
url="http://github.com/JuulLabs-OSS/mcuboot",
packages=setuptools.find_packages(),
install_requires=[
'cryptography>=2.4.2',
'intelhex>=2.2.1',
'click',
],
entry_points={
"console_scripts": ["imgtool=imgtool.main:imgtool"]
},
classifiers=[
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: Apache Software License",
],
)
<commit_msg>Fix author_email UNKNOWN in pip show
Signed-off-by: Fabio Utzig <66676b1ceaf93296e098708c09494361103aa635@apache.org><commit_after>import setuptools
from imgtool import imgtool_version
setuptools.setup(
name="imgtool",
version=imgtool_version,
author="The MCUboot commiters",
author_email="None",
description=("MCUboot's image signing and key management"),
license="Apache Software License",
url="http://github.com/JuulLabs-OSS/mcuboot",
packages=setuptools.find_packages(),
install_requires=[
'cryptography>=2.4.2',
'intelhex>=2.2.1',
'click',
],
entry_points={
"console_scripts": ["imgtool=imgtool.main:imgtool"]
},
classifiers=[
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: Apache Software License",
],
)
|
9ee03ea335b438cf1005a2295360310456e27bad | repos/urls.py | repos/urls.py | from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.http import HttpResponseRedirect
# Site-wide URL routing: the bare root redirects into the repo app, the admin
# site is mounted under /controller/, and each application has its own prefix.
urlpatterns = [
    # Root URL: bounce visitors to the repository app.
    url(r'^$', lambda u: HttpResponseRedirect('/repo')),
    # Log out via the stock auth view, then land on the repo home page.
    url(r'^controller/logout/$', views.logout, {'next_page': '/repo/home/'}),
    url(r'^controller/', admin.site.urls),
    url(r'^repo/', include('repo.urls')),
    url(r'^reg/', include('reg.urls')),
    url(r'^tracker/', include('tracker.urls')),
    url(r'^todos/', include('todo.urls')),
]
| from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.http import HttpResponseRedirect
# Site-wide URL routing: the bare root redirects into the registration app,
# the admin site is mounted under /controller/, and each app has its prefix.
urlpatterns = [
    # Root URL: bounce visitors to the registration app.
    url(r'^$', lambda u: HttpResponseRedirect('/reg')),
    # NOTE(review): logout still lands on /repo/home/ while the root now
    # points at /reg -- confirm the post-logout destination is intentional.
    url(r'^controller/logout/$', views.logout, {'next_page': '/repo/home/'}),
    url(r'^controller/', admin.site.urls),
    url(r'^repo/', include('repo.urls')),
    url(r'^reg/', include('reg.urls')),
    url(r'^tracker/', include('tracker.urls')),
    url(r'^todos/', include('todo.urls')),
]
| Switch home page to reg/home | Switch home page to reg/home
| Python | mit | giantas/elibrary,giantas/elibrary,giantas/elibrary | from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.http import HttpResponseRedirect
urlpatterns = [
url(r'^$', lambda u: HttpResponseRedirect('/repo')),
url(r'^controller/logout/$', views.logout, {'next_page': '/repo/home/'}),
url(r'^controller/', admin.site.urls),
url(r'^repo/', include('repo.urls')),
url(r'^reg/', include('reg.urls')),
url(r'^tracker/', include('tracker.urls')),
url(r'^todos/', include('todo.urls')),
]
Switch home page to reg/home | from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.http import HttpResponseRedirect
urlpatterns = [
url(r'^$', lambda u: HttpResponseRedirect('/reg')),
url(r'^controller/logout/$', views.logout, {'next_page': '/repo/home/'}),
url(r'^controller/', admin.site.urls),
url(r'^repo/', include('repo.urls')),
url(r'^reg/', include('reg.urls')),
url(r'^tracker/', include('tracker.urls')),
url(r'^todos/', include('todo.urls')),
]
| <commit_before>from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.http import HttpResponseRedirect
urlpatterns = [
url(r'^$', lambda u: HttpResponseRedirect('/repo')),
url(r'^controller/logout/$', views.logout, {'next_page': '/repo/home/'}),
url(r'^controller/', admin.site.urls),
url(r'^repo/', include('repo.urls')),
url(r'^reg/', include('reg.urls')),
url(r'^tracker/', include('tracker.urls')),
url(r'^todos/', include('todo.urls')),
]
<commit_msg>Switch home page to reg/home<commit_after> | from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.http import HttpResponseRedirect
urlpatterns = [
url(r'^$', lambda u: HttpResponseRedirect('/reg')),
url(r'^controller/logout/$', views.logout, {'next_page': '/repo/home/'}),
url(r'^controller/', admin.site.urls),
url(r'^repo/', include('repo.urls')),
url(r'^reg/', include('reg.urls')),
url(r'^tracker/', include('tracker.urls')),
url(r'^todos/', include('todo.urls')),
]
| from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.http import HttpResponseRedirect
urlpatterns = [
url(r'^$', lambda u: HttpResponseRedirect('/repo')),
url(r'^controller/logout/$', views.logout, {'next_page': '/repo/home/'}),
url(r'^controller/', admin.site.urls),
url(r'^repo/', include('repo.urls')),
url(r'^reg/', include('reg.urls')),
url(r'^tracker/', include('tracker.urls')),
url(r'^todos/', include('todo.urls')),
]
Switch home page to reg/homefrom django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.http import HttpResponseRedirect
urlpatterns = [
url(r'^$', lambda u: HttpResponseRedirect('/reg')),
url(r'^controller/logout/$', views.logout, {'next_page': '/repo/home/'}),
url(r'^controller/', admin.site.urls),
url(r'^repo/', include('repo.urls')),
url(r'^reg/', include('reg.urls')),
url(r'^tracker/', include('tracker.urls')),
url(r'^todos/', include('todo.urls')),
]
| <commit_before>from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.http import HttpResponseRedirect
urlpatterns = [
url(r'^$', lambda u: HttpResponseRedirect('/repo')),
url(r'^controller/logout/$', views.logout, {'next_page': '/repo/home/'}),
url(r'^controller/', admin.site.urls),
url(r'^repo/', include('repo.urls')),
url(r'^reg/', include('reg.urls')),
url(r'^tracker/', include('tracker.urls')),
url(r'^todos/', include('todo.urls')),
]
<commit_msg>Switch home page to reg/home<commit_after>from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views
from django.http import HttpResponseRedirect
urlpatterns = [
url(r'^$', lambda u: HttpResponseRedirect('/reg')),
url(r'^controller/logout/$', views.logout, {'next_page': '/repo/home/'}),
url(r'^controller/', admin.site.urls),
url(r'^repo/', include('repo.urls')),
url(r'^reg/', include('reg.urls')),
url(r'^tracker/', include('tracker.urls')),
url(r'^todos/', include('todo.urls')),
]
|
7a98cd1c58985da9230ba5861731b6f252d2c611 | source/update.py | source/update.py | """updates subreddit css with compiled sass"""
import time
import sass
import praw
def css() -> str:
    """Compile ``index.scss`` and return the resulting compressed CSS."""
    compiled_css: str = sass.compile(
        filename="index.scss", output_style="compressed"
    )
    return compiled_css
def uid() -> str:
    """Return a stylesheet edit reason stamped with the current local time."""
    timestamp: str = time.strftime("%c")
    return f"Subreddit upload on {timestamp}"
def update() -> None:
    """Compile the stylesheet and push it to r/neoliberal.

    Credentials come from the default praw configuration (praw.ini or
    environment); the stylesheet edit reason is the timestamp from uid().
    """
    reddit: praw.Reddit = praw.Reddit()
    reddit.subreddit("neoliberal").stylesheet.update(css(), reason=uid())
    return
| """updates subreddit css with compiled sass"""
import os
import time
from typing import List, Dict, Any, Tuple
import praw
import sass
WebhookResponse = Dict[str, Any] # pylint: disable=C0103
def css() -> str:
    """Compile ``index.scss`` and return the compressed CSS as a string."""
    return sass.compile(filename="index.scss", output_style="compressed")
def uid() -> str:
    """Return a stylesheet edit reason stamped with the current local time."""
    return "Subreddit upload on {}".format(time.strftime("%c"))
def changed_assets(data: Dict[str, Any]) -> Tuple[List[str], List[str]]:
    """Identify image assets touched by the pushed head commit.

    Args:
        data: parsed push-webhook payload (the file's ``WebhookResponse``
            alias); only ``data["head_commit"]`` with its ``modified``,
            ``added`` and ``removed`` file lists is read.

    Returns:
        ``(uploading_files, removed_files)`` -- image paths that need to be
        (re)uploaded and image paths that were deleted.
    """
    # os.path.splitext() keeps the leading dot (".png"), so compare against
    # dotted suffixes; the previous code compared ".png" == "png" and
    # therefore never matched any file.
    image_suffixes = {".png", ".jpg"}
    # NOTE(review): ".jpeg" and uppercase extensions are not matched --
    # confirm whether they should be.

    def _images(files: List[str]) -> List[str]:
        """Filter *files* down to those with an image extension."""
        return [f for f in files if os.path.splitext(f)[1] in image_suffixes]

    head_commit: Dict[str, Any] = data["head_commit"]
    uploading_files: List[str] = _images(
        head_commit["modified"] + head_commit["added"]
    )
    removed_files: List[str] = _images(head_commit["removed"])
    return (uploading_files, removed_files)
def update(data: WebhookResponse) -> None:
    """Recompile the stylesheet and push it to r/neoliberal.

    Args:
        data: push-webhook payload.  NOTE(review): currently unused here --
            wiring changed_assets(data) into the upload path looks like work
            in progress; confirm before relying on asset syncing.
    """
    reddit: praw.Reddit = praw.Reddit()
    reddit.subreddit("neoliberal").stylesheet.update(css(), reason=uid())
    return
| Check for changed files from webhook | Check for changed files from webhook
Prevents uploading everything, only the changed assets
| Python | mit | neoliberal/css-updater | """updates subreddit css with compiled sass"""
import time
import sass
import praw
def css() -> str:
"""compiles sass and returns css"""
return sass.compile(filename="index.scss", output_style="compressed")
def uid() -> str:
"""return date and time"""
return "Subreddit upload on {}".format(time.strftime("%c"))
def update() -> None:
"""main function"""
reddit: praw.Reddit = praw.Reddit()
reddit.subreddit("neoliberal").stylesheet.update(css(), reason=uid())
return
Check for changed files from webhook
Prevents uploading everything, only the changed assets | """updates subreddit css with compiled sass"""
import os
import time
from typing import List, Dict, Any, Tuple
import praw
import sass
WebhookResponse = Dict[str, Any] # pylint: disable=C0103
def css() -> str:
"""compiles sass and returns css"""
return sass.compile(filename="index.scss", output_style="compressed")
def uid() -> str:
"""return date and time"""
return "Subreddit upload on {}".format(time.strftime("%c"))
def changed_assets(data: WebhookResponse) -> Tuple[List[str], List[str]]:
"""identifies changed files to upload by checking if any changed files are images"""
endings: List[str] = ["png", "jpg"]
head_commit: Dict[str, Any] = data["head_commit"]
uploading_files: List[str] = [
file for file in (head_commit["modified"] + head_commit["added"])
for ending in endings
if os.path.splitext(file)[1] == ending
]
removed_files: List[str] = [
file for file in head_commit["removed"]
for ending in endings
if os.path.splitext(file)[1] == ending
]
return (uploading_files, removed_files)
def update(data: WebhookResponse) -> None:
"""main function"""
reddit: praw.Reddit = praw.Reddit()
reddit.subreddit("neoliberal").stylesheet.update(css(), reason=uid())
return
| <commit_before>"""updates subreddit css with compiled sass"""
import time
import sass
import praw
def css() -> str:
"""compiles sass and returns css"""
return sass.compile(filename="index.scss", output_style="compressed")
def uid() -> str:
"""return date and time"""
return "Subreddit upload on {}".format(time.strftime("%c"))
def update() -> None:
"""main function"""
reddit: praw.Reddit = praw.Reddit()
reddit.subreddit("neoliberal").stylesheet.update(css(), reason=uid())
return
<commit_msg>Check for changed files from webhook
Prevents uploading everything, only the changed assets<commit_after> | """updates subreddit css with compiled sass"""
import os
import time
from typing import List, Dict, Any, Tuple
import praw
import sass
WebhookResponse = Dict[str, Any] # pylint: disable=C0103
def css() -> str:
"""compiles sass and returns css"""
return sass.compile(filename="index.scss", output_style="compressed")
def uid() -> str:
"""return date and time"""
return "Subreddit upload on {}".format(time.strftime("%c"))
def changed_assets(data: WebhookResponse) -> Tuple[List[str], List[str]]:
"""identifies changed files to upload by checking if any changed files are images"""
endings: List[str] = ["png", "jpg"]
head_commit: Dict[str, Any] = data["head_commit"]
uploading_files: List[str] = [
file for file in (head_commit["modified"] + head_commit["added"])
for ending in endings
if os.path.splitext(file)[1] == ending
]
removed_files: List[str] = [
file for file in head_commit["removed"]
for ending in endings
if os.path.splitext(file)[1] == ending
]
return (uploading_files, removed_files)
def update(data: WebhookResponse) -> None:
"""main function"""
reddit: praw.Reddit = praw.Reddit()
reddit.subreddit("neoliberal").stylesheet.update(css(), reason=uid())
return
| """updates subreddit css with compiled sass"""
import time
import sass
import praw
def css() -> str:
"""compiles sass and returns css"""
return sass.compile(filename="index.scss", output_style="compressed")
def uid() -> str:
"""return date and time"""
return "Subreddit upload on {}".format(time.strftime("%c"))
def update() -> None:
"""main function"""
reddit: praw.Reddit = praw.Reddit()
reddit.subreddit("neoliberal").stylesheet.update(css(), reason=uid())
return
Check for changed files from webhook
Prevents uploading everything, only the changed assets"""updates subreddit css with compiled sass"""
import os
import time
from typing import List, Dict, Any, Tuple
import praw
import sass
WebhookResponse = Dict[str, Any] # pylint: disable=C0103
def css() -> str:
"""compiles sass and returns css"""
return sass.compile(filename="index.scss", output_style="compressed")
def uid() -> str:
"""return date and time"""
return "Subreddit upload on {}".format(time.strftime("%c"))
def changed_assets(data: WebhookResponse) -> Tuple[List[str], List[str]]:
"""identifies changed files to upload by checking if any changed files are images"""
endings: List[str] = ["png", "jpg"]
head_commit: Dict[str, Any] = data["head_commit"]
uploading_files: List[str] = [
file for file in (head_commit["modified"] + head_commit["added"])
for ending in endings
if os.path.splitext(file)[1] == ending
]
removed_files: List[str] = [
file for file in head_commit["removed"]
for ending in endings
if os.path.splitext(file)[1] == ending
]
return (uploading_files, removed_files)
def update(data: WebhookResponse) -> None:
"""main function"""
reddit: praw.Reddit = praw.Reddit()
reddit.subreddit("neoliberal").stylesheet.update(css(), reason=uid())
return
| <commit_before>"""updates subreddit css with compiled sass"""
import time
import sass
import praw
def css() -> str:
"""compiles sass and returns css"""
return sass.compile(filename="index.scss", output_style="compressed")
def uid() -> str:
"""return date and time"""
return "Subreddit upload on {}".format(time.strftime("%c"))
def update() -> None:
"""main function"""
reddit: praw.Reddit = praw.Reddit()
reddit.subreddit("neoliberal").stylesheet.update(css(), reason=uid())
return
<commit_msg>Check for changed files from webhook
Prevents uploading everything, only the changed assets<commit_after>"""updates subreddit css with compiled sass"""
import os
import time
from typing import List, Dict, Any, Tuple
import praw
import sass
WebhookResponse = Dict[str, Any] # pylint: disable=C0103
def css() -> str:
"""compiles sass and returns css"""
return sass.compile(filename="index.scss", output_style="compressed")
def uid() -> str:
"""return date and time"""
return "Subreddit upload on {}".format(time.strftime("%c"))
def changed_assets(data: WebhookResponse) -> Tuple[List[str], List[str]]:
"""identifies changed files to upload by checking if any changed files are images"""
endings: List[str] = ["png", "jpg"]
head_commit: Dict[str, Any] = data["head_commit"]
uploading_files: List[str] = [
file for file in (head_commit["modified"] + head_commit["added"])
for ending in endings
if os.path.splitext(file)[1] == ending
]
removed_files: List[str] = [
file for file in head_commit["removed"]
for ending in endings
if os.path.splitext(file)[1] == ending
]
return (uploading_files, removed_files)
def update(data: WebhookResponse) -> None:
"""main function"""
reddit: praw.Reddit = praw.Reddit()
reddit.subreddit("neoliberal").stylesheet.update(css(), reason=uid())
return
|
e6b5c93a8c23fcea84768a8b50708ef7ef78dcd8 | functionaltests/api/base.py | functionaltests/api/base.py | # -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
    """Tempest REST client pointed at the Solum application-deployment API."""

    def __init__(self, auth_provider):
        super(SolumClient, self).__init__(auth_provider)
        # Service type and endpoint kind used to locate the Solum endpoint
        # in the Keystone service catalog.
        self.service = 'application_deployment'
        self.endpoint_url = 'publicURL'
class TestCase(testtools.TestCase):
    """Base functional-test case that wires up an authenticated SolumClient."""

    def setUp(self):
        """Build a Solum client from tempest's configured identity settings."""
        super(TestCase, self).setUp()
        username = CONF.identity.username
        password = CONF.identity.password
        tenant_name = CONF.identity.tenant_name
        mgr = clients.Manager(username, password, tenant_name)
        # Current tempest requires explicit credentials when constructing the
        # auth provider; the zero-argument get_auth_provider() call fails.
        auth_provider = mgr.get_auth_provider(mgr.get_default_credentials())
        self.client = SolumClient(auth_provider)
| # -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
    """Tempest REST client pointed at the Solum application-deployment API."""

    def __init__(self, auth_provider):
        super(SolumClient, self).__init__(auth_provider)
        # Service type and endpoint kind used to locate the Solum endpoint
        # in the Keystone service catalog.
        self.service = 'application_deployment'
        self.endpoint_url = 'publicURL'
class TestCase(testtools.TestCase):
    """Base functional-test case that wires up an authenticated SolumClient."""

    def setUp(self):
        """Build a Solum client from tempest's configured identity settings."""
        super(TestCase, self).setUp()
        username = CONF.identity.username
        password = CONF.identity.password
        tenant_name = CONF.identity.tenant_name
        mgr = clients.Manager(username, password, tenant_name)
        # get_auth_provider requires explicit credentials in current tempest.
        auth_provider = mgr.get_auth_provider(mgr.get_default_credentials())
        self.client = SolumClient(auth_provider)
| Fix functionaltests (imported tempest code has changed) | Fix functionaltests (imported tempest code has changed)
get_auth_provider now takes an argument.
Change-Id: I4a80ef3fdf2914854268459cf1080a46922e93d5
| Python | apache-2.0 | gilbertpilz/solum,ed-/solum,ed-/solum,gilbertpilz/solum,openstack/solum,devdattakulkarni/test-solum,gilbertpilz/solum,stackforge/solum,openstack/solum,ed-/solum,stackforge/solum,devdattakulkarni/test-solum,ed-/solum,gilbertpilz/solum | # -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
def __init__(self, auth_provider):
super(SolumClient, self).__init__(auth_provider)
self.service = 'application_deployment'
self.endpoint_url = 'publicURL'
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
username = CONF.identity.username
password = CONF.identity.password
tenant_name = CONF.identity.tenant_name
mgr = clients.Manager(username, password, tenant_name)
auth_provider = mgr.get_auth_provider()
self.client = SolumClient(auth_provider)
Fix functionaltests (imported tempest code has changed)
get_auth_provider now takes an argument.
Change-Id: I4a80ef3fdf2914854268459cf1080a46922e93d5 | # -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
def __init__(self, auth_provider):
super(SolumClient, self).__init__(auth_provider)
self.service = 'application_deployment'
self.endpoint_url = 'publicURL'
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
username = CONF.identity.username
password = CONF.identity.password
tenant_name = CONF.identity.tenant_name
mgr = clients.Manager(username, password, tenant_name)
auth_provider = mgr.get_auth_provider(mgr.get_default_credentials())
self.client = SolumClient(auth_provider)
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
def __init__(self, auth_provider):
super(SolumClient, self).__init__(auth_provider)
self.service = 'application_deployment'
self.endpoint_url = 'publicURL'
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
username = CONF.identity.username
password = CONF.identity.password
tenant_name = CONF.identity.tenant_name
mgr = clients.Manager(username, password, tenant_name)
auth_provider = mgr.get_auth_provider()
self.client = SolumClient(auth_provider)
<commit_msg>Fix functionaltests (imported tempest code has changed)
get_auth_provider now takes an argument.
Change-Id: I4a80ef3fdf2914854268459cf1080a46922e93d5<commit_after> | # -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
def __init__(self, auth_provider):
super(SolumClient, self).__init__(auth_provider)
self.service = 'application_deployment'
self.endpoint_url = 'publicURL'
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
username = CONF.identity.username
password = CONF.identity.password
tenant_name = CONF.identity.tenant_name
mgr = clients.Manager(username, password, tenant_name)
auth_provider = mgr.get_auth_provider(mgr.get_default_credentials())
self.client = SolumClient(auth_provider)
| # -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
def __init__(self, auth_provider):
super(SolumClient, self).__init__(auth_provider)
self.service = 'application_deployment'
self.endpoint_url = 'publicURL'
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
username = CONF.identity.username
password = CONF.identity.password
tenant_name = CONF.identity.tenant_name
mgr = clients.Manager(username, password, tenant_name)
auth_provider = mgr.get_auth_provider()
self.client = SolumClient(auth_provider)
Fix functionaltests (imported tempest code has changed)
get_auth_provider now takes an argument.
Change-Id: I4a80ef3fdf2914854268459cf1080a46922e93d5# -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
def __init__(self, auth_provider):
super(SolumClient, self).__init__(auth_provider)
self.service = 'application_deployment'
self.endpoint_url = 'publicURL'
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
username = CONF.identity.username
password = CONF.identity.password
tenant_name = CONF.identity.tenant_name
mgr = clients.Manager(username, password, tenant_name)
auth_provider = mgr.get_auth_provider(mgr.get_default_credentials())
self.client = SolumClient(auth_provider)
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
def __init__(self, auth_provider):
super(SolumClient, self).__init__(auth_provider)
self.service = 'application_deployment'
self.endpoint_url = 'publicURL'
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
username = CONF.identity.username
password = CONF.identity.password
tenant_name = CONF.identity.tenant_name
mgr = clients.Manager(username, password, tenant_name)
auth_provider = mgr.get_auth_provider()
self.client = SolumClient(auth_provider)
<commit_msg>Fix functionaltests (imported tempest code has changed)
get_auth_provider now takes an argument.
Change-Id: I4a80ef3fdf2914854268459cf1080a46922e93d5<commit_after># -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
def __init__(self, auth_provider):
super(SolumClient, self).__init__(auth_provider)
self.service = 'application_deployment'
self.endpoint_url = 'publicURL'
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
username = CONF.identity.username
password = CONF.identity.password
tenant_name = CONF.identity.tenant_name
mgr = clients.Manager(username, password, tenant_name)
auth_provider = mgr.get_auth_provider(mgr.get_default_credentials())
self.client = SolumClient(auth_provider)
|
32994f27d1644415e8cd4a22f1b47d4938d3620c | fulfil_client/oauth.py | fulfil_client/oauth.py | from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(self.base_url + 'oauth/authorize')
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
| from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope, **kwargs):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(
self.base_url + 'oauth/authorize', **kwargs)
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
| Add provision to pass extra args in auth url | Add provision to pass extra args in auth url
| Python | isc | fulfilio/fulfil-python-api,sharoonthomas/fulfil-python-api | from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(self.base_url + 'oauth/authorize')
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
Add provision to pass extra args in auth url | from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope, **kwargs):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(
self.base_url + 'oauth/authorize', **kwargs)
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
| <commit_before>from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(self.base_url + 'oauth/authorize')
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
<commit_msg>Add provision to pass extra args in auth url<commit_after> | from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope, **kwargs):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(
self.base_url + 'oauth/authorize', **kwargs)
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
| from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(self.base_url + 'oauth/authorize')
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
Add provision to pass extra args in auth urlfrom requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope, **kwargs):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(
self.base_url + 'oauth/authorize', **kwargs)
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
| <commit_before>from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(self.base_url + 'oauth/authorize')
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
<commit_msg>Add provision to pass extra args in auth url<commit_after>from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope, **kwargs):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(
self.base_url + 'oauth/authorize', **kwargs)
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
|
c7d5a39fd21c2d5c9c5f8a2b88b5e09c98e9e776 | ovp_users/emails.py | ovp_users/emails.py | from ovp_core.emails import BaseMail
class UserMail(BaseMail):
"""
This class is responsible for firing emails for Users
"""
def __init__(self, user, async_mail=None):
super(UserMail, self).__init__(user.email, async_mail)
def sendWelcome(self, context={}):
"""
Sent when user registers
"""
return self.sendEmail('welcome', 'Welcome', context)
def sendRecoveryToken(self, context):
"""
Sent when volunteer requests recovery token
"""
context.update({
'user_email': user.email
})
return self.sendEmail('recoveryToken', 'Password recovery', context)
| from ovp_core.emails import BaseMail
class UserMail(BaseMail):
"""
This class is responsible for firing emails for Users
"""
def __init__(self, user, async_mail=None):
super(UserMail, self).__init__(user.email, async_mail)
def sendWelcome(self, context={}):
"""
Sent when user registers
"""
return self.sendEmail('welcome', 'Welcome', context)
def sendRecoveryToken(self, context):
"""
Sent when volunteer requests recovery token
"""
return self.sendEmail('recoveryToken', 'Password recovery', context)
| Revert "pass 'user_email' on sendRecoveryToken's context" | Revert "pass 'user_email' on sendRecoveryToken's context"
This info is already sent as 'email' on context. This reverts commit a366c2ed02cd7dda54607fe5e6a317603d442b47.
| Python | agpl-3.0 | OpenVolunteeringPlatform/django-ovp-users,OpenVolunteeringPlatform/django-ovp-users | from ovp_core.emails import BaseMail
class UserMail(BaseMail):
"""
This class is responsible for firing emails for Users
"""
def __init__(self, user, async_mail=None):
super(UserMail, self).__init__(user.email, async_mail)
def sendWelcome(self, context={}):
"""
Sent when user registers
"""
return self.sendEmail('welcome', 'Welcome', context)
def sendRecoveryToken(self, context):
"""
Sent when volunteer requests recovery token
"""
context.update({
'user_email': user.email
})
return self.sendEmail('recoveryToken', 'Password recovery', context)
Revert "pass 'user_email' on sendRecoveryToken's context"
This info is already sent as 'email' on context. This reverts commit a366c2ed02cd7dda54607fe5e6a317603d442b47. | from ovp_core.emails import BaseMail
class UserMail(BaseMail):
"""
This class is responsible for firing emails for Users
"""
def __init__(self, user, async_mail=None):
super(UserMail, self).__init__(user.email, async_mail)
def sendWelcome(self, context={}):
"""
Sent when user registers
"""
return self.sendEmail('welcome', 'Welcome', context)
def sendRecoveryToken(self, context):
"""
Sent when volunteer requests recovery token
"""
return self.sendEmail('recoveryToken', 'Password recovery', context)
| <commit_before>from ovp_core.emails import BaseMail
class UserMail(BaseMail):
"""
This class is responsible for firing emails for Users
"""
def __init__(self, user, async_mail=None):
super(UserMail, self).__init__(user.email, async_mail)
def sendWelcome(self, context={}):
"""
Sent when user registers
"""
return self.sendEmail('welcome', 'Welcome', context)
def sendRecoveryToken(self, context):
"""
Sent when volunteer requests recovery token
"""
context.update({
'user_email': user.email
})
return self.sendEmail('recoveryToken', 'Password recovery', context)
<commit_msg>Revert "pass 'user_email' on sendRecoveryToken's context"
This info is already sent as 'email' on context. This reverts commit a366c2ed02cd7dda54607fe5e6a317603d442b47.<commit_after> | from ovp_core.emails import BaseMail
class UserMail(BaseMail):
"""
This class is responsible for firing emails for Users
"""
def __init__(self, user, async_mail=None):
super(UserMail, self).__init__(user.email, async_mail)
def sendWelcome(self, context={}):
"""
Sent when user registers
"""
return self.sendEmail('welcome', 'Welcome', context)
def sendRecoveryToken(self, context):
"""
Sent when volunteer requests recovery token
"""
return self.sendEmail('recoveryToken', 'Password recovery', context)
| from ovp_core.emails import BaseMail
class UserMail(BaseMail):
"""
This class is responsible for firing emails for Users
"""
def __init__(self, user, async_mail=None):
super(UserMail, self).__init__(user.email, async_mail)
def sendWelcome(self, context={}):
"""
Sent when user registers
"""
return self.sendEmail('welcome', 'Welcome', context)
def sendRecoveryToken(self, context):
"""
Sent when volunteer requests recovery token
"""
context.update({
'user_email': user.email
})
return self.sendEmail('recoveryToken', 'Password recovery', context)
Revert "pass 'user_email' on sendRecoveryToken's context"
This info is already sent as 'email' on context. This reverts commit a366c2ed02cd7dda54607fe5e6a317603d442b47.from ovp_core.emails import BaseMail
class UserMail(BaseMail):
"""
This class is responsible for firing emails for Users
"""
def __init__(self, user, async_mail=None):
super(UserMail, self).__init__(user.email, async_mail)
def sendWelcome(self, context={}):
"""
Sent when user registers
"""
return self.sendEmail('welcome', 'Welcome', context)
def sendRecoveryToken(self, context):
"""
Sent when volunteer requests recovery token
"""
return self.sendEmail('recoveryToken', 'Password recovery', context)
| <commit_before>from ovp_core.emails import BaseMail
class UserMail(BaseMail):
"""
This class is responsible for firing emails for Users
"""
def __init__(self, user, async_mail=None):
super(UserMail, self).__init__(user.email, async_mail)
def sendWelcome(self, context={}):
"""
Sent when user registers
"""
return self.sendEmail('welcome', 'Welcome', context)
def sendRecoveryToken(self, context):
"""
Sent when volunteer requests recovery token
"""
context.update({
'user_email': user.email
})
return self.sendEmail('recoveryToken', 'Password recovery', context)
<commit_msg>Revert "pass 'user_email' on sendRecoveryToken's context"
This info is already sent as 'email' on context. This reverts commit a366c2ed02cd7dda54607fe5e6a317603d442b47.<commit_after>from ovp_core.emails import BaseMail
class UserMail(BaseMail):
"""
This class is responsible for firing emails for Users
"""
def __init__(self, user, async_mail=None):
super(UserMail, self).__init__(user.email, async_mail)
def sendWelcome(self, context={}):
"""
Sent when user registers
"""
return self.sendEmail('welcome', 'Welcome', context)
def sendRecoveryToken(self, context):
"""
Sent when volunteer requests recovery token
"""
return self.sendEmail('recoveryToken', 'Password recovery', context)
|
387ca6153d0f584f3caf27add6cf01d1da081fc3 | plasmapy/physics/__init__.py | plasmapy/physics/__init__.py | # 'physics' is a tentative name for this subpackage. Another
# possibility is 'plasma'. The organization is to be decided by v0.1.
from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency
from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length
from .relativity import Lorentz_factor
from .transport import Coulomb_logarithm
from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D,kappa_velocity_3D, kappa_velocity_1D
from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP
| # 'physics' is a tentative name for this subpackage. Another
# possibility is 'plasma'. The organization is to be decided by v0.1.
from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency
from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length
from .relativity import Lorentz_factor
from .transport import Coulomb_logarithm, classical_transport
from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D,kappa_velocity_3D, kappa_velocity_1D
from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP
| Add classical_transport to init file | Add classical_transport to init file
| Python | bsd-3-clause | StanczakDominik/PlasmaPy | # 'physics' is a tentative name for this subpackage. Another
# possibility is 'plasma'. The organization is to be decided by v0.1.
from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency
from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length
from .relativity import Lorentz_factor
from .transport import Coulomb_logarithm
from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D,kappa_velocity_3D, kappa_velocity_1D
from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP
Add classical_transport to init file | # 'physics' is a tentative name for this subpackage. Another
# possibility is 'plasma'. The organization is to be decided by v0.1.
from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency
from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length
from .relativity import Lorentz_factor
from .transport import Coulomb_logarithm, classical_transport
from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D,kappa_velocity_3D, kappa_velocity_1D
from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP
| <commit_before># 'physics' is a tentative name for this subpackage. Another
# possibility is 'plasma'. The organization is to be decided by v0.1.
from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency
from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length
from .relativity import Lorentz_factor
from .transport import Coulomb_logarithm
from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D,kappa_velocity_3D, kappa_velocity_1D
from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP
<commit_msg>Add classical_transport to init file<commit_after> | # 'physics' is a tentative name for this subpackage. Another
# possibility is 'plasma'. The organization is to be decided by v0.1.
from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency
from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length
from .relativity import Lorentz_factor
from .transport import Coulomb_logarithm, classical_transport
from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D,kappa_velocity_3D, kappa_velocity_1D
from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP
| # 'physics' is a tentative name for this subpackage. Another
# possibility is 'plasma'. The organization is to be decided by v0.1.
from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency
from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length
from .relativity import Lorentz_factor
from .transport import Coulomb_logarithm
from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D,kappa_velocity_3D, kappa_velocity_1D
from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP
Add classical_transport to init file# 'physics' is a tentative name for this subpackage. Another
# possibility is 'plasma'. The organization is to be decided by v0.1.
from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency
from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length
from .relativity import Lorentz_factor
from .transport import Coulomb_logarithm, classical_transport
from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D,kappa_velocity_3D, kappa_velocity_1D
from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP
| <commit_before># 'physics' is a tentative name for this subpackage. Another
# possibility is 'plasma'. The organization is to be decided by v0.1.
from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency
from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length
from .relativity import Lorentz_factor
from .transport import Coulomb_logarithm
from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D,kappa_velocity_3D, kappa_velocity_1D
from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP
<commit_msg>Add classical_transport to init file<commit_after># 'physics' is a tentative name for this subpackage. Another
# possibility is 'plasma'. The organization is to be decided by v0.1.
from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency
from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length
from .relativity import Lorentz_factor
from .transport import Coulomb_logarithm, classical_transport
from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D,kappa_velocity_3D, kappa_velocity_1D
from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP
|
2d45775e3823cf5a27df92350cbc89963aecc84c | gym/envs/tests/test_registration.py | gym/envs/tests/test_registration.py | # -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_make_deprecated():
try:
envs.make('Humanoid-v0')
except error.Error:
pass
else:
assert False
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
| # -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_make_deprecated():
try:
envs.make('Humanoid-v0')
except error.Error:
pass
else:
assert False
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1') # must match an env name but not the version above
except error.DeprecatedEnv:
pass
else:
assert False
try:
registry.spec('Unknown-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
| Fix broken registration test to handle new DeprecatedEnv error | Fix broken registration test to handle new DeprecatedEnv error
| Python | mit | d1hotpep/openai_gym,d1hotpep/openai_gym,dianchen96/gym,machinaut/gym,Farama-Foundation/Gymnasium,machinaut/gym,dianchen96/gym,Farama-Foundation/Gymnasium | # -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_make_deprecated():
try:
envs.make('Humanoid-v0')
except error.Error:
pass
else:
assert False
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
Fix broken registration test to handle new DeprecatedEnv error | # -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_make_deprecated():
try:
envs.make('Humanoid-v0')
except error.Error:
pass
else:
assert False
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1') # must match an env name but not the version above
except error.DeprecatedEnv:
pass
else:
assert False
try:
registry.spec('Unknown-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
| <commit_before># -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_make_deprecated():
try:
envs.make('Humanoid-v0')
except error.Error:
pass
else:
assert False
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
<commit_msg>Fix broken registration test to handle new DeprecatedEnv error<commit_after> | # -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_make_deprecated():
try:
envs.make('Humanoid-v0')
except error.Error:
pass
else:
assert False
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1') # must match an env name but not the version above
except error.DeprecatedEnv:
pass
else:
assert False
try:
registry.spec('Unknown-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
| # -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_make_deprecated():
try:
envs.make('Humanoid-v0')
except error.Error:
pass
else:
assert False
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
Fix broken registration test to handle new DeprecatedEnv error# -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_make_deprecated():
try:
envs.make('Humanoid-v0')
except error.Error:
pass
else:
assert False
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1') # must match an env name but not the version above
except error.DeprecatedEnv:
pass
else:
assert False
try:
registry.spec('Unknown-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
| <commit_before># -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_make_deprecated():
try:
envs.make('Humanoid-v0')
except error.Error:
pass
else:
assert False
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
<commit_msg>Fix broken registration test to handle new DeprecatedEnv error<commit_after># -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_make_deprecated():
try:
envs.make('Humanoid-v0')
except error.Error:
pass
else:
assert False
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1') # must match an env name but not the version above
except error.DeprecatedEnv:
pass
else:
assert False
try:
registry.spec('Unknown-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
|
7599f60a0e64f1d1d076695af67a212be751a89b | tests/rules_tests/grammarManipulation_tests/InactiveRulesTest.py | tests/rules_tests/grammarManipulation_tests/InactiveRulesTest.py | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_countWithInactive(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
_active = False
self.g.add_rule(Tmp1)
self.assertEqual(self.g.rules_count(), 0)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
self.g.add_rule(Tmp2)
self.assertEqual(self.g.rules_count(), 1)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
self.assertIn(Tmp2, self.g.rules())
if __name__ == '__main__':
main()
| Add test when rule with inactive is passed | Add test when rule with inactive is passed
| Python | mit | PatrikValkovic/grammpy | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
if __name__ == '__main__':
main()
Add test when rule with inactive is passed | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_countWithInactive(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
_active = False
self.g.add_rule(Tmp1)
self.assertEqual(self.g.rules_count(), 0)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
self.g.add_rule(Tmp2)
self.assertEqual(self.g.rules_count(), 1)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
self.assertIn(Tmp2, self.g.rules())
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
if __name__ == '__main__':
main()
<commit_msg>Add test when rule with inactive is passed<commit_after> | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_countWithInactive(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
_active = False
self.g.add_rule(Tmp1)
self.assertEqual(self.g.rules_count(), 0)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
self.g.add_rule(Tmp2)
self.assertEqual(self.g.rules_count(), 1)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
self.assertIn(Tmp2, self.g.rules())
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
if __name__ == '__main__':
main()
Add test when rule with inactive is passed#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_countWithInactive(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
_active = False
self.g.add_rule(Tmp1)
self.assertEqual(self.g.rules_count(), 0)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
self.g.add_rule(Tmp2)
self.assertEqual(self.g.rules_count(), 1)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
self.assertIn(Tmp2, self.g.rules())
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
if __name__ == '__main__':
main()
<commit_msg>Add test when rule with inactive is passed<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Grammar, Nonterminal, Rule as _R
from ..grammar import *
class InactiveRulesTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_countWithInactive(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
_active = False
self.g.add_rule(Tmp1)
self.assertEqual(self.g.rules_count(), 0)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
self.g.add_rule(Tmp2)
self.assertEqual(self.g.rules_count(), 1)
self.assertTrue(self.g.have_rule(Tmp1))
self.assertNotIn(Tmp1, self.g.rules())
self.assertIn(Tmp2, self.g.rules())
if __name__ == '__main__':
main()
|
1370018ac3f96a5a04d119afa95d482a7504119e | IPython/utils/dir2.py | IPython/utils/dir2.py | # encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .py3compat import string_types
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def safe_hasattr(obj, attr):
"""In recent versions of Python, hasattr() only catches AttributeError.
This catches all errors.
"""
try:
getattr(obj, attr)
return True
except:
return False
def dir2(obj):
"""dir2(obj) -> list of strings
Extended version of the Python builtin dir(), which does a few extra
checks, and handles Traits objects, which can confuse dir().
This version is guaranteed to return only a list of true strings, whereas
dir() returns anything that objects inject into themselves, even if they
are later not really valid for attribute access (many extension libraries
have such bugs).
"""
# Start building the attribute list via dir(), and then complete it
# with a few extra special-purpose calls.
try:
words = set(dir(obj))
except Exception:
# TypeError: dir(obj) does not return a list
words = set()
# filter out non-string attributes which may be stuffed by dir() calls
# and poor coding in third-party modules
words = [w for w in words if isinstance(w, string_types)]
return sorted(words)
| # encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .py3compat import string_types
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def safe_hasattr(obj, attr):
"""In recent versions of Python, hasattr() only catches AttributeError.
This catches all errors.
"""
try:
getattr(obj, attr)
return True
except:
return False
def dir2(obj):
"""dir2(obj) -> list of strings
Extended version of the Python builtin dir(), which does a few extra
checks.
This version is guaranteed to return only a list of true strings, whereas
dir() returns anything that objects inject into themselves, even if they
are later not really valid for attribute access (many extension libraries
have such bugs).
"""
# Start building the attribute list via dir(), and then complete it
# with a few extra special-purpose calls.
try:
words = set(dir(obj))
except Exception:
# TypeError: dir(obj) does not return a list
words = set()
# filter out non-string attributes which may be stuffed by dir() calls
# and poor coding in third-party modules
words = [w for w in words if isinstance(w, string_types)]
return sorted(words)
| Remove mention of Traits (removed in e1ced0b3) | Remove mention of Traits (removed in e1ced0b3)
| Python | bsd-3-clause | ipython/ipython,ipython/ipython | # encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .py3compat import string_types
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def safe_hasattr(obj, attr):
"""In recent versions of Python, hasattr() only catches AttributeError.
This catches all errors.
"""
try:
getattr(obj, attr)
return True
except:
return False
def dir2(obj):
"""dir2(obj) -> list of strings
Extended version of the Python builtin dir(), which does a few extra
checks, and handles Traits objects, which can confuse dir().
This version is guaranteed to return only a list of true strings, whereas
dir() returns anything that objects inject into themselves, even if they
are later not really valid for attribute access (many extension libraries
have such bugs).
"""
# Start building the attribute list via dir(), and then complete it
# with a few extra special-purpose calls.
try:
words = set(dir(obj))
except Exception:
# TypeError: dir(obj) does not return a list
words = set()
# filter out non-string attributes which may be stuffed by dir() calls
# and poor coding in third-party modules
words = [w for w in words if isinstance(w, string_types)]
return sorted(words)
Remove mention of Traits (removed in e1ced0b3) | # encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .py3compat import string_types
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def safe_hasattr(obj, attr):
"""In recent versions of Python, hasattr() only catches AttributeError.
This catches all errors.
"""
try:
getattr(obj, attr)
return True
except:
return False
def dir2(obj):
"""dir2(obj) -> list of strings
Extended version of the Python builtin dir(), which does a few extra
checks.
This version is guaranteed to return only a list of true strings, whereas
dir() returns anything that objects inject into themselves, even if they
are later not really valid for attribute access (many extension libraries
have such bugs).
"""
# Start building the attribute list via dir(), and then complete it
# with a few extra special-purpose calls.
try:
words = set(dir(obj))
except Exception:
# TypeError: dir(obj) does not return a list
words = set()
# filter out non-string attributes which may be stuffed by dir() calls
# and poor coding in third-party modules
words = [w for w in words if isinstance(w, string_types)]
return sorted(words)
| <commit_before># encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .py3compat import string_types
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def safe_hasattr(obj, attr):
"""In recent versions of Python, hasattr() only catches AttributeError.
This catches all errors.
"""
try:
getattr(obj, attr)
return True
except:
return False
def dir2(obj):
"""dir2(obj) -> list of strings
Extended version of the Python builtin dir(), which does a few extra
checks, and handles Traits objects, which can confuse dir().
This version is guaranteed to return only a list of true strings, whereas
dir() returns anything that objects inject into themselves, even if they
are later not really valid for attribute access (many extension libraries
have such bugs).
"""
# Start building the attribute list via dir(), and then complete it
# with a few extra special-purpose calls.
try:
words = set(dir(obj))
except Exception:
# TypeError: dir(obj) does not return a list
words = set()
# filter out non-string attributes which may be stuffed by dir() calls
# and poor coding in third-party modules
words = [w for w in words if isinstance(w, string_types)]
return sorted(words)
<commit_msg>Remove mention of Traits (removed in e1ced0b3)<commit_after> | # encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .py3compat import string_types
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def safe_hasattr(obj, attr):
"""In recent versions of Python, hasattr() only catches AttributeError.
This catches all errors.
"""
try:
getattr(obj, attr)
return True
except:
return False
def dir2(obj):
"""dir2(obj) -> list of strings
Extended version of the Python builtin dir(), which does a few extra
checks.
This version is guaranteed to return only a list of true strings, whereas
dir() returns anything that objects inject into themselves, even if they
are later not really valid for attribute access (many extension libraries
have such bugs).
"""
# Start building the attribute list via dir(), and then complete it
# with a few extra special-purpose calls.
try:
words = set(dir(obj))
except Exception:
# TypeError: dir(obj) does not return a list
words = set()
# filter out non-string attributes which may be stuffed by dir() calls
# and poor coding in third-party modules
words = [w for w in words if isinstance(w, string_types)]
return sorted(words)
| # encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .py3compat import string_types
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def safe_hasattr(obj, attr):
"""In recent versions of Python, hasattr() only catches AttributeError.
This catches all errors.
"""
try:
getattr(obj, attr)
return True
except:
return False
def dir2(obj):
"""dir2(obj) -> list of strings
Extended version of the Python builtin dir(), which does a few extra
checks, and handles Traits objects, which can confuse dir().
This version is guaranteed to return only a list of true strings, whereas
dir() returns anything that objects inject into themselves, even if they
are later not really valid for attribute access (many extension libraries
have such bugs).
"""
# Start building the attribute list via dir(), and then complete it
# with a few extra special-purpose calls.
try:
words = set(dir(obj))
except Exception:
# TypeError: dir(obj) does not return a list
words = set()
# filter out non-string attributes which may be stuffed by dir() calls
# and poor coding in third-party modules
words = [w for w in words if isinstance(w, string_types)]
return sorted(words)
Remove mention of Traits (removed in e1ced0b3)# encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .py3compat import string_types
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def safe_hasattr(obj, attr):
"""In recent versions of Python, hasattr() only catches AttributeError.
This catches all errors.
"""
try:
getattr(obj, attr)
return True
except:
return False
def dir2(obj):
"""dir2(obj) -> list of strings
Extended version of the Python builtin dir(), which does a few extra
checks.
This version is guaranteed to return only a list of true strings, whereas
dir() returns anything that objects inject into themselves, even if they
are later not really valid for attribute access (many extension libraries
have such bugs).
"""
# Start building the attribute list via dir(), and then complete it
# with a few extra special-purpose calls.
try:
words = set(dir(obj))
except Exception:
# TypeError: dir(obj) does not return a list
words = set()
# filter out non-string attributes which may be stuffed by dir() calls
# and poor coding in third-party modules
words = [w for w in words if isinstance(w, string_types)]
return sorted(words)
| <commit_before># encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .py3compat import string_types
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def safe_hasattr(obj, attr):
"""In recent versions of Python, hasattr() only catches AttributeError.
This catches all errors.
"""
try:
getattr(obj, attr)
return True
except:
return False
def dir2(obj):
"""dir2(obj) -> list of strings
Extended version of the Python builtin dir(), which does a few extra
checks, and handles Traits objects, which can confuse dir().
This version is guaranteed to return only a list of true strings, whereas
dir() returns anything that objects inject into themselves, even if they
are later not really valid for attribute access (many extension libraries
have such bugs).
"""
# Start building the attribute list via dir(), and then complete it
# with a few extra special-purpose calls.
try:
words = set(dir(obj))
except Exception:
# TypeError: dir(obj) does not return a list
words = set()
# filter out non-string attributes which may be stuffed by dir() calls
# and poor coding in third-party modules
words = [w for w in words if isinstance(w, string_types)]
return sorted(words)
<commit_msg>Remove mention of Traits (removed in e1ced0b3)<commit_after># encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .py3compat import string_types
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def safe_hasattr(obj, attr):
"""In recent versions of Python, hasattr() only catches AttributeError.
This catches all errors.
"""
try:
getattr(obj, attr)
return True
except:
return False
def dir2(obj):
"""dir2(obj) -> list of strings
Extended version of the Python builtin dir(), which does a few extra
checks.
This version is guaranteed to return only a list of true strings, whereas
dir() returns anything that objects inject into themselves, even if they
are later not really valid for attribute access (many extension libraries
have such bugs).
"""
# Start building the attribute list via dir(), and then complete it
# with a few extra special-purpose calls.
try:
words = set(dir(obj))
except Exception:
# TypeError: dir(obj) does not return a list
words = set()
# filter out non-string attributes which may be stuffed by dir() calls
# and poor coding in third-party modules
words = [w for w in words if isinstance(w, string_types)]
return sorted(words)
|
38a2a2d6a1e8307e1e0ed58769df7b77d2ef9355 | loom/tasks.py | loom/tasks.py | from fabric.api import *
import subprocess
__all__ = ['ssh', 'all', 'uptime', 'upgrade', 'restart']
@task
def all():
"""
Select all hosts
"""
env.hosts = []
for hosts in env.roledefs.values():
env.hosts.extend(hosts)
# remove dupes
env.hosts = list(set(env.hosts))
@task
def uptime():
run('uptime')
@task
def upgrade():
"""
Upgrade apt packages
"""
with settings(hide('stdout'), show('running')):
sudo('apt-get update')
sudo("apt-get upgrade -y")
@task
def ssh(*cmd):
"""
Open an interactive ssh session
"""
run = ['ssh', '-A', '-t']
if env.key_filename:
run.extend(["-i", env.key_filename])
run.append('%s@%s' % (env.user, env.host_string))
run += cmd
subprocess.call(run)
@task
def restart(service):
"""
Restart or start an upstart service
"""
with settings(warn_only=True):
result = sudo('restart %s' % service)
if result.failed:
sudo('start %s' % service)
@task
def reboot():
"""
Reboot a server
"""
run('reboot')
| from fabric.api import *
import subprocess
__all__ = ['ssh', 'all', 'uptime', 'upgrade', 'restart']
@task
def all():
"""
Select all hosts
"""
env.hosts = []
for hosts in env.roledefs.values():
env.hosts.extend(hosts)
# remove dupes
env.hosts = list(set(env.hosts))
@task
def uptime():
run('uptime')
@task
def upgrade():
"""
Upgrade apt packages
"""
with settings(hide('stdout'), show('running')):
sudo('apt-get update')
sudo("apt-get upgrade")
@task
def ssh(*cmd):
"""
Open an interactive ssh session
"""
run = ['ssh', '-A', '-t']
if env.key_filename:
run.extend(["-i", env.key_filename])
run.append('%s@%s' % (env.user, env.host_string))
run += cmd
subprocess.call(run)
@task
def restart(service):
"""
Restart or start an upstart service
"""
with settings(warn_only=True):
result = sudo('restart %s' % service)
if result.failed:
sudo('start %s' % service)
@task
def reboot():
    """
    Reboot a server
    """
    # NOTE(review): issued via ``run`` (not sudo); assumes the remote user
    # is allowed to reboot — confirm against the deployment setup.
    run('reboot')
| Remove "-y" flag from apt-get upgrade | Remove "-y" flag from apt-get upgrade
| Python | bsd-3-clause | nithinphilips/loom,bfirsh/loom,nithinphilips/loom,bfirsh/loom | from fabric.api import *
import subprocess
__all__ = ['ssh', 'all', 'uptime', 'upgrade', 'restart']
@task
def all():
"""
Select all hosts
"""
env.hosts = []
for hosts in env.roledefs.values():
env.hosts.extend(hosts)
# remove dupes
env.hosts = list(set(env.hosts))
@task
def uptime():
run('uptime')
@task
def upgrade():
"""
Upgrade apt packages
"""
with settings(hide('stdout'), show('running')):
sudo('apt-get update')
sudo("apt-get upgrade -y")
@task
def ssh(*cmd):
"""
Open an interactive ssh session
"""
run = ['ssh', '-A', '-t']
if env.key_filename:
run.extend(["-i", env.key_filename])
run.append('%s@%s' % (env.user, env.host_string))
run += cmd
subprocess.call(run)
@task
def restart(service):
"""
Restart or start an upstart service
"""
with settings(warn_only=True):
result = sudo('restart %s' % service)
if result.failed:
sudo('start %s' % service)
@task
def reboot():
"""
Reboot a server
"""
run('reboot')
Remove "-y" flag from apt-get upgrade | from fabric.api import *
import subprocess
__all__ = ['ssh', 'all', 'uptime', 'upgrade', 'restart']
@task
def all():
"""
Select all hosts
"""
env.hosts = []
for hosts in env.roledefs.values():
env.hosts.extend(hosts)
# remove dupes
env.hosts = list(set(env.hosts))
@task
def uptime():
run('uptime')
@task
def upgrade():
"""
Upgrade apt packages
"""
with settings(hide('stdout'), show('running')):
sudo('apt-get update')
sudo("apt-get upgrade")
@task
def ssh(*cmd):
"""
Open an interactive ssh session
"""
run = ['ssh', '-A', '-t']
if env.key_filename:
run.extend(["-i", env.key_filename])
run.append('%s@%s' % (env.user, env.host_string))
run += cmd
subprocess.call(run)
@task
def restart(service):
"""
Restart or start an upstart service
"""
with settings(warn_only=True):
result = sudo('restart %s' % service)
if result.failed:
sudo('start %s' % service)
@task
def reboot():
"""
Reboot a server
"""
run('reboot')
| <commit_before>from fabric.api import *
import subprocess
__all__ = ['ssh', 'all', 'uptime', 'upgrade', 'restart']
@task
def all():
"""
Select all hosts
"""
env.hosts = []
for hosts in env.roledefs.values():
env.hosts.extend(hosts)
# remove dupes
env.hosts = list(set(env.hosts))
@task
def uptime():
run('uptime')
@task
def upgrade():
"""
Upgrade apt packages
"""
with settings(hide('stdout'), show('running')):
sudo('apt-get update')
sudo("apt-get upgrade -y")
@task
def ssh(*cmd):
"""
Open an interactive ssh session
"""
run = ['ssh', '-A', '-t']
if env.key_filename:
run.extend(["-i", env.key_filename])
run.append('%s@%s' % (env.user, env.host_string))
run += cmd
subprocess.call(run)
@task
def restart(service):
"""
Restart or start an upstart service
"""
with settings(warn_only=True):
result = sudo('restart %s' % service)
if result.failed:
sudo('start %s' % service)
@task
def reboot():
"""
Reboot a server
"""
run('reboot')
<commit_msg>Remove "-y" flag from apt-get upgrade<commit_after> | from fabric.api import *
import subprocess
__all__ = ['ssh', 'all', 'uptime', 'upgrade', 'restart']
@task
def all():
"""
Select all hosts
"""
env.hosts = []
for hosts in env.roledefs.values():
env.hosts.extend(hosts)
# remove dupes
env.hosts = list(set(env.hosts))
@task
def uptime():
run('uptime')
@task
def upgrade():
"""
Upgrade apt packages
"""
with settings(hide('stdout'), show('running')):
sudo('apt-get update')
sudo("apt-get upgrade")
@task
def ssh(*cmd):
"""
Open an interactive ssh session
"""
run = ['ssh', '-A', '-t']
if env.key_filename:
run.extend(["-i", env.key_filename])
run.append('%s@%s' % (env.user, env.host_string))
run += cmd
subprocess.call(run)
@task
def restart(service):
"""
Restart or start an upstart service
"""
with settings(warn_only=True):
result = sudo('restart %s' % service)
if result.failed:
sudo('start %s' % service)
@task
def reboot():
"""
Reboot a server
"""
run('reboot')
| from fabric.api import *
import subprocess
__all__ = ['ssh', 'all', 'uptime', 'upgrade', 'restart']
@task
def all():
"""
Select all hosts
"""
env.hosts = []
for hosts in env.roledefs.values():
env.hosts.extend(hosts)
# remove dupes
env.hosts = list(set(env.hosts))
@task
def uptime():
run('uptime')
@task
def upgrade():
"""
Upgrade apt packages
"""
with settings(hide('stdout'), show('running')):
sudo('apt-get update')
sudo("apt-get upgrade -y")
@task
def ssh(*cmd):
"""
Open an interactive ssh session
"""
run = ['ssh', '-A', '-t']
if env.key_filename:
run.extend(["-i", env.key_filename])
run.append('%s@%s' % (env.user, env.host_string))
run += cmd
subprocess.call(run)
@task
def restart(service):
"""
Restart or start an upstart service
"""
with settings(warn_only=True):
result = sudo('restart %s' % service)
if result.failed:
sudo('start %s' % service)
@task
def reboot():
"""
Reboot a server
"""
run('reboot')
Remove "-y" flag from apt-get upgradefrom fabric.api import *
import subprocess
__all__ = ['ssh', 'all', 'uptime', 'upgrade', 'restart']
@task
def all():
"""
Select all hosts
"""
env.hosts = []
for hosts in env.roledefs.values():
env.hosts.extend(hosts)
# remove dupes
env.hosts = list(set(env.hosts))
@task
def uptime():
run('uptime')
@task
def upgrade():
"""
Upgrade apt packages
"""
with settings(hide('stdout'), show('running')):
sudo('apt-get update')
sudo("apt-get upgrade")
@task
def ssh(*cmd):
"""
Open an interactive ssh session
"""
run = ['ssh', '-A', '-t']
if env.key_filename:
run.extend(["-i", env.key_filename])
run.append('%s@%s' % (env.user, env.host_string))
run += cmd
subprocess.call(run)
@task
def restart(service):
"""
Restart or start an upstart service
"""
with settings(warn_only=True):
result = sudo('restart %s' % service)
if result.failed:
sudo('start %s' % service)
@task
def reboot():
"""
Reboot a server
"""
run('reboot')
| <commit_before>from fabric.api import *
import subprocess
__all__ = ['ssh', 'all', 'uptime', 'upgrade', 'restart']
@task
def all():
"""
Select all hosts
"""
env.hosts = []
for hosts in env.roledefs.values():
env.hosts.extend(hosts)
# remove dupes
env.hosts = list(set(env.hosts))
@task
def uptime():
run('uptime')
@task
def upgrade():
"""
Upgrade apt packages
"""
with settings(hide('stdout'), show('running')):
sudo('apt-get update')
sudo("apt-get upgrade -y")
@task
def ssh(*cmd):
"""
Open an interactive ssh session
"""
run = ['ssh', '-A', '-t']
if env.key_filename:
run.extend(["-i", env.key_filename])
run.append('%s@%s' % (env.user, env.host_string))
run += cmd
subprocess.call(run)
@task
def restart(service):
"""
Restart or start an upstart service
"""
with settings(warn_only=True):
result = sudo('restart %s' % service)
if result.failed:
sudo('start %s' % service)
@task
def reboot():
"""
Reboot a server
"""
run('reboot')
<commit_msg>Remove "-y" flag from apt-get upgrade<commit_after>from fabric.api import *
import subprocess
__all__ = ['ssh', 'all', 'uptime', 'upgrade', 'restart']
@task
def all():
"""
Select all hosts
"""
env.hosts = []
for hosts in env.roledefs.values():
env.hosts.extend(hosts)
# remove dupes
env.hosts = list(set(env.hosts))
@task
def uptime():
run('uptime')
@task
def upgrade():
"""
Upgrade apt packages
"""
with settings(hide('stdout'), show('running')):
sudo('apt-get update')
sudo("apt-get upgrade")
@task
def ssh(*cmd):
"""
Open an interactive ssh session
"""
run = ['ssh', '-A', '-t']
if env.key_filename:
run.extend(["-i", env.key_filename])
run.append('%s@%s' % (env.user, env.host_string))
run += cmd
subprocess.call(run)
@task
def restart(service):
"""
Restart or start an upstart service
"""
with settings(warn_only=True):
result = sudo('restart %s' % service)
if result.failed:
sudo('start %s' % service)
@task
def reboot():
"""
Reboot a server
"""
run('reboot')
|
e9541dbd1959b7a2ad1ee9145d3168c5898fe204 | python/generate_sorl_xml.py | python/generate_sorl_xml.py | #!/usr/bin/env python3
import sys
from message import Message
from persistence import Persistence
from const import XML_FILE_EXTENSION
message = Persistence.get_message_from_file(sys.argv[1])
if (message is not None):
Persistence.message_to_solr_xml(message, sys.argv[2] + message.identifier + XML_FILE_EXTENSION)
| #!/usr/bin/env python3
import sys
from document import Document
from persistence import Persistence
from const import XML_FILE_EXTENSION
# Load the document named by argv[1]; if it parsed, write it out as Solr
# XML into the output directory given as argv[2].
document = Persistence.get_document_from_file(sys.argv[1])
if document is not None:
    destination = sys.argv[2] + document.identifier + XML_FILE_EXTENSION
    Persistence.document_to_solr_xml(document, destination)
| Fix wrong naming in generate...py | Fix wrong naming in generate...py
Signed-off-by: Fabio Benigno <7de248c2f6c04081ad3a3569b5954b1b677fee3b@gmail.com>
| Python | apache-2.0 | fpbfabio/newsgroups1000s,fpbfabio/dblp_data_processing | #!/usr/bin/env python3
import sys
from message import Message
from persistence import Persistence
from const import XML_FILE_EXTENSION
message = Persistence.get_message_from_file(sys.argv[1])
if (message is not None):
Persistence.message_to_solr_xml(message, sys.argv[2] + message.identifier + XML_FILE_EXTENSION)
Fix wrong naming in generate...py
Signed-off-by: Fabio Benigno <7de248c2f6c04081ad3a3569b5954b1b677fee3b@gmail.com> | #!/usr/bin/env python3
import sys
from document import Document
from persistence import Persistence
from const import XML_FILE_EXTENSION
document = Persistence.get_document_from_file(sys.argv[1])
if (document is not None):
Persistence.document_to_solr_xml(document, sys.argv[2] + document.identifier + XML_FILE_EXTENSION)
| <commit_before>#!/usr/bin/env python3
import sys
from message import Message
from persistence import Persistence
from const import XML_FILE_EXTENSION
message = Persistence.get_message_from_file(sys.argv[1])
if (message is not None):
Persistence.message_to_solr_xml(message, sys.argv[2] + message.identifier + XML_FILE_EXTENSION)
<commit_msg>Fix wrong naming in generate...py
Signed-off-by: Fabio Benigno <7de248c2f6c04081ad3a3569b5954b1b677fee3b@gmail.com><commit_after> | #!/usr/bin/env python3
import sys
from document import Document
from persistence import Persistence
from const import XML_FILE_EXTENSION
document = Persistence.get_document_from_file(sys.argv[1])
if (document is not None):
Persistence.document_to_solr_xml(document, sys.argv[2] + document.identifier + XML_FILE_EXTENSION)
| #!/usr/bin/env python3
import sys
from message import Message
from persistence import Persistence
from const import XML_FILE_EXTENSION
message = Persistence.get_message_from_file(sys.argv[1])
if (message is not None):
Persistence.message_to_solr_xml(message, sys.argv[2] + message.identifier + XML_FILE_EXTENSION)
Fix wrong naming in generate...py
Signed-off-by: Fabio Benigno <7de248c2f6c04081ad3a3569b5954b1b677fee3b@gmail.com>#!/usr/bin/env python3
import sys
from document import Document
from persistence import Persistence
from const import XML_FILE_EXTENSION
document = Persistence.get_document_from_file(sys.argv[1])
if (document is not None):
Persistence.document_to_solr_xml(document, sys.argv[2] + document.identifier + XML_FILE_EXTENSION)
| <commit_before>#!/usr/bin/env python3
import sys
from message import Message
from persistence import Persistence
from const import XML_FILE_EXTENSION
message = Persistence.get_message_from_file(sys.argv[1])
if (message is not None):
Persistence.message_to_solr_xml(message, sys.argv[2] + message.identifier + XML_FILE_EXTENSION)
<commit_msg>Fix wrong naming in generate...py
Signed-off-by: Fabio Benigno <7de248c2f6c04081ad3a3569b5954b1b677fee3b@gmail.com><commit_after>#!/usr/bin/env python3
import sys
from document import Document
from persistence import Persistence
from const import XML_FILE_EXTENSION
document = Persistence.get_document_from_file(sys.argv[1])
if (document is not None):
Persistence.document_to_solr_xml(document, sys.argv[2] + document.identifier + XML_FILE_EXTENSION)
|
76afe0ef2f45ff7ff62dd5ea4d1217ce794770ba | us_ignite/snippets/management/commands/snippets_load_fixtures.py | us_ignite/snippets/management/commands/snippets_load_fixtures.py | from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
| from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
# Seed data for the snippets the site templates expect to exist.  Each
# entry mirrors the ``Snippet`` model fields; ``status`` is filled in at
# import time by the management command in this module.
FIXTURES = [
    {
        'slug': 'home-box',
        'name': 'UP NEXT: LOREM IPSUM',
        'body': '',
        'url_text': 'GET INVOLVED',
        'url': '',
    },
    {
        'slug': 'featured',
        'name': 'FEATURED CONTENT',
        'body': '',
        'url_text': 'FEATURED',
        'url': '',
    },
    {
        'slug': 'welcome-email',
        'name': 'Welcome to US Ignite',
        'body': '',
        'url_text': '',
        'url': '',
    },
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
| Load initial fixture for the welcome email. | Load initial fixture for the welcome email.
https://github.com/madewithbytes/us_ignite/issues/172
| Python | bsd-3-clause | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite | from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
Load initial fixture for the welcome email.
https://github.com/madewithbytes/us_ignite/issues/172 | from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
{
'slug': 'welcome-email',
'name': 'Welcome to US Ignite',
'body': '',
'url_text': '',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
| <commit_before>from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
<commit_msg>Load initial fixture for the welcome email.
https://github.com/madewithbytes/us_ignite/issues/172<commit_after> | from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
{
'slug': 'welcome-email',
'name': 'Welcome to US Ignite',
'body': '',
'url_text': '',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
| from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
Load initial fixture for the welcome email.
https://github.com/madewithbytes/us_ignite/issues/172from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
{
'slug': 'welcome-email',
'name': 'Welcome to US Ignite',
'body': '',
'url_text': '',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
| <commit_before>from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
<commit_msg>Load initial fixture for the welcome email.
https://github.com/madewithbytes/us_ignite/issues/172<commit_after>from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
{
'slug': 'welcome-email',
'name': 'Welcome to US Ignite',
'body': '',
'url_text': '',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
|
b8386212826701131e3c5aaaadef726df97f6646 | api/serializers.py | api/serializers.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
from core.models import Timesheet, Task, Entry
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'url', 'username',)
class TimesheetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Timesheet
fields = ('id', 'url', 'name',)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
timesheet_details = TimesheetSerializer(source='timesheet', read_only=True)
class Meta:
model = Task
fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',)
class EntrySerializer(serializers.HyperlinkedModelSerializer):
task_details = TaskSerializer(source='task', read_only=True)
user_details = UserSerializer(source='user', read_only=True)
class Meta:
model = Entry
fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details',
'date', 'duration', 'note',)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
from core.models import Timesheet, Task, Entry
class UserSerializer(serializers.HyperlinkedModelSerializer):
    # Minimal user representation: identity fields only, no profile data.
    class Meta:
        model = User
        fields = ('id', 'url', 'username',)
class TimesheetSerializer(serializers.HyperlinkedModelSerializer):
    # Exposes the timesheet's name together with its ``complete`` flag.
    class Meta:
        model = Timesheet
        fields = ('id', 'url', 'name', 'complete',)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
    # Read-only nested copy of the parent timesheet, exposed alongside the
    # writable ``timesheet`` hyperlink.
    timesheet_details = TimesheetSerializer(source='timesheet', read_only=True)

    class Meta:
        model = Task
        fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',
                  'complete',)
class EntrySerializer(serializers.HyperlinkedModelSerializer):
    # Nested, read-only expansions of the writable ``task`` and ``user``
    # hyperlinks.
    task_details = TaskSerializer(source='task', read_only=True)
    user_details = UserSerializer(source='user', read_only=True)

    class Meta:
        model = Entry
        fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details',
                  'date', 'duration', 'note',)
| Add complete field to task and timesheet api | Add complete field to task and timesheet api
| Python | bsd-2-clause | Leahelisabeth/timestrap,overshard/timestrap,cdubz/timestrap,Leahelisabeth/timestrap,overshard/timestrap,muhleder/timestrap,Leahelisabeth/timestrap,cdubz/timestrap,overshard/timestrap,muhleder/timestrap,cdubz/timestrap,muhleder/timestrap,Leahelisabeth/timestrap | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
from core.models import Timesheet, Task, Entry
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'url', 'username',)
class TimesheetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Timesheet
fields = ('id', 'url', 'name',)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
timesheet_details = TimesheetSerializer(source='timesheet', read_only=True)
class Meta:
model = Task
fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',)
class EntrySerializer(serializers.HyperlinkedModelSerializer):
task_details = TaskSerializer(source='task', read_only=True)
user_details = UserSerializer(source='user', read_only=True)
class Meta:
model = Entry
fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details',
'date', 'duration', 'note',)
Add complete field to task and timesheet api | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
from core.models import Timesheet, Task, Entry
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'url', 'username',)
class TimesheetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Timesheet
fields = ('id', 'url', 'name', 'complete',)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
timesheet_details = TimesheetSerializer(source='timesheet', read_only=True)
class Meta:
model = Task
fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',
'complete',)
class EntrySerializer(serializers.HyperlinkedModelSerializer):
task_details = TaskSerializer(source='task', read_only=True)
user_details = UserSerializer(source='user', read_only=True)
class Meta:
model = Entry
fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details',
'date', 'duration', 'note',)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
from core.models import Timesheet, Task, Entry
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'url', 'username',)
class TimesheetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Timesheet
fields = ('id', 'url', 'name',)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
timesheet_details = TimesheetSerializer(source='timesheet', read_only=True)
class Meta:
model = Task
fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',)
class EntrySerializer(serializers.HyperlinkedModelSerializer):
task_details = TaskSerializer(source='task', read_only=True)
user_details = UserSerializer(source='user', read_only=True)
class Meta:
model = Entry
fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details',
'date', 'duration', 'note',)
<commit_msg>Add complete field to task and timesheet api<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
from core.models import Timesheet, Task, Entry
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'url', 'username',)
class TimesheetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Timesheet
fields = ('id', 'url', 'name', 'complete',)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
timesheet_details = TimesheetSerializer(source='timesheet', read_only=True)
class Meta:
model = Task
fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',
'complete',)
class EntrySerializer(serializers.HyperlinkedModelSerializer):
task_details = TaskSerializer(source='task', read_only=True)
user_details = UserSerializer(source='user', read_only=True)
class Meta:
model = Entry
fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details',
'date', 'duration', 'note',)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
from core.models import Timesheet, Task, Entry
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'url', 'username',)
class TimesheetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Timesheet
fields = ('id', 'url', 'name',)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
timesheet_details = TimesheetSerializer(source='timesheet', read_only=True)
class Meta:
model = Task
fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',)
class EntrySerializer(serializers.HyperlinkedModelSerializer):
task_details = TaskSerializer(source='task', read_only=True)
user_details = UserSerializer(source='user', read_only=True)
class Meta:
model = Entry
fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details',
'date', 'duration', 'note',)
Add complete field to task and timesheet api# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
from core.models import Timesheet, Task, Entry
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'url', 'username',)
class TimesheetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Timesheet
fields = ('id', 'url', 'name', 'complete',)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
timesheet_details = TimesheetSerializer(source='timesheet', read_only=True)
class Meta:
model = Task
fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',
'complete',)
class EntrySerializer(serializers.HyperlinkedModelSerializer):
task_details = TaskSerializer(source='task', read_only=True)
user_details = UserSerializer(source='user', read_only=True)
class Meta:
model = Entry
fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details',
'date', 'duration', 'note',)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
from core.models import Timesheet, Task, Entry
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'url', 'username',)
class TimesheetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Timesheet
fields = ('id', 'url', 'name',)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
timesheet_details = TimesheetSerializer(source='timesheet', read_only=True)
class Meta:
model = Task
fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',)
class EntrySerializer(serializers.HyperlinkedModelSerializer):
task_details = TaskSerializer(source='task', read_only=True)
user_details = UserSerializer(source='user', read_only=True)
class Meta:
model = Entry
fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details',
'date', 'duration', 'note',)
<commit_msg>Add complete field to task and timesheet api<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
from core.models import Timesheet, Task, Entry
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer for Django auth ``User`` records."""
    class Meta:
        # Exposes only identity fields; no profile or permission data.
        model = User
        fields = ('id', 'url', 'username',)
class TimesheetSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer for ``Timesheet`` records, including the
    ``complete`` flag."""
    class Meta:
        model = Timesheet
        fields = ('id', 'url', 'name', 'complete',)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer for ``Task`` records.

    Exposes both the writable ``timesheet`` relation and a nested,
    read-only ``timesheet_details`` view of the same relation.
    """
    # Read-only nested representation of the related timesheet
    # (sourced from the same 'timesheet' field as the writable link).
    timesheet_details = TimesheetSerializer(source='timesheet', read_only=True)
    class Meta:
        model = Task
        fields = ('id', 'url', 'timesheet', 'timesheet_details', 'name',
                  'complete',)
class EntrySerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer for ``Entry`` records.

    Alongside the writable ``task`` and ``user`` relations it exposes
    read-only nested ``task_details`` and ``user_details`` views.
    """
    # Read-only nested representations of the related task and user.
    task_details = TaskSerializer(source='task', read_only=True)
    user_details = UserSerializer(source='user', read_only=True)
    class Meta:
        model = Entry
        fields = ('id', 'url', 'task', 'task_details', 'user', 'user_details',
                  'date', 'duration', 'note',)
|
ffb42ba8e9b0a5d7a269ee9d13a5347f4ffee563 | mama_cas/tests/test_callbacks.py | mama_cas/tests/test_callbacks.py | from django.test import TestCase
from .factories import UserFactory
from mama_cas.callbacks import user_model_attributes
from mama_cas.callbacks import user_name_attributes
class CallbacksTests(TestCase):
url = 'http://www.example.com/'
def setUp(self):
self.user = UserFactory()
def test_user_name(self):
"""
The callback should return a username and full_name
attribute.
"""
attributes = user_name_attributes(self.user, self.url)
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
self.assertIn('full_name', attributes)
self.assertEqual(attributes['full_name'], 'Ellen Cohen')
def test_user_model_attributes(self):
"""
The callback should return at least a username attribute.
"""
attributes = user_model_attributes(self.user, self.url)
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
| from django.test import TestCase
from .factories import UserFactory
from mama_cas.callbacks import user_model_attributes
from mama_cas.callbacks import user_name_attributes
class CallbacksTests(TestCase):
def setUp(self):
self.user = UserFactory()
def test_user_name_attributes(self):
"""
The callback should return a username, full_name and
short_name attribute.
"""
attributes = user_name_attributes(self.user, 'http://www.example.com/')
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
self.assertIn('full_name', attributes)
self.assertEqual(attributes['full_name'], 'Ellen Cohen')
self.assertIn('short_name', attributes)
self.assertEqual(attributes['short_name'], 'Ellen')
def test_user_model_attributes(self):
"""The callback should return at least a username attribute."""
attributes = user_model_attributes(self.user, 'http://www.example.com/')
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
| Test short_name for user name attributes callback | Test short_name for user name attributes callback
| Python | bsd-3-clause | jbittel/django-mama-cas,orbitvu/django-mama-cas,orbitvu/django-mama-cas,jbittel/django-mama-cas | from django.test import TestCase
from .factories import UserFactory
from mama_cas.callbacks import user_model_attributes
from mama_cas.callbacks import user_name_attributes
class CallbacksTests(TestCase):
url = 'http://www.example.com/'
def setUp(self):
self.user = UserFactory()
def test_user_name(self):
"""
The callback should return a username and full_name
attribute.
"""
attributes = user_name_attributes(self.user, self.url)
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
self.assertIn('full_name', attributes)
self.assertEqual(attributes['full_name'], 'Ellen Cohen')
def test_user_model_attributes(self):
"""
The callback should return at least a username attribute.
"""
attributes = user_model_attributes(self.user, self.url)
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
Test short_name for user name attributes callback | from django.test import TestCase
from .factories import UserFactory
from mama_cas.callbacks import user_model_attributes
from mama_cas.callbacks import user_name_attributes
class CallbacksTests(TestCase):
def setUp(self):
self.user = UserFactory()
def test_user_name_attributes(self):
"""
The callback should return a username, full_name and
short_name attribute.
"""
attributes = user_name_attributes(self.user, 'http://www.example.com/')
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
self.assertIn('full_name', attributes)
self.assertEqual(attributes['full_name'], 'Ellen Cohen')
self.assertIn('short_name', attributes)
self.assertEqual(attributes['short_name'], 'Ellen')
def test_user_model_attributes(self):
"""The callback should return at least a username attribute."""
attributes = user_model_attributes(self.user, 'http://www.example.com/')
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
| <commit_before>from django.test import TestCase
from .factories import UserFactory
from mama_cas.callbacks import user_model_attributes
from mama_cas.callbacks import user_name_attributes
class CallbacksTests(TestCase):
url = 'http://www.example.com/'
def setUp(self):
self.user = UserFactory()
def test_user_name(self):
"""
The callback should return a username and full_name
attribute.
"""
attributes = user_name_attributes(self.user, self.url)
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
self.assertIn('full_name', attributes)
self.assertEqual(attributes['full_name'], 'Ellen Cohen')
def test_user_model_attributes(self):
"""
The callback should return at least a username attribute.
"""
attributes = user_model_attributes(self.user, self.url)
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
<commit_msg>Test short_name for user name attributes callback<commit_after> | from django.test import TestCase
from .factories import UserFactory
from mama_cas.callbacks import user_model_attributes
from mama_cas.callbacks import user_name_attributes
class CallbacksTests(TestCase):
def setUp(self):
self.user = UserFactory()
def test_user_name_attributes(self):
"""
The callback should return a username, full_name and
short_name attribute.
"""
attributes = user_name_attributes(self.user, 'http://www.example.com/')
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
self.assertIn('full_name', attributes)
self.assertEqual(attributes['full_name'], 'Ellen Cohen')
self.assertIn('short_name', attributes)
self.assertEqual(attributes['short_name'], 'Ellen')
def test_user_model_attributes(self):
"""The callback should return at least a username attribute."""
attributes = user_model_attributes(self.user, 'http://www.example.com/')
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
| from django.test import TestCase
from .factories import UserFactory
from mama_cas.callbacks import user_model_attributes
from mama_cas.callbacks import user_name_attributes
class CallbacksTests(TestCase):
url = 'http://www.example.com/'
def setUp(self):
self.user = UserFactory()
def test_user_name(self):
"""
The callback should return a username and full_name
attribute.
"""
attributes = user_name_attributes(self.user, self.url)
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
self.assertIn('full_name', attributes)
self.assertEqual(attributes['full_name'], 'Ellen Cohen')
def test_user_model_attributes(self):
"""
The callback should return at least a username attribute.
"""
attributes = user_model_attributes(self.user, self.url)
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
Test short_name for user name attributes callbackfrom django.test import TestCase
from .factories import UserFactory
from mama_cas.callbacks import user_model_attributes
from mama_cas.callbacks import user_name_attributes
class CallbacksTests(TestCase):
def setUp(self):
self.user = UserFactory()
def test_user_name_attributes(self):
"""
The callback should return a username, full_name and
short_name attribute.
"""
attributes = user_name_attributes(self.user, 'http://www.example.com/')
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
self.assertIn('full_name', attributes)
self.assertEqual(attributes['full_name'], 'Ellen Cohen')
self.assertIn('short_name', attributes)
self.assertEqual(attributes['short_name'], 'Ellen')
def test_user_model_attributes(self):
"""The callback should return at least a username attribute."""
attributes = user_model_attributes(self.user, 'http://www.example.com/')
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
| <commit_before>from django.test import TestCase
from .factories import UserFactory
from mama_cas.callbacks import user_model_attributes
from mama_cas.callbacks import user_name_attributes
class CallbacksTests(TestCase):
url = 'http://www.example.com/'
def setUp(self):
self.user = UserFactory()
def test_user_name(self):
"""
The callback should return a username and full_name
attribute.
"""
attributes = user_name_attributes(self.user, self.url)
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
self.assertIn('full_name', attributes)
self.assertEqual(attributes['full_name'], 'Ellen Cohen')
def test_user_model_attributes(self):
"""
The callback should return at least a username attribute.
"""
attributes = user_model_attributes(self.user, self.url)
self.assertIn('username', attributes)
self.assertEqual(attributes['username'], 'ellen')
<commit_msg>Test short_name for user name attributes callback<commit_after>from django.test import TestCase
from .factories import UserFactory
from mama_cas.callbacks import user_model_attributes
from mama_cas.callbacks import user_name_attributes
class CallbacksTests(TestCase):
    """Tests for the mama_cas attribute-callback helpers.

    The expected literal values ('ellen', 'Ellen Cohen', 'Ellen')
    presumably come from the defaults baked into ``UserFactory`` —
    confirm against factories.py if they drift.
    """
    def setUp(self):
        # A fresh factory-built user for every test method.
        self.user = UserFactory()
    def test_user_name_attributes(self):
        """
        The callback should return a username, full_name and
        short_name attribute.
        """
        attributes = user_name_attributes(self.user, 'http://www.example.com/')
        self.assertIn('username', attributes)
        self.assertEqual(attributes['username'], 'ellen')
        self.assertIn('full_name', attributes)
        self.assertEqual(attributes['full_name'], 'Ellen Cohen')
        self.assertIn('short_name', attributes)
        self.assertEqual(attributes['short_name'], 'Ellen')
    def test_user_model_attributes(self):
        """The callback should return at least a username attribute."""
        attributes = user_model_attributes(self.user, 'http://www.example.com/')
        self.assertIn('username', attributes)
        self.assertEqual(attributes['username'], 'ellen')
|
8c0d1acf6ea41adc3743a4e190eaf777188282c0 | nova/policies/floating_ip_pools.py | nova/policies/floating_ip_pools.py | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-floating-ip-pools'
floating_ip_pools_policies = [
policy.DocumentedRuleDefault(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"List floating IP pools. This API is deprecated.",
[
{
'method': 'GET',
'path': '/os-floating-ip-pools'
}
]),
]
def list_rules():
return floating_ip_pools_policies
| # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-floating-ip-pools'
floating_ip_pools_policies = [
policy.DocumentedRuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER,
description="List floating IP pools. This API is deprecated.",
operations=[
{
'method': 'GET',
'path': '/os-floating-ip-pools'
}
],
scope_types=['system', 'project']),
]
def list_rules():
return floating_ip_pools_policies
| Introduce scope_types in FIP pools | Introduce scope_types in FIP pools
Appropriate scope_type for nova case:
- https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope
This commit introduce scope_type for FIP pools policies
as 'system' and 'project'
Partial implement blueprint policy-defaults-refresh-deprecated-apis
Change-Id: Ica29330ac8c22a40bbf5e88ef554bbc44e1ac293
| Python | apache-2.0 | mahak/nova,mahak/nova,klmitch/nova,openstack/nova,klmitch/nova,openstack/nova,mahak/nova,klmitch/nova,openstack/nova,klmitch/nova | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-floating-ip-pools'
floating_ip_pools_policies = [
policy.DocumentedRuleDefault(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"List floating IP pools. This API is deprecated.",
[
{
'method': 'GET',
'path': '/os-floating-ip-pools'
}
]),
]
def list_rules():
return floating_ip_pools_policies
Introduce scope_types in FIP pools
Appropriate scope_type for nova case:
- https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope
This commit introduce scope_type for FIP pools policies
as 'system' and 'project'
Partial implement blueprint policy-defaults-refresh-deprecated-apis
Change-Id: Ica29330ac8c22a40bbf5e88ef554bbc44e1ac293 | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-floating-ip-pools'
floating_ip_pools_policies = [
policy.DocumentedRuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER,
description="List floating IP pools. This API is deprecated.",
operations=[
{
'method': 'GET',
'path': '/os-floating-ip-pools'
}
],
scope_types=['system', 'project']),
]
def list_rules():
return floating_ip_pools_policies
| <commit_before># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-floating-ip-pools'
floating_ip_pools_policies = [
policy.DocumentedRuleDefault(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"List floating IP pools. This API is deprecated.",
[
{
'method': 'GET',
'path': '/os-floating-ip-pools'
}
]),
]
def list_rules():
return floating_ip_pools_policies
<commit_msg>Introduce scope_types in FIP pools
Appropriate scope_type for nova case:
- https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope
This commit introduce scope_type for FIP pools policies
as 'system' and 'project'
Partial implement blueprint policy-defaults-refresh-deprecated-apis
Change-Id: Ica29330ac8c22a40bbf5e88ef554bbc44e1ac293<commit_after> | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-floating-ip-pools'
floating_ip_pools_policies = [
policy.DocumentedRuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER,
description="List floating IP pools. This API is deprecated.",
operations=[
{
'method': 'GET',
'path': '/os-floating-ip-pools'
}
],
scope_types=['system', 'project']),
]
def list_rules():
return floating_ip_pools_policies
| # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-floating-ip-pools'
floating_ip_pools_policies = [
policy.DocumentedRuleDefault(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"List floating IP pools. This API is deprecated.",
[
{
'method': 'GET',
'path': '/os-floating-ip-pools'
}
]),
]
def list_rules():
return floating_ip_pools_policies
Introduce scope_types in FIP pools
Appropriate scope_type for nova case:
- https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope
This commit introduce scope_type for FIP pools policies
as 'system' and 'project'
Partial implement blueprint policy-defaults-refresh-deprecated-apis
Change-Id: Ica29330ac8c22a40bbf5e88ef554bbc44e1ac293# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-floating-ip-pools'
floating_ip_pools_policies = [
policy.DocumentedRuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER,
description="List floating IP pools. This API is deprecated.",
operations=[
{
'method': 'GET',
'path': '/os-floating-ip-pools'
}
],
scope_types=['system', 'project']),
]
def list_rules():
return floating_ip_pools_policies
| <commit_before># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-floating-ip-pools'
floating_ip_pools_policies = [
policy.DocumentedRuleDefault(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"List floating IP pools. This API is deprecated.",
[
{
'method': 'GET',
'path': '/os-floating-ip-pools'
}
]),
]
def list_rules():
return floating_ip_pools_policies
<commit_msg>Introduce scope_types in FIP pools
Appropriate scope_type for nova case:
- https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope
This commit introduce scope_type for FIP pools policies
as 'system' and 'project'
Partial implement blueprint policy-defaults-refresh-deprecated-apis
Change-Id: Ica29330ac8c22a40bbf5e88ef554bbc44e1ac293<commit_after># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
# Policy rule name for the deprecated os-floating-ip-pools API.
BASE_POLICY_NAME = 'os_compute_api:os-floating-ip-pools'
floating_ip_pools_policies = [
    policy.DocumentedRuleDefault(
        name=BASE_POLICY_NAME,
        check_str=base.RULE_ADMIN_OR_OWNER,
        description="List floating IP pools. This API is deprecated.",
        operations=[
            {
                'method': 'GET',
                'path': '/os-floating-ip-pools'
            }
        ],
        # The rule is enforceable for both system- and project-scoped tokens.
        scope_types=['system', 'project']),
]
def list_rules():
    """Return the policy rules defined by this module."""
    return floating_ip_pools_policies
|
67c6ecff4d5c65cd5b919bb1316f188bc6ab1098 | tests/python_tests/test_core_device.py | tests/python_tests/test_core_device.py | import pytest
import xchainer
def test_device():
device = xchainer.get_current_device()
xchainer.set_current_device('cpu')
assert str(xchainer.get_current_device()) == '<Device cpu>'
xchainer.set_current_device('cuda')
assert str(xchainer.get_current_device()) == '<Device cuda>'
with pytest.raises(xchainer.DeviceError):
xchainer.set_current_device('invalid_device')
xchainer.set_current_device(device)
| import pytest
import xchainer
def test_device():
cpu1 = xchainer.Device('cpu')
cpu2 = xchainer.Device('cpu')
cuda = xchainer.Device('cuda')
assert cpu1 == cpu2
assert not (cpu1 != cpu2)
assert not (cpu1 == cuda)
assert cpu1 != cuda
with pytest.raises(xchainer.DeviceError):
xchainer.Device('a' * 8) # too long device name
def test_current_device():
device = xchainer.get_current_device()
xchainer.set_current_device('cpu')
assert str(xchainer.get_current_device()) == '<Device cpu>'
xchainer.set_current_device('cuda')
assert str(xchainer.get_current_device()) == '<Device cuda>'
with pytest.raises(xchainer.DeviceError):
xchainer.set_current_device('invalid_device')
xchainer.set_current_device(device)
| Add tests for python device | Add tests for python device
| Python | mit | okuta/chainer,keisuke-umezawa/chainer,chainer/chainer,ktnyt/chainer,ktnyt/chainer,ktnyt/chainer,wkentaro/chainer,hvy/chainer,jnishi/chainer,okuta/chainer,hvy/chainer,pfnet/chainer,niboshi/chainer,niboshi/chainer,wkentaro/chainer,hvy/chainer,tkerola/chainer,jnishi/chainer,niboshi/chainer,jnishi/chainer,keisuke-umezawa/chainer,jnishi/chainer,niboshi/chainer,keisuke-umezawa/chainer,ktnyt/chainer,hvy/chainer,okuta/chainer,wkentaro/chainer,keisuke-umezawa/chainer,chainer/chainer,chainer/chainer,okuta/chainer,wkentaro/chainer,chainer/chainer | import pytest
import xchainer
def test_device():
device = xchainer.get_current_device()
xchainer.set_current_device('cpu')
assert str(xchainer.get_current_device()) == '<Device cpu>'
xchainer.set_current_device('cuda')
assert str(xchainer.get_current_device()) == '<Device cuda>'
with pytest.raises(xchainer.DeviceError):
xchainer.set_current_device('invalid_device')
xchainer.set_current_device(device)
Add tests for python device | import pytest
import xchainer
def test_device():
cpu1 = xchainer.Device('cpu')
cpu2 = xchainer.Device('cpu')
cuda = xchainer.Device('cuda')
assert cpu1 == cpu2
assert not (cpu1 != cpu2)
assert not (cpu1 == cuda)
assert cpu1 != cuda
with pytest.raises(xchainer.DeviceError):
xchainer.Device('a' * 8) # too long device name
def test_current_device():
device = xchainer.get_current_device()
xchainer.set_current_device('cpu')
assert str(xchainer.get_current_device()) == '<Device cpu>'
xchainer.set_current_device('cuda')
assert str(xchainer.get_current_device()) == '<Device cuda>'
with pytest.raises(xchainer.DeviceError):
xchainer.set_current_device('invalid_device')
xchainer.set_current_device(device)
| <commit_before>import pytest
import xchainer
def test_device():
device = xchainer.get_current_device()
xchainer.set_current_device('cpu')
assert str(xchainer.get_current_device()) == '<Device cpu>'
xchainer.set_current_device('cuda')
assert str(xchainer.get_current_device()) == '<Device cuda>'
with pytest.raises(xchainer.DeviceError):
xchainer.set_current_device('invalid_device')
xchainer.set_current_device(device)
<commit_msg>Add tests for python device<commit_after> | import pytest
import xchainer
def test_device():
cpu1 = xchainer.Device('cpu')
cpu2 = xchainer.Device('cpu')
cuda = xchainer.Device('cuda')
assert cpu1 == cpu2
assert not (cpu1 != cpu2)
assert not (cpu1 == cuda)
assert cpu1 != cuda
with pytest.raises(xchainer.DeviceError):
xchainer.Device('a' * 8) # too long device name
def test_current_device():
device = xchainer.get_current_device()
xchainer.set_current_device('cpu')
assert str(xchainer.get_current_device()) == '<Device cpu>'
xchainer.set_current_device('cuda')
assert str(xchainer.get_current_device()) == '<Device cuda>'
with pytest.raises(xchainer.DeviceError):
xchainer.set_current_device('invalid_device')
xchainer.set_current_device(device)
| import pytest
import xchainer
def test_device():
device = xchainer.get_current_device()
xchainer.set_current_device('cpu')
assert str(xchainer.get_current_device()) == '<Device cpu>'
xchainer.set_current_device('cuda')
assert str(xchainer.get_current_device()) == '<Device cuda>'
with pytest.raises(xchainer.DeviceError):
xchainer.set_current_device('invalid_device')
xchainer.set_current_device(device)
Add tests for python deviceimport pytest
import xchainer
def test_device():
cpu1 = xchainer.Device('cpu')
cpu2 = xchainer.Device('cpu')
cuda = xchainer.Device('cuda')
assert cpu1 == cpu2
assert not (cpu1 != cpu2)
assert not (cpu1 == cuda)
assert cpu1 != cuda
with pytest.raises(xchainer.DeviceError):
xchainer.Device('a' * 8) # too long device name
def test_current_device():
device = xchainer.get_current_device()
xchainer.set_current_device('cpu')
assert str(xchainer.get_current_device()) == '<Device cpu>'
xchainer.set_current_device('cuda')
assert str(xchainer.get_current_device()) == '<Device cuda>'
with pytest.raises(xchainer.DeviceError):
xchainer.set_current_device('invalid_device')
xchainer.set_current_device(device)
| <commit_before>import pytest
import xchainer
def test_device():
device = xchainer.get_current_device()
xchainer.set_current_device('cpu')
assert str(xchainer.get_current_device()) == '<Device cpu>'
xchainer.set_current_device('cuda')
assert str(xchainer.get_current_device()) == '<Device cuda>'
with pytest.raises(xchainer.DeviceError):
xchainer.set_current_device('invalid_device')
xchainer.set_current_device(device)
<commit_msg>Add tests for python device<commit_after>import pytest
import xchainer
def test_device():
    """Devices with the same name compare equal; different names do not."""
    cpu1 = xchainer.Device('cpu')
    cpu2 = xchainer.Device('cpu')
    cuda = xchainer.Device('cuda')
    # Both == and != are exercised so each operator is checked directly.
    assert cpu1 == cpu2
    assert not (cpu1 != cpu2)
    assert not (cpu1 == cuda)
    assert cpu1 != cuda
    with pytest.raises(xchainer.DeviceError):
        xchainer.Device('a' * 8)  # too long device name
def test_current_device():
device = xchainer.get_current_device()
xchainer.set_current_device('cpu')
assert str(xchainer.get_current_device()) == '<Device cpu>'
xchainer.set_current_device('cuda')
assert str(xchainer.get_current_device()) == '<Device cuda>'
with pytest.raises(xchainer.DeviceError):
xchainer.set_current_device('invalid_device')
xchainer.set_current_device(device)
|
6dea14c3b8d0d607fa5d9e4adbbd8d07d41cd272 | procurement_purchase_no_grouping/__manifest__.py | procurement_purchase_no_grouping/__manifest__.py | # Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 Tecnativa - Pedro M. Baeza
# Copyright 2018 Tecnativa - Carlos Dauden
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "13.0.1.0.1",
"author": "AvanzOSC," "Tecnativa," "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": ["purchase_stock"],
"data": ["views/product_category_view.xml", "views/res_config_settings_views.xml"],
"installable": True,
"license": "AGPL-3",
}
| # Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 Tecnativa - Pedro M. Baeza
# Copyright 2018 Tecnativa - Carlos Dauden
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "13.0.1.0.1",
"author": "AvanzOSC, Tecnativa, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": ["purchase_stock"],
"data": ["views/product_category_view.xml", "views/res_config_settings_views.xml"],
"installable": True,
"license": "AGPL-3",
}
| Delete empty " " spaces in same string line | [FIX] Delete empty " " spaces in same string line
| Python | agpl-3.0 | OCA/purchase-workflow,OCA/purchase-workflow | # Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 Tecnativa - Pedro M. Baeza
# Copyright 2018 Tecnativa - Carlos Dauden
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "13.0.1.0.1",
"author": "AvanzOSC," "Tecnativa," "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": ["purchase_stock"],
"data": ["views/product_category_view.xml", "views/res_config_settings_views.xml"],
"installable": True,
"license": "AGPL-3",
}
[FIX] Delete empty " " spaces in same string line | # Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 Tecnativa - Pedro M. Baeza
# Copyright 2018 Tecnativa - Carlos Dauden
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "13.0.1.0.1",
"author": "AvanzOSC, Tecnativa, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": ["purchase_stock"],
"data": ["views/product_category_view.xml", "views/res_config_settings_views.xml"],
"installable": True,
"license": "AGPL-3",
}
| <commit_before># Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 Tecnativa - Pedro M. Baeza
# Copyright 2018 Tecnativa - Carlos Dauden
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "13.0.1.0.1",
"author": "AvanzOSC," "Tecnativa," "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": ["purchase_stock"],
"data": ["views/product_category_view.xml", "views/res_config_settings_views.xml"],
"installable": True,
"license": "AGPL-3",
}
<commit_msg>[FIX] Delete empty " " spaces in same string line<commit_after> | # Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 Tecnativa - Pedro M. Baeza
# Copyright 2018 Tecnativa - Carlos Dauden
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "13.0.1.0.1",
"author": "AvanzOSC, Tecnativa, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": ["purchase_stock"],
"data": ["views/product_category_view.xml", "views/res_config_settings_views.xml"],
"installable": True,
"license": "AGPL-3",
}
| # Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 Tecnativa - Pedro M. Baeza
# Copyright 2018 Tecnativa - Carlos Dauden
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "13.0.1.0.1",
"author": "AvanzOSC," "Tecnativa," "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": ["purchase_stock"],
"data": ["views/product_category_view.xml", "views/res_config_settings_views.xml"],
"installable": True,
"license": "AGPL-3",
}
[FIX] Delete empty " " spaces in same string line# Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 Tecnativa - Pedro M. Baeza
# Copyright 2018 Tecnativa - Carlos Dauden
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "13.0.1.0.1",
"author": "AvanzOSC, Tecnativa, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": ["purchase_stock"],
"data": ["views/product_category_view.xml", "views/res_config_settings_views.xml"],
"installable": True,
"license": "AGPL-3",
}
| <commit_before># Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 Tecnativa - Pedro M. Baeza
# Copyright 2018 Tecnativa - Carlos Dauden
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "13.0.1.0.1",
"author": "AvanzOSC," "Tecnativa," "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": ["purchase_stock"],
"data": ["views/product_category_view.xml", "views/res_config_settings_views.xml"],
"installable": True,
"license": "AGPL-3",
}
<commit_msg>[FIX] Delete empty " " spaces in same string line<commit_after># Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 Tecnativa - Pedro M. Baeza
# Copyright 2018 Tecnativa - Carlos Dauden
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
{
"name": "Procurement Purchase No Grouping",
"version": "13.0.1.0.1",
"author": "AvanzOSC, Tecnativa, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Procurements",
"depends": ["purchase_stock"],
"data": ["views/product_category_view.xml", "views/res_config_settings_views.xml"],
"installable": True,
"license": "AGPL-3",
}
|
3c798673cfb5f7e63e2aebb300ba7cc92c72fa8a | aggregator/base.py | aggregator/base.py | from abc import ABCMeta, abstractmethod
import requests
from bs4 import BeautifulSoup
def make_soup(url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
return soup
class Aggregator(metaclass=ABCMeta):
base_url = ''
@abstractmethod
def extract(self):
pass
@abstractmethod
def generate_url(self):
pass
| import collections
from abc import ABCMeta, abstractmethod
import requests
from bs4 import BeautifulSoup
Article = collections.namedtuple('Article', ['source', 'title', 'url', 'author',
'date_published'])
def make_soup(url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
return soup
class Aggregator(metaclass=ABCMeta):
base_url = ''
source = ''
@abstractmethod
def extract(self):
pass
@abstractmethod
def generate_url(self):
pass
@abstractmethod
def crawl(self, *args, **kwargs):
pass
@abstractmethod
def get_author(self, *args, **kwargs):
pass
@abstractmethod
def get_date_published(self, *args, **kwargs):
pass
@abstractmethod
def get_title(self, *args, **kwargs):
pass
| Define new abstract methods and namedtuple | Define new abstract methods and namedtuple
| Python | apache-2.0 | footynews/fn_backend | from abc import ABCMeta, abstractmethod
import requests
from bs4 import BeautifulSoup
def make_soup(url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
return soup
class Aggregator(metaclass=ABCMeta):
base_url = ''
@abstractmethod
def extract(self):
pass
@abstractmethod
def generate_url(self):
pass
Define new abstract methods and namedtuple | import collections
from abc import ABCMeta, abstractmethod
import requests
from bs4 import BeautifulSoup
Article = collections.namedtuple('Article', ['source', 'title', 'url', 'author',
'date_published'])
def make_soup(url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
return soup
class Aggregator(metaclass=ABCMeta):
base_url = ''
source = ''
@abstractmethod
def extract(self):
pass
@abstractmethod
def generate_url(self):
pass
@abstractmethod
def crawl(self, *args, **kwargs):
pass
@abstractmethod
def get_author(self, *args, **kwargs):
pass
@abstractmethod
def get_date_published(self, *args, **kwargs):
pass
@abstractmethod
def get_title(self, *args, **kwargs):
pass
| <commit_before>from abc import ABCMeta, abstractmethod
import requests
from bs4 import BeautifulSoup
def make_soup(url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
return soup
class Aggregator(metaclass=ABCMeta):
base_url = ''
@abstractmethod
def extract(self):
pass
@abstractmethod
def generate_url(self):
pass
<commit_msg>Define new abstract methods and namedtuple<commit_after> | import collections
from abc import ABCMeta, abstractmethod
import requests
from bs4 import BeautifulSoup
Article = collections.namedtuple('Article', ['source', 'title', 'url', 'author',
'date_published'])
def make_soup(url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
return soup
class Aggregator(metaclass=ABCMeta):
base_url = ''
source = ''
@abstractmethod
def extract(self):
pass
@abstractmethod
def generate_url(self):
pass
@abstractmethod
def crawl(self, *args, **kwargs):
pass
@abstractmethod
def get_author(self, *args, **kwargs):
pass
@abstractmethod
def get_date_published(self, *args, **kwargs):
pass
@abstractmethod
def get_title(self, *args, **kwargs):
pass
| from abc import ABCMeta, abstractmethod
import requests
from bs4 import BeautifulSoup
def make_soup(url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
return soup
class Aggregator(metaclass=ABCMeta):
base_url = ''
@abstractmethod
def extract(self):
pass
@abstractmethod
def generate_url(self):
pass
Define new abstract methods and namedtupleimport collections
from abc import ABCMeta, abstractmethod
import requests
from bs4 import BeautifulSoup
Article = collections.namedtuple('Article', ['source', 'title', 'url', 'author',
'date_published'])
def make_soup(url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
return soup
class Aggregator(metaclass=ABCMeta):
base_url = ''
source = ''
@abstractmethod
def extract(self):
pass
@abstractmethod
def generate_url(self):
pass
@abstractmethod
def crawl(self, *args, **kwargs):
pass
@abstractmethod
def get_author(self, *args, **kwargs):
pass
@abstractmethod
def get_date_published(self, *args, **kwargs):
pass
@abstractmethod
def get_title(self, *args, **kwargs):
pass
| <commit_before>from abc import ABCMeta, abstractmethod
import requests
from bs4 import BeautifulSoup
def make_soup(url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
return soup
class Aggregator(metaclass=ABCMeta):
base_url = ''
@abstractmethod
def extract(self):
pass
@abstractmethod
def generate_url(self):
pass
<commit_msg>Define new abstract methods and namedtuple<commit_after>import collections
from abc import ABCMeta, abstractmethod
import requests
from bs4 import BeautifulSoup
Article = collections.namedtuple('Article', ['source', 'title', 'url', 'author',
'date_published'])
def make_soup(url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
return soup
class Aggregator(metaclass=ABCMeta):
base_url = ''
source = ''
@abstractmethod
def extract(self):
pass
@abstractmethod
def generate_url(self):
pass
@abstractmethod
def crawl(self, *args, **kwargs):
pass
@abstractmethod
def get_author(self, *args, **kwargs):
pass
@abstractmethod
def get_date_published(self, *args, **kwargs):
pass
@abstractmethod
def get_title(self, *args, **kwargs):
pass
|
9da3f2a835fa2aaba5d91ffe31b3fcaf8d83a4c9 | snake/main.py | snake/main.py | import os
import sys
from snake.core import Snake
SNAKEFILE_LOADED = False
def abort(msg):
print >> sys.stderr, "Error: %s" % msg
sys.exit(1)
def load_snakefile(path, fail_silently=False):
global SNAKEFILE_LOADED
if not SNAKEFILE_LOADED:
sys.path.insert(0, path)
try:
return __import__('snakefile')
except ImportError:
if not fail_silently:
abort("couldn't find any snakefile.")
else:
SNAKEFILE_LOADED = True
del sys.path[0]
def find_snakefile():
global SNAKEFILE_LOADED
path = os.getcwd()
while True:
filepath = os.path.join(path, 'snakefile.py')
if os.path.isfile(filepath):
return load_snakefile(path), filepath
if not os.path.split(path)[1]:
break
path = os.path.split(path)[0]
if not SNAKEFILE_LOADED:
abort("couldn't find any snakefile.")
def main():
snakefile, snakefilepath = find_snakefile()
for name in dir(snakefile):
attr = getattr(snakefile, name)
if isinstance(attr, Snake):
attr.run(snakefilepath)
break
else:
abort("couldn't find any Snake instance in snakefile.")
| import imp
import os
import sys
from snake.core import Snake
def abort(msg):
print >> sys.stderr, "Error: %s" % msg
sys.exit(1)
def get_ascending_paths(path):
paths = []
while True:
paths.append(path)
path, tail = os.path.split(path)
if not tail:
break
return paths
def find_snakefile():
paths = get_ascending_paths(os.getcwd())
try:
return imp.find_module('snakefile', paths)
except:
abort("couldn't find any snakefile.")
def get_snakefile():
return imp.load_module('snakefile', *find_snakefile())
def main():
snakefile = get_snakefile()
for name in dir(snakefile):
attr = getattr(snakefile, name)
if isinstance(attr, Snake):
attr.run(snakefile.__file__)
break
else:
abort("couldn't find any Snake instance in snakefile.")
| Improve the way snakefile loading works | Improve the way snakefile loading works
| Python | bsd-2-clause | yumike/snake | import os
import sys
from snake.core import Snake
SNAKEFILE_LOADED = False
def abort(msg):
print >> sys.stderr, "Error: %s" % msg
sys.exit(1)
def load_snakefile(path, fail_silently=False):
global SNAKEFILE_LOADED
if not SNAKEFILE_LOADED:
sys.path.insert(0, path)
try:
return __import__('snakefile')
except ImportError:
if not fail_silently:
abort("couldn't find any snakefile.")
else:
SNAKEFILE_LOADED = True
del sys.path[0]
def find_snakefile():
global SNAKEFILE_LOADED
path = os.getcwd()
while True:
filepath = os.path.join(path, 'snakefile.py')
if os.path.isfile(filepath):
return load_snakefile(path), filepath
if not os.path.split(path)[1]:
break
path = os.path.split(path)[0]
if not SNAKEFILE_LOADED:
abort("couldn't find any snakefile.")
def main():
snakefile, snakefilepath = find_snakefile()
for name in dir(snakefile):
attr = getattr(snakefile, name)
if isinstance(attr, Snake):
attr.run(snakefilepath)
break
else:
abort("couldn't find any Snake instance in snakefile.")
Improve the way snakefile loading works | import imp
import os
import sys
from snake.core import Snake
def abort(msg):
print >> sys.stderr, "Error: %s" % msg
sys.exit(1)
def get_ascending_paths(path):
paths = []
while True:
paths.append(path)
path, tail = os.path.split(path)
if not tail:
break
return paths
def find_snakefile():
paths = get_ascending_paths(os.getcwd())
try:
return imp.find_module('snakefile', paths)
except:
abort("couldn't find any snakefile.")
def get_snakefile():
return imp.load_module('snakefile', *find_snakefile())
def main():
snakefile = get_snakefile()
for name in dir(snakefile):
attr = getattr(snakefile, name)
if isinstance(attr, Snake):
attr.run(snakefile.__file__)
break
else:
abort("couldn't find any Snake instance in snakefile.")
| <commit_before>import os
import sys
from snake.core import Snake
SNAKEFILE_LOADED = False
def abort(msg):
print >> sys.stderr, "Error: %s" % msg
sys.exit(1)
def load_snakefile(path, fail_silently=False):
global SNAKEFILE_LOADED
if not SNAKEFILE_LOADED:
sys.path.insert(0, path)
try:
return __import__('snakefile')
except ImportError:
if not fail_silently:
abort("couldn't find any snakefile.")
else:
SNAKEFILE_LOADED = True
del sys.path[0]
def find_snakefile():
global SNAKEFILE_LOADED
path = os.getcwd()
while True:
filepath = os.path.join(path, 'snakefile.py')
if os.path.isfile(filepath):
return load_snakefile(path), filepath
if not os.path.split(path)[1]:
break
path = os.path.split(path)[0]
if not SNAKEFILE_LOADED:
abort("couldn't find any snakefile.")
def main():
snakefile, snakefilepath = find_snakefile()
for name in dir(snakefile):
attr = getattr(snakefile, name)
if isinstance(attr, Snake):
attr.run(snakefilepath)
break
else:
abort("couldn't find any Snake instance in snakefile.")
<commit_msg>Improve the way snakefile loading works<commit_after> | import imp
import os
import sys
from snake.core import Snake
def abort(msg):
print >> sys.stderr, "Error: %s" % msg
sys.exit(1)
def get_ascending_paths(path):
paths = []
while True:
paths.append(path)
path, tail = os.path.split(path)
if not tail:
break
return paths
def find_snakefile():
paths = get_ascending_paths(os.getcwd())
try:
return imp.find_module('snakefile', paths)
except:
abort("couldn't find any snakefile.")
def get_snakefile():
return imp.load_module('snakefile', *find_snakefile())
def main():
snakefile = get_snakefile()
for name in dir(snakefile):
attr = getattr(snakefile, name)
if isinstance(attr, Snake):
attr.run(snakefile.__file__)
break
else:
abort("couldn't find any Snake instance in snakefile.")
| import os
import sys
from snake.core import Snake
SNAKEFILE_LOADED = False
def abort(msg):
print >> sys.stderr, "Error: %s" % msg
sys.exit(1)
def load_snakefile(path, fail_silently=False):
global SNAKEFILE_LOADED
if not SNAKEFILE_LOADED:
sys.path.insert(0, path)
try:
return __import__('snakefile')
except ImportError:
if not fail_silently:
abort("couldn't find any snakefile.")
else:
SNAKEFILE_LOADED = True
del sys.path[0]
def find_snakefile():
global SNAKEFILE_LOADED
path = os.getcwd()
while True:
filepath = os.path.join(path, 'snakefile.py')
if os.path.isfile(filepath):
return load_snakefile(path), filepath
if not os.path.split(path)[1]:
break
path = os.path.split(path)[0]
if not SNAKEFILE_LOADED:
abort("couldn't find any snakefile.")
def main():
snakefile, snakefilepath = find_snakefile()
for name in dir(snakefile):
attr = getattr(snakefile, name)
if isinstance(attr, Snake):
attr.run(snakefilepath)
break
else:
abort("couldn't find any Snake instance in snakefile.")
Improve the way snakefile loading worksimport imp
import os
import sys
from snake.core import Snake
def abort(msg):
print >> sys.stderr, "Error: %s" % msg
sys.exit(1)
def get_ascending_paths(path):
paths = []
while True:
paths.append(path)
path, tail = os.path.split(path)
if not tail:
break
return paths
def find_snakefile():
paths = get_ascending_paths(os.getcwd())
try:
return imp.find_module('snakefile', paths)
except:
abort("couldn't find any snakefile.")
def get_snakefile():
return imp.load_module('snakefile', *find_snakefile())
def main():
snakefile = get_snakefile()
for name in dir(snakefile):
attr = getattr(snakefile, name)
if isinstance(attr, Snake):
attr.run(snakefile.__file__)
break
else:
abort("couldn't find any Snake instance in snakefile.")
| <commit_before>import os
import sys
from snake.core import Snake
SNAKEFILE_LOADED = False
def abort(msg):
print >> sys.stderr, "Error: %s" % msg
sys.exit(1)
def load_snakefile(path, fail_silently=False):
global SNAKEFILE_LOADED
if not SNAKEFILE_LOADED:
sys.path.insert(0, path)
try:
return __import__('snakefile')
except ImportError:
if not fail_silently:
abort("couldn't find any snakefile.")
else:
SNAKEFILE_LOADED = True
del sys.path[0]
def find_snakefile():
global SNAKEFILE_LOADED
path = os.getcwd()
while True:
filepath = os.path.join(path, 'snakefile.py')
if os.path.isfile(filepath):
return load_snakefile(path), filepath
if not os.path.split(path)[1]:
break
path = os.path.split(path)[0]
if not SNAKEFILE_LOADED:
abort("couldn't find any snakefile.")
def main():
snakefile, snakefilepath = find_snakefile()
for name in dir(snakefile):
attr = getattr(snakefile, name)
if isinstance(attr, Snake):
attr.run(snakefilepath)
break
else:
abort("couldn't find any Snake instance in snakefile.")
<commit_msg>Improve the way snakefile loading works<commit_after>import imp
import os
import sys
from snake.core import Snake
def abort(msg):
print >> sys.stderr, "Error: %s" % msg
sys.exit(1)
def get_ascending_paths(path):
paths = []
while True:
paths.append(path)
path, tail = os.path.split(path)
if not tail:
break
return paths
def find_snakefile():
paths = get_ascending_paths(os.getcwd())
try:
return imp.find_module('snakefile', paths)
except:
abort("couldn't find any snakefile.")
def get_snakefile():
return imp.load_module('snakefile', *find_snakefile())
def main():
snakefile = get_snakefile()
for name in dir(snakefile):
attr = getattr(snakefile, name)
if isinstance(attr, Snake):
attr.run(snakefile.__file__)
break
else:
abort("couldn't find any Snake instance in snakefile.")
|
b2239ab0329f129da21f3ab82eaf9543b95fc01b | pal/services/joke_service.py | pal/services/joke_service.py | import re
from pal.services.service import Service
from pal.services.service import wrap_response
class JokeService(Service):
_JOKES = {
'open the pod bay doors pal':
"I'm sorry, Jeff, I'm afraid I can't do that.",
'laws of robotics':
"1. A robot may not injure a human being or, through inaction, "
"allow a human being to come to harm.\n2. A robot must obey the "
"orders given it by human beings, except where such orders would "
"conflict with the First Law.\n3. A robot must protect its own "
"existence as long as such protection does not conflict with the "
"First or Second Law.",
'knock knock': "Who's there?",
'tom hanks': "As far as I'm concerned, Tom Hanks was in 1 movies.",
}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
| import re
from pal.services.service import Service
from pal.services.service import wrap_response
class JokeService(Service):
_JOKES = {
'pod bay doors':
"I'm sorry Jeff, I'm afraid I can't do that.",
'laws of robotics':
"1. A robot may not injure a human being or, through inaction, "
"allow a human being to come to harm.\n2. A robot must obey the "
"orders given it by human beings, except where such orders would "
"conflict with the First Law.\n3. A robot must protect its own "
"existence as long as such protection does not conflict with the "
"First or Second Law.",
'knock knock': "Who's there?",
'tom hanks': "As far as I'm concerned, Tom Hanks was in 1 movies.",
"where's waldo": "He's right there, can't you see him?",
}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
| Add Waldo joke and simplify HAL joke | Add Waldo joke and simplify HAL joke
| Python | bsd-3-clause | Machyne/pal,Machyne/pal,Machyne/pal,Machyne/pal | import re
from pal.services.service import Service
from pal.services.service import wrap_response
class JokeService(Service):
_JOKES = {
'open the pod bay doors pal':
"I'm sorry, Jeff, I'm afraid I can't do that.",
'laws of robotics':
"1. A robot may not injure a human being or, through inaction, "
"allow a human being to come to harm.\n2. A robot must obey the "
"orders given it by human beings, except where such orders would "
"conflict with the First Law.\n3. A robot must protect its own "
"existence as long as such protection does not conflict with the "
"First or Second Law.",
'knock knock': "Who's there?",
'tom hanks': "As far as I'm concerned, Tom Hanks was in 1 movies.",
}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
Add Waldo joke and simplify HAL joke | import re
from pal.services.service import Service
from pal.services.service import wrap_response
class JokeService(Service):
_JOKES = {
'pod bay doors':
"I'm sorry Jeff, I'm afraid I can't do that.",
'laws of robotics':
"1. A robot may not injure a human being or, through inaction, "
"allow a human being to come to harm.\n2. A robot must obey the "
"orders given it by human beings, except where such orders would "
"conflict with the First Law.\n3. A robot must protect its own "
"existence as long as such protection does not conflict with the "
"First or Second Law.",
'knock knock': "Who's there?",
'tom hanks': "As far as I'm concerned, Tom Hanks was in 1 movies.",
"where's waldo": "He's right there, can't you see him?",
}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
| <commit_before>import re
from pal.services.service import Service
from pal.services.service import wrap_response
class JokeService(Service):
_JOKES = {
'open the pod bay doors pal':
"I'm sorry, Jeff, I'm afraid I can't do that.",
'laws of robotics':
"1. A robot may not injure a human being or, through inaction, "
"allow a human being to come to harm.\n2. A robot must obey the "
"orders given it by human beings, except where such orders would "
"conflict with the First Law.\n3. A robot must protect its own "
"existence as long as such protection does not conflict with the "
"First or Second Law.",
'knock knock': "Who's there?",
'tom hanks': "As far as I'm concerned, Tom Hanks was in 1 movies.",
}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
<commit_msg>Add Waldo joke and simplify HAL joke<commit_after> | import re
from pal.services.service import Service
from pal.services.service import wrap_response
class JokeService(Service):
_JOKES = {
'pod bay doors':
"I'm sorry Jeff, I'm afraid I can't do that.",
'laws of robotics':
"1. A robot may not injure a human being or, through inaction, "
"allow a human being to come to harm.\n2. A robot must obey the "
"orders given it by human beings, except where such orders would "
"conflict with the First Law.\n3. A robot must protect its own "
"existence as long as such protection does not conflict with the "
"First or Second Law.",
'knock knock': "Who's there?",
'tom hanks': "As far as I'm concerned, Tom Hanks was in 1 movies.",
"where's waldo": "He's right there, can't you see him?",
}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
| import re
from pal.services.service import Service
from pal.services.service import wrap_response
class JokeService(Service):
_JOKES = {
'open the pod bay doors pal':
"I'm sorry, Jeff, I'm afraid I can't do that.",
'laws of robotics':
"1. A robot may not injure a human being or, through inaction, "
"allow a human being to come to harm.\n2. A robot must obey the "
"orders given it by human beings, except where such orders would "
"conflict with the First Law.\n3. A robot must protect its own "
"existence as long as such protection does not conflict with the "
"First or Second Law.",
'knock knock': "Who's there?",
'tom hanks': "As far as I'm concerned, Tom Hanks was in 1 movies.",
}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
Add Waldo joke and simplify HAL jokeimport re
from pal.services.service import Service
from pal.services.service import wrap_response
class JokeService(Service):
_JOKES = {
'pod bay doors':
"I'm sorry Jeff, I'm afraid I can't do that.",
'laws of robotics':
"1. A robot may not injure a human being or, through inaction, "
"allow a human being to come to harm.\n2. A robot must obey the "
"orders given it by human beings, except where such orders would "
"conflict with the First Law.\n3. A robot must protect its own "
"existence as long as such protection does not conflict with the "
"First or Second Law.",
'knock knock': "Who's there?",
'tom hanks': "As far as I'm concerned, Tom Hanks was in 1 movies.",
"where's waldo": "He's right there, can't you see him?",
}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
| <commit_before>import re
from pal.services.service import Service
from pal.services.service import wrap_response
class JokeService(Service):
_JOKES = {
'open the pod bay doors pal':
"I'm sorry, Jeff, I'm afraid I can't do that.",
'laws of robotics':
"1. A robot may not injure a human being or, through inaction, "
"allow a human being to come to harm.\n2. A robot must obey the "
"orders given it by human beings, except where such orders would "
"conflict with the First Law.\n3. A robot must protect its own "
"existence as long as such protection does not conflict with the "
"First or Second Law.",
'knock knock': "Who's there?",
'tom hanks': "As far as I'm concerned, Tom Hanks was in 1 movies.",
}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
<commit_msg>Add Waldo joke and simplify HAL joke<commit_after>import re
from pal.services.service import Service
from pal.services.service import wrap_response
class JokeService(Service):
_JOKES = {
'pod bay doors':
"I'm sorry Jeff, I'm afraid I can't do that.",
'laws of robotics':
"1. A robot may not injure a human being or, through inaction, "
"allow a human being to come to harm.\n2. A robot must obey the "
"orders given it by human beings, except where such orders would "
"conflict with the First Law.\n3. A robot must protect its own "
"existence as long as such protection does not conflict with the "
"First or Second Law.",
'knock knock': "Who's there?",
'tom hanks': "As far as I'm concerned, Tom Hanks was in 1 movies.",
"where's waldo": "He's right there, can't you see him?",
}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
|
c046d7915c08221e4a84a01edf3ca08a27a931a8 | opps/api/urls.py | opps/api/urls.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container),
url(r'^containerbox/$', containerbox),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container, {'emitter_format': 'json'}),
url(r'^containerbox/$', containerbox, {'emitter_format': 'json'}),
)
| Set emitter format json in api | Set emitter format json in api
| Python | mit | williamroot/opps,jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,YACOWS/opps,jeanmask/opps,opps/opps,YACOWS/opps,YACOWS/opps,jeanmask/opps,opps/opps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container),
url(r'^containerbox/$', containerbox),
)
Set emitter format json in api | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container, {'emitter_format': 'json'}),
url(r'^containerbox/$', containerbox, {'emitter_format': 'json'}),
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container),
url(r'^containerbox/$', containerbox),
)
<commit_msg>Set emitter format json in api<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container, {'emitter_format': 'json'}),
url(r'^containerbox/$', containerbox, {'emitter_format': 'json'}),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container),
url(r'^containerbox/$', containerbox),
)
Set emitter format json in api#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container, {'emitter_format': 'json'}),
url(r'^containerbox/$', containerbox, {'emitter_format': 'json'}),
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container),
url(r'^containerbox/$', containerbox),
)
<commit_msg>Set emitter format json in api<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container, {'emitter_format': 'json'}),
url(r'^containerbox/$', containerbox, {'emitter_format': 'json'}),
)
|
a18339c0d5ed9bbfcfc7b763d31a18c117a38069 | bed/record.py | bed/record.py | from __future__ import division
import time
import serial
import csv
from datetime import datetime as dt
DEBUG = True
def timestamp():
ts = dt.now()
return time.mktime(ts.timetuple())+(ts.microsecond/1e6)
bufsize = 1 if DEBUG else -1
with open('rec.csv', 'wa', bufsize) as f:
writer = csv.writer(f)
with serial.Serial('/dev/ttyACM0', 57600, timeout=1.0) as ser:
# Empty whatever is in the buffer
ser.flushInput()
while ser.inWaiting():
ser.read()
while True:
values = []
for _ in range(50):
line = ser.readline().strip()
try:
values.append(int(line))
except ValueError:
print str(dt.now()), 'NaN:', line
print timestamp(), min(values), max(values)
writer.writerow([timestamp(), min(values), max(values)])
| from __future__ import division
import time
import serial
import csv
from datetime import datetime as dt
DEBUG = True
def timestamp():
ts = dt.now()
return time.mktime(ts.timetuple())+(ts.microsecond/1e6)
bufsize = 1 if DEBUG else -1
with open('db/rec.csv', 'wa', bufsize) as f:
writer = csv.writer(f)
with serial.Serial('/dev/ttyACM0', 57600, timeout=1.0) as ser:
# Empty whatever is in the buffer
ser.flushInput()
while ser.inWaiting():
ser.read()
while True:
values = []
for _ in range(50):
line = ser.readline().strip()
try:
values.append(int(line))
except ValueError:
print str(dt.now()), 'NaN:', line
print timestamp(), min(values), max(values)
writer.writerow([timestamp(), min(values), max(values)])
| Fix path to data file | Fix path to data file
| Python | mit | wonkoderverstaendige/telemetry,wonkoderverstaendige/telemetry,wonkoderverstaendige/telemetry | from __future__ import division
import time
import serial
import csv
from datetime import datetime as dt
DEBUG = True
def timestamp():
ts = dt.now()
return time.mktime(ts.timetuple())+(ts.microsecond/1e6)
bufsize = 1 if DEBUG else -1
with open('rec.csv', 'wa', bufsize) as f:
writer = csv.writer(f)
with serial.Serial('/dev/ttyACM0', 57600, timeout=1.0) as ser:
# Empty whatever is in the buffer
ser.flushInput()
while ser.inWaiting():
ser.read()
while True:
values = []
for _ in range(50):
line = ser.readline().strip()
try:
values.append(int(line))
except ValueError:
print str(dt.now()), 'NaN:', line
print timestamp(), min(values), max(values)
writer.writerow([timestamp(), min(values), max(values)])
Fix path to data file | from __future__ import division
import time
import serial
import csv
from datetime import datetime as dt
DEBUG = True
def timestamp():
ts = dt.now()
return time.mktime(ts.timetuple())+(ts.microsecond/1e6)
bufsize = 1 if DEBUG else -1
with open('db/rec.csv', 'wa', bufsize) as f:
writer = csv.writer(f)
with serial.Serial('/dev/ttyACM0', 57600, timeout=1.0) as ser:
# Empty whatever is in the buffer
ser.flushInput()
while ser.inWaiting():
ser.read()
while True:
values = []
for _ in range(50):
line = ser.readline().strip()
try:
values.append(int(line))
except ValueError:
print str(dt.now()), 'NaN:', line
print timestamp(), min(values), max(values)
writer.writerow([timestamp(), min(values), max(values)])
| <commit_before>from __future__ import division
import time
import serial
import csv
from datetime import datetime as dt
DEBUG = True
def timestamp():
ts = dt.now()
return time.mktime(ts.timetuple())+(ts.microsecond/1e6)
bufsize = 1 if DEBUG else -1
with open('rec.csv', 'wa', bufsize) as f:
writer = csv.writer(f)
with serial.Serial('/dev/ttyACM0', 57600, timeout=1.0) as ser:
# Empty whatever is in the buffer
ser.flushInput()
while ser.inWaiting():
ser.read()
while True:
values = []
for _ in range(50):
line = ser.readline().strip()
try:
values.append(int(line))
except ValueError:
print str(dt.now()), 'NaN:', line
print timestamp(), min(values), max(values)
writer.writerow([timestamp(), min(values), max(values)])
<commit_msg>Fix path to data file<commit_after> | from __future__ import division
import time
import serial
import csv
from datetime import datetime as dt
DEBUG = True
def timestamp():
ts = dt.now()
return time.mktime(ts.timetuple())+(ts.microsecond/1e6)
bufsize = 1 if DEBUG else -1
with open('db/rec.csv', 'wa', bufsize) as f:
writer = csv.writer(f)
with serial.Serial('/dev/ttyACM0', 57600, timeout=1.0) as ser:
# Empty whatever is in the buffer
ser.flushInput()
while ser.inWaiting():
ser.read()
while True:
values = []
for _ in range(50):
line = ser.readline().strip()
try:
values.append(int(line))
except ValueError:
print str(dt.now()), 'NaN:', line
print timestamp(), min(values), max(values)
writer.writerow([timestamp(), min(values), max(values)])
| from __future__ import division
import time
import serial
import csv
from datetime import datetime as dt
DEBUG = True
def timestamp():
ts = dt.now()
return time.mktime(ts.timetuple())+(ts.microsecond/1e6)
bufsize = 1 if DEBUG else -1
with open('rec.csv', 'wa', bufsize) as f:
writer = csv.writer(f)
with serial.Serial('/dev/ttyACM0', 57600, timeout=1.0) as ser:
# Empty whatever is in the buffer
ser.flushInput()
while ser.inWaiting():
ser.read()
while True:
values = []
for _ in range(50):
line = ser.readline().strip()
try:
values.append(int(line))
except ValueError:
print str(dt.now()), 'NaN:', line
print timestamp(), min(values), max(values)
writer.writerow([timestamp(), min(values), max(values)])
Fix path to data filefrom __future__ import division
import time
import serial
import csv
from datetime import datetime as dt
DEBUG = True
def timestamp():
ts = dt.now()
return time.mktime(ts.timetuple())+(ts.microsecond/1e6)
bufsize = 1 if DEBUG else -1
with open('db/rec.csv', 'wa', bufsize) as f:
writer = csv.writer(f)
with serial.Serial('/dev/ttyACM0', 57600, timeout=1.0) as ser:
# Empty whatever is in the buffer
ser.flushInput()
while ser.inWaiting():
ser.read()
while True:
values = []
for _ in range(50):
line = ser.readline().strip()
try:
values.append(int(line))
except ValueError:
print str(dt.now()), 'NaN:', line
print timestamp(), min(values), max(values)
writer.writerow([timestamp(), min(values), max(values)])
| <commit_before>from __future__ import division
import time
import serial
import csv
from datetime import datetime as dt
DEBUG = True
def timestamp():
ts = dt.now()
return time.mktime(ts.timetuple())+(ts.microsecond/1e6)
bufsize = 1 if DEBUG else -1
with open('rec.csv', 'wa', bufsize) as f:
writer = csv.writer(f)
with serial.Serial('/dev/ttyACM0', 57600, timeout=1.0) as ser:
# Empty whatever is in the buffer
ser.flushInput()
while ser.inWaiting():
ser.read()
while True:
values = []
for _ in range(50):
line = ser.readline().strip()
try:
values.append(int(line))
except ValueError:
print str(dt.now()), 'NaN:', line
print timestamp(), min(values), max(values)
writer.writerow([timestamp(), min(values), max(values)])
<commit_msg>Fix path to data file<commit_after>from __future__ import division
import time
import serial
import csv
from datetime import datetime as dt
DEBUG = True
def timestamp():
ts = dt.now()
return time.mktime(ts.timetuple())+(ts.microsecond/1e6)
bufsize = 1 if DEBUG else -1
with open('db/rec.csv', 'wa', bufsize) as f:
writer = csv.writer(f)
with serial.Serial('/dev/ttyACM0', 57600, timeout=1.0) as ser:
# Empty whatever is in the buffer
ser.flushInput()
while ser.inWaiting():
ser.read()
while True:
values = []
for _ in range(50):
line = ser.readline().strip()
try:
values.append(int(line))
except ValueError:
print str(dt.now()), 'NaN:', line
print timestamp(), min(values), max(values)
writer.writerow([timestamp(), min(values), max(values)])
|
5b2a63706d2f9d2853ba1f6ad8d1cf80f8c07676 | tohu/__init__.py | tohu/__init__.py | from .v4.base import *
from .v4.primitive_generators import *
from .v4.derived_generators import *
from .v4.dispatch_generators import *
from .v4.custom_generator import CustomGenerator
from .v4.logging import logger
from .v4.utils import print_generated_sequence
from .v4 import base
from .v4 import primitive_generators
from .v4 import derived_generators
from .v4 import dispatch_generators
from .v4 import custom_generator
from .v4 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ dispatch_generators.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias | from distutils.version import StrictVersion
from platform import python_version
min_supported_python_version = '3.6'
if StrictVersion(python_version()) < StrictVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from .v4.base import *
from .v4.primitive_generators import *
from .v4.derived_generators import *
from .v4.dispatch_generators import *
from .v4.custom_generator import CustomGenerator
from .v4.logging import logger
from .v4.utils import print_generated_sequence
from .v4 import base
from .v4 import primitive_generators
from .v4 import derived_generators
from .v4 import dispatch_generators
from .v4 import custom_generator
from .v4 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ dispatch_generators.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias | Check Python version at startup | Check Python version at startup
| Python | mit | maxalbert/tohu | from .v4.base import *
from .v4.primitive_generators import *
from .v4.derived_generators import *
from .v4.dispatch_generators import *
from .v4.custom_generator import CustomGenerator
from .v4.logging import logger
from .v4.utils import print_generated_sequence
from .v4 import base
from .v4 import primitive_generators
from .v4 import derived_generators
from .v4 import dispatch_generators
from .v4 import custom_generator
from .v4 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ dispatch_generators.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # aliasCheck Python version at startup | from distutils.version import StrictVersion
from platform import python_version
min_supported_python_version = '3.6'
if StrictVersion(python_version()) < StrictVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from .v4.base import *
from .v4.primitive_generators import *
from .v4.derived_generators import *
from .v4.dispatch_generators import *
from .v4.custom_generator import CustomGenerator
from .v4.logging import logger
from .v4.utils import print_generated_sequence
from .v4 import base
from .v4 import primitive_generators
from .v4 import derived_generators
from .v4 import dispatch_generators
from .v4 import custom_generator
from .v4 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ dispatch_generators.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias | <commit_before>from .v4.base import *
from .v4.primitive_generators import *
from .v4.derived_generators import *
from .v4.dispatch_generators import *
from .v4.custom_generator import CustomGenerator
from .v4.logging import logger
from .v4.utils import print_generated_sequence
from .v4 import base
from .v4 import primitive_generators
from .v4 import derived_generators
from .v4 import dispatch_generators
from .v4 import custom_generator
from .v4 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ dispatch_generators.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias<commit_msg>Check Python version at startup<commit_after> | from distutils.version import StrictVersion
from platform import python_version
min_supported_python_version = '3.6'
if StrictVersion(python_version()) < StrictVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from .v4.base import *
from .v4.primitive_generators import *
from .v4.derived_generators import *
from .v4.dispatch_generators import *
from .v4.custom_generator import CustomGenerator
from .v4.logging import logger
from .v4.utils import print_generated_sequence
from .v4 import base
from .v4 import primitive_generators
from .v4 import derived_generators
from .v4 import dispatch_generators
from .v4 import custom_generator
from .v4 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ dispatch_generators.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias | from .v4.base import *
from .v4.primitive_generators import *
from .v4.derived_generators import *
from .v4.dispatch_generators import *
from .v4.custom_generator import CustomGenerator
from .v4.logging import logger
from .v4.utils import print_generated_sequence
from .v4 import base
from .v4 import primitive_generators
from .v4 import derived_generators
from .v4 import dispatch_generators
from .v4 import custom_generator
from .v4 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ dispatch_generators.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # aliasCheck Python version at startupfrom distutils.version import StrictVersion
from platform import python_version
min_supported_python_version = '3.6'
if StrictVersion(python_version()) < StrictVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from .v4.base import *
from .v4.primitive_generators import *
from .v4.derived_generators import *
from .v4.dispatch_generators import *
from .v4.custom_generator import CustomGenerator
from .v4.logging import logger
from .v4.utils import print_generated_sequence
from .v4 import base
from .v4 import primitive_generators
from .v4 import derived_generators
from .v4 import dispatch_generators
from .v4 import custom_generator
from .v4 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ dispatch_generators.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias | <commit_before>from .v4.base import *
from .v4.primitive_generators import *
from .v4.derived_generators import *
from .v4.dispatch_generators import *
from .v4.custom_generator import CustomGenerator
from .v4.logging import logger
from .v4.utils import print_generated_sequence
from .v4 import base
from .v4 import primitive_generators
from .v4 import derived_generators
from .v4 import dispatch_generators
from .v4 import custom_generator
from .v4 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ dispatch_generators.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias<commit_msg>Check Python version at startup<commit_after>from distutils.version import StrictVersion
from platform import python_version
min_supported_python_version = '3.6'
if StrictVersion(python_version()) < StrictVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from .v4.base import *
from .v4.primitive_generators import *
from .v4.derived_generators import *
from .v4.dispatch_generators import *
from .v4.custom_generator import CustomGenerator
from .v4.logging import logger
from .v4.utils import print_generated_sequence
from .v4 import base
from .v4 import primitive_generators
from .v4 import derived_generators
from .v4 import dispatch_generators
from .v4 import custom_generator
from .v4 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ dispatch_generators.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias |
3bf41213abc7ddd8421e11c2149b536c255c13eb | pixpack/utils.py | pixpack/utils.py | #!/usr/bin/env python3
# utility.py
# PixPack Photo Organiser
# It contains some useful functions to increase user experience
import locale
import os
def sys_trans_var():
# check system language
sys_loc = locale.getlocale()
sys_lang = sys_loc[0] # system default language
if sys_lang == 'en_EN' or sys_lang == 'en_GB':
return 0
elif sys_lang == 'tr_TR':
return 1
else:
return 0
def name_existing_photos(dest_directory, dest_file, copy_suffix):
# rename if the file is existed already, for instance: photo_1
dest_file_path = os.path.join(dest_directory, dest_file)
i=1
while os.path.exists(dest_file_path):
dest_file_path = os.path.join(dest_directory, dest_file)
file_name = os.path.basename(dest_file_path)
name, ext = os.path.splitext(file_name)
name = name + "_" + str(copy_suffix) + str(i)
new_name = name + ext
dest_file_path = os.path.join(dest_directory, new_name)
i+=1
return dest_file_path
| #!/usr/bin/env python3
# utility.py
# PixPack Photo Organiser
# It contains some useful functions to increase user experience
import locale
import os
def sys_trans_var():
# check system language
sys_loc = locale.getlocale()
sys_lang = sys_loc[0] # system default language
if sys_lang == 'en_EN' or sys_lang == 'en_GB':
return 0
elif sys_lang == 'tr_TR':
return 1
else:
return 0
def name_existing_photos(dest_directory, dest_file, copy_suffix):
# rename if the file is existed already, for instance: photo_1
dest_file_path = os.path.join(dest_directory, dest_file)
i=1
if os.path.exists(dest_file_path):
dest_directory = os.path.join(dest_directory, "copies")
if not os.path.exists(dest_directory):
os.makedirs(dest_directory)
while os.path.exists(dest_file_path):
dest_file_path = os.path.join(dest_directory, dest_file)
file_name = os.path.basename(dest_file_path)
name, ext = os.path.splitext(file_name)
name = name + "_" + str(copy_suffix) + str(i)
new_name = name + ext
dest_file_path = os.path.join(dest_directory, new_name)
i+=1
return dest_file_path
| Store the duplicated items separately in related folder | Store the duplicated items separately in related folder
| Python | mit | OrhanOdabasi/PixPack,OrhanOdabasi/PixPack | #!/usr/bin/env python3
# utility.py
# PixPack Photo Organiser
# It contains some useful functions to increase user experience
import locale
import os
def sys_trans_var():
# check system language
sys_loc = locale.getlocale()
sys_lang = sys_loc[0] # system default language
if sys_lang == 'en_EN' or sys_lang == 'en_GB':
return 0
elif sys_lang == 'tr_TR':
return 1
else:
return 0
def name_existing_photos(dest_directory, dest_file, copy_suffix):
# rename if the file is existed already, for instance: photo_1
dest_file_path = os.path.join(dest_directory, dest_file)
i=1
while os.path.exists(dest_file_path):
dest_file_path = os.path.join(dest_directory, dest_file)
file_name = os.path.basename(dest_file_path)
name, ext = os.path.splitext(file_name)
name = name + "_" + str(copy_suffix) + str(i)
new_name = name + ext
dest_file_path = os.path.join(dest_directory, new_name)
i+=1
return dest_file_path
Store the duplicated items separately in related folder | #!/usr/bin/env python3
# utility.py
# PixPack Photo Organiser
# It contains some useful functions to increase user experience
import locale
import os
def sys_trans_var():
# check system language
sys_loc = locale.getlocale()
sys_lang = sys_loc[0] # system default language
if sys_lang == 'en_EN' or sys_lang == 'en_GB':
return 0
elif sys_lang == 'tr_TR':
return 1
else:
return 0
def name_existing_photos(dest_directory, dest_file, copy_suffix):
# rename if the file is existed already, for instance: photo_1
dest_file_path = os.path.join(dest_directory, dest_file)
i=1
if os.path.exists(dest_file_path):
dest_directory = os.path.join(dest_directory, "copies")
if not os.path.exists(dest_directory):
os.makedirs(dest_directory)
while os.path.exists(dest_file_path):
dest_file_path = os.path.join(dest_directory, dest_file)
file_name = os.path.basename(dest_file_path)
name, ext = os.path.splitext(file_name)
name = name + "_" + str(copy_suffix) + str(i)
new_name = name + ext
dest_file_path = os.path.join(dest_directory, new_name)
i+=1
return dest_file_path
| <commit_before>#!/usr/bin/env python3
# utility.py
# PixPack Photo Organiser
# It contains some useful functions to increase user experience
import locale
import os
def sys_trans_var():
# check system language
sys_loc = locale.getlocale()
sys_lang = sys_loc[0] # system default language
if sys_lang == 'en_EN' or sys_lang == 'en_GB':
return 0
elif sys_lang == 'tr_TR':
return 1
else:
return 0
def name_existing_photos(dest_directory, dest_file, copy_suffix):
# rename if the file is existed already, for instance: photo_1
dest_file_path = os.path.join(dest_directory, dest_file)
i=1
while os.path.exists(dest_file_path):
dest_file_path = os.path.join(dest_directory, dest_file)
file_name = os.path.basename(dest_file_path)
name, ext = os.path.splitext(file_name)
name = name + "_" + str(copy_suffix) + str(i)
new_name = name + ext
dest_file_path = os.path.join(dest_directory, new_name)
i+=1
return dest_file_path
<commit_msg>Store the duplicated items separately in related folder<commit_after> | #!/usr/bin/env python3
# utility.py
# PixPack Photo Organiser
# It contains some useful functions to increase user experience
import locale
import os
def sys_trans_var():
# check system language
sys_loc = locale.getlocale()
sys_lang = sys_loc[0] # system default language
if sys_lang == 'en_EN' or sys_lang == 'en_GB':
return 0
elif sys_lang == 'tr_TR':
return 1
else:
return 0
def name_existing_photos(dest_directory, dest_file, copy_suffix):
# rename if the file is existed already, for instance: photo_1
dest_file_path = os.path.join(dest_directory, dest_file)
i=1
if os.path.exists(dest_file_path):
dest_directory = os.path.join(dest_directory, "copies")
if not os.path.exists(dest_directory):
os.makedirs(dest_directory)
while os.path.exists(dest_file_path):
dest_file_path = os.path.join(dest_directory, dest_file)
file_name = os.path.basename(dest_file_path)
name, ext = os.path.splitext(file_name)
name = name + "_" + str(copy_suffix) + str(i)
new_name = name + ext
dest_file_path = os.path.join(dest_directory, new_name)
i+=1
return dest_file_path
| #!/usr/bin/env python3
# utility.py
# PixPack Photo Organiser
# It contains some useful functions to increase user experience
import locale
import os
def sys_trans_var():
# check system language
sys_loc = locale.getlocale()
sys_lang = sys_loc[0] # system default language
if sys_lang == 'en_EN' or sys_lang == 'en_GB':
return 0
elif sys_lang == 'tr_TR':
return 1
else:
return 0
def name_existing_photos(dest_directory, dest_file, copy_suffix):
# rename if the file is existed already, for instance: photo_1
dest_file_path = os.path.join(dest_directory, dest_file)
i=1
while os.path.exists(dest_file_path):
dest_file_path = os.path.join(dest_directory, dest_file)
file_name = os.path.basename(dest_file_path)
name, ext = os.path.splitext(file_name)
name = name + "_" + str(copy_suffix) + str(i)
new_name = name + ext
dest_file_path = os.path.join(dest_directory, new_name)
i+=1
return dest_file_path
Store the duplicated items separately in related folder#!/usr/bin/env python3
# utility.py
# PixPack Photo Organiser
# It contains some useful functions to increase user experience
import locale
import os
def sys_trans_var():
# check system language
sys_loc = locale.getlocale()
sys_lang = sys_loc[0] # system default language
if sys_lang == 'en_EN' or sys_lang == 'en_GB':
return 0
elif sys_lang == 'tr_TR':
return 1
else:
return 0
def name_existing_photos(dest_directory, dest_file, copy_suffix):
# rename if the file is existed already, for instance: photo_1
dest_file_path = os.path.join(dest_directory, dest_file)
i=1
if os.path.exists(dest_file_path):
dest_directory = os.path.join(dest_directory, "copies")
if not os.path.exists(dest_directory):
os.makedirs(dest_directory)
while os.path.exists(dest_file_path):
dest_file_path = os.path.join(dest_directory, dest_file)
file_name = os.path.basename(dest_file_path)
name, ext = os.path.splitext(file_name)
name = name + "_" + str(copy_suffix) + str(i)
new_name = name + ext
dest_file_path = os.path.join(dest_directory, new_name)
i+=1
return dest_file_path
| <commit_before>#!/usr/bin/env python3
# utility.py
# PixPack Photo Organiser
# It contains some useful functions to increase user experience
import locale
import os
def sys_trans_var():
    """Return the UI translation index: 1 for Turkish ('tr_TR'), else 0 (English/fallback)."""
    # check system language
    sys_loc = locale.getlocale()
    sys_lang = sys_loc[0] # system default language
    if sys_lang == 'en_EN' or sys_lang == 'en_GB':
        return 0
    elif sys_lang == 'tr_TR':
        return 1
    else:
        # any other or undetermined locale falls back to English (0)
        return 0
def name_existing_photos(dest_directory, dest_file, copy_suffix):
    """Return a collision-free path for dest_file inside dest_directory.

    While the candidate path exists, numbered variants
    "<name>_<copy_suffix><i><ext>" (i = 1, 2, ...) are tried.
    """
    # rename if the file is existed already, for instance: photo_1
    dest_file_path = os.path.join(dest_directory, dest_file)
    i=1
    while os.path.exists(dest_file_path):
        # rebuild the candidate from the ORIGINAL file name each pass,
        # so only the counter i changes between iterations
        dest_file_path = os.path.join(dest_directory, dest_file)
        file_name = os.path.basename(dest_file_path)
        name, ext = os.path.splitext(file_name)
        name = name + "_" + str(copy_suffix) + str(i)
        new_name = name + ext
        dest_file_path = os.path.join(dest_directory, new_name)
        i+=1
    return dest_file_path
<commit_msg>Store the duplicated items separately in related folder<commit_after>#!/usr/bin/env python3
# utility.py
# PixPack Photo Organiser
# It contains some useful functions to increase user experience
import locale
import os
def sys_trans_var():
    """Return the UI translation index: 1 for Turkish ('tr_TR'), else 0 (English/fallback)."""
    # check system language
    sys_loc = locale.getlocale()
    sys_lang = sys_loc[0] # system default language
    if sys_lang == 'en_EN' or sys_lang == 'en_GB':
        return 0
    elif sys_lang == 'tr_TR':
        return 1
    else:
        # any other or undetermined locale falls back to English (0)
        return 0
def name_existing_photos(dest_directory, dest_file, copy_suffix):
    """Return a destination path for dest_file; duplicates are numbered inside a "copies" subfolder.

    If dest_directory/dest_file exists, a "copies" subdirectory is created
    (if needed) and candidates "<name>_<copy_suffix><i><ext>" are tried
    there until a free one is found; otherwise the original path is returned.
    """
    # rename if the file is existed already, for instance: photo_1
    dest_file_path = os.path.join(dest_directory, dest_file)
    i=1
    if os.path.exists(dest_file_path):
        # duplicate detected: divert the file into dest_directory/copies
        dest_directory = os.path.join(dest_directory, "copies")
        if not os.path.exists(dest_directory):
            os.makedirs(dest_directory)
    while os.path.exists(dest_file_path):
        # candidates live in the (possibly redirected) dest_directory
        dest_file_path = os.path.join(dest_directory, dest_file)
        file_name = os.path.basename(dest_file_path)
        name, ext = os.path.splitext(file_name)
        name = name + "_" + str(copy_suffix) + str(i)
        new_name = name + ext
        dest_file_path = os.path.join(dest_directory, new_name)
        i+=1
    return dest_file_path
|
b99f0839a8c9ce88127634f507605d065c22e5a7 | kafka/kafkaConsumer.py | kafka/kafkaConsumer.py | #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
    """Daemon thread that tails the 'voltha-heartbeat' Kafka topic and prints every record."""

    daemon = True

    def run(self):
        # NOTE(review): broker address is hard-coded; the alternate host is
        # kept commented out for quick environment switching — confirm target.
        #consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
        heartbeat_consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
                                           auto_offset_reset='earliest')
        heartbeat_consumer.subscribe(['voltha-heartbeat'])
        for record in heartbeat_consumer:
            print(record)
def main():
    """Spawn the heartbeat consumer thread and keep the process alive while it streams."""
    workers = [Consumer()]
    for worker in workers:
        worker.start()
    # The consumer threads are daemonic; sleeping keeps the process (and them) running.
    time.sleep(3000)
if __name__ == "__main__":
    # configure root logging (timestamped, thread/process annotated) before consuming
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
    )
    main()
| #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
    """Daemon thread that prints every record from the 'voltha-heartbeat' Kafka topic."""
    daemon = True
    def run(self):
        # NOTE(review): broker address hard-coded; alternate host kept commented out — confirm env.
        consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
        #consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
                                 auto_offset_reset='earliest')
        consumer.subscribe(['voltha-heartbeat'])
        for message in consumer:
            print (message)
def main():
    """Start one Consumer thread; sleep to keep the daemonic thread alive."""
    threads = [
        Consumer()
    ]
    for t in threads:
        t.start()
    time.sleep(3000)
if __name__ == "__main__":
    # configure root logging before starting the consumer
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
    )
    main()
| Update IP address of kafka consumer | Update IP address of kafka consumer
| Python | apache-2.0 | opencord/voltha,opencord/voltha,opencord/voltha,opencord/voltha,opencord/voltha | #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
    """Daemon thread that prints every record from the 'voltha-heartbeat' Kafka topic."""
    daemon = True
    def run(self):
        # NOTE(review): broker address hard-coded; alternate host kept commented out — confirm env.
        #consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
        consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
                                 auto_offset_reset='earliest')
        consumer.subscribe(['voltha-heartbeat'])
        for message in consumer:
            print (message)
def main():
    """Start one Consumer thread; sleep to keep the daemonic thread alive."""
    threads = [
        Consumer()
    ]
    for t in threads:
        t.start()
    time.sleep(3000)
if __name__ == "__main__":
    # configure root logging before starting the consumer
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
    )
    main()
Update IP address of kafka consumer | #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
    """Daemon thread that prints every record from the 'voltha-heartbeat' Kafka topic."""
    daemon = True
    def run(self):
        # NOTE(review): broker address hard-coded; alternate host kept commented out — confirm env.
        consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
        #consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
                                 auto_offset_reset='earliest')
        consumer.subscribe(['voltha-heartbeat'])
        for message in consumer:
            print (message)
def main():
    """Start one Consumer thread; sleep to keep the daemonic thread alive."""
    threads = [
        Consumer()
    ]
    for t in threads:
        t.start()
    time.sleep(3000)
if __name__ == "__main__":
    # configure root logging before starting the consumer
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
    )
    main()
| <commit_before>#!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
    """Daemon thread that prints every record from the 'voltha-heartbeat' Kafka topic."""
    daemon = True
    def run(self):
        # NOTE(review): broker address hard-coded; alternate host kept commented out — confirm env.
        #consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
        consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
                                 auto_offset_reset='earliest')
        consumer.subscribe(['voltha-heartbeat'])
        for message in consumer:
            print (message)
def main():
    """Start one Consumer thread; sleep to keep the daemonic thread alive."""
    threads = [
        Consumer()
    ]
    for t in threads:
        t.start()
    time.sleep(3000)
if __name__ == "__main__":
    # configure root logging before starting the consumer
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
    )
    main()
<commit_msg>Update IP address of kafka consumer<commit_after> | #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
    """Daemon thread that prints every record from the 'voltha-heartbeat' Kafka topic."""
    daemon = True
    def run(self):
        # NOTE(review): broker address hard-coded; alternate host kept commented out — confirm env.
        consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
        #consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
                                 auto_offset_reset='earliest')
        consumer.subscribe(['voltha-heartbeat'])
        for message in consumer:
            print (message)
def main():
    """Start one Consumer thread; sleep to keep the daemonic thread alive."""
    threads = [
        Consumer()
    ]
    for t in threads:
        t.start()
    time.sleep(3000)
if __name__ == "__main__":
    # configure root logging before starting the consumer
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
    )
    main()
| #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
    """Daemon thread that prints every record from the 'voltha-heartbeat' Kafka topic."""
    daemon = True
    def run(self):
        # NOTE(review): broker address hard-coded; alternate host kept commented out — confirm env.
        #consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
        consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
                                 auto_offset_reset='earliest')
        consumer.subscribe(['voltha-heartbeat'])
        for message in consumer:
            print (message)
def main():
    """Start one Consumer thread; sleep to keep the daemonic thread alive."""
    threads = [
        Consumer()
    ]
    for t in threads:
        t.start()
    time.sleep(3000)
if __name__ == "__main__":
    # configure root logging before starting the consumer
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
    )
    main()
Update IP address of kafka consumer#!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
    """Daemon thread that prints every record from the 'voltha-heartbeat' Kafka topic."""
    daemon = True
    def run(self):
        # NOTE(review): broker address hard-coded; alternate host kept commented out — confirm env.
        consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
        #consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
                                 auto_offset_reset='earliest')
        consumer.subscribe(['voltha-heartbeat'])
        for message in consumer:
            print (message)
def main():
    """Start one Consumer thread; sleep to keep the daemonic thread alive."""
    threads = [
        Consumer()
    ]
    for t in threads:
        t.start()
    time.sleep(3000)
if __name__ == "__main__":
    # configure root logging before starting the consumer
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
    )
    main()
| <commit_before>#!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
    """Daemon thread that prints every record from the 'voltha-heartbeat' Kafka topic."""
    daemon = True
    def run(self):
        # NOTE(review): broker address hard-coded; alternate host kept commented out — confirm env.
        #consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
        consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
                                 auto_offset_reset='earliest')
        consumer.subscribe(['voltha-heartbeat'])
        for message in consumer:
            print (message)
def main():
    """Start one Consumer thread; sleep to keep the daemonic thread alive."""
    threads = [
        Consumer()
    ]
    for t in threads:
        t.start()
    time.sleep(3000)
if __name__ == "__main__":
    # configure root logging before starting the consumer
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
    )
    main()
<commit_msg>Update IP address of kafka consumer<commit_after>#!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
    """Daemon thread that prints every record from the 'voltha-heartbeat' Kafka topic."""
    daemon = True
    def run(self):
        # NOTE(review): broker address hard-coded; alternate host kept commented out — confirm env.
        consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
        #consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
                                 auto_offset_reset='earliest')
        consumer.subscribe(['voltha-heartbeat'])
        for message in consumer:
            print (message)
def main():
    """Start one Consumer thread; sleep to keep the daemonic thread alive."""
    threads = [
        Consumer()
    ]
    for t in threads:
        t.start()
    time.sleep(3000)
if __name__ == "__main__":
    # configure root logging before starting the consumer
    logging.basicConfig(
        format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.INFO
    )
    main()
|
956f4d02036aa20fa594ee7d369573496525d15e | src/server.py | src/server.py | from flow import Flow
import logging
from . import settings
LOG = logging.getLogger("flowbot.server")
class Server(object):
    """A logged-in connection to the Flow service."""

    def __init__(self):
        """Bring up the local Flow session, provisioning whatever is missing."""
        self.flow = Flow()
        self._start_server()
        self._setup_account()
        self._setup_device()
        self._setup_org()

    def _start_server(self):
        """Attempt to start the flow server for the configured username."""
        try:
            self.flow.start_up(username=settings.USERNAME)
            LOG.info("local account '%s' started", settings.USERNAME)
        except Flow.FlowError as err:
            LOG.debug("start_up failed: '%s'", str(err))

    def _setup_account(self):
        """Create the account if it doesn't already exist (best-effort)."""
        try:
            self.flow.create_account(username=settings.USERNAME,
                                     password=settings.PASSWORD)
        except Flow.FlowError as err:
            LOG.debug("Create account failed: '%s'", str(err))

    def _setup_device(self):
        """Create a local device if it doesn't already exist (best-effort)."""
        try:
            self.flow.create_device(username=settings.USERNAME,
                                    password=settings.PASSWORD)
            LOG.info("local Device for '%s' created", settings.USERNAME)
        except Flow.FlowError as err:
            LOG.debug("create_device failed: '%s'", str(err))

    def _setup_org(self):
        """Request to join the configured org if not already a member."""
        try:
            self.flow.new_org_join_request(oid=settings.ORG_ID)
        except Flow.FlowError as err:
            LOG.debug("org join failed: '%s'", str(err))
| from flow import Flow
import logging
from . import settings
LOG = logging.getLogger("flowbot.server")
class Server(object):
    """A logged-in connection to the Flow service."""

    def __init__(self):
        """Bring up the local Flow session, provisioning whatever is missing."""
        self.flow = Flow()
        self._start_server()
        self._setup_account()
        self._setup_org()

    def _start_server(self):
        """Attempt to start the flow server for the configured username."""
        try:
            self.flow.start_up(username=settings.USERNAME)
            LOG.info("local account '%s' started", settings.USERNAME)
        except Flow.FlowError as err:
            LOG.debug("start_up failed: '%s'", str(err))

    def _setup_account(self):
        """Create the account if it doesn't already exist (best-effort)."""
        try:
            self.flow.create_account(username=settings.USERNAME,
                                     password=settings.PASSWORD)
        except Flow.FlowError as err:
            LOG.debug("Create account failed: '%s'", str(err))

    def _setup_org(self):
        """Request to join the configured org if not already a member."""
        try:
            self.flow.new_org_join_request(oid=settings.ORG_ID)
        except Flow.FlowError as err:
            LOG.debug("org join failed: '%s'", str(err))
| Remove create device step (not needed?) | Remove create device step (not needed?)
| Python | mpl-2.0 | SpiderOak/flowbot | from flow import Flow
import logging
from . import settings
LOG = logging.getLogger("flowbot.server")
class Server(object):
    """A connection to Flow."""
    def __init__(self):
        """Initialize a flow server instance: start it, then best-effort create account, device, and org membership."""
        self.flow = Flow()
        self._start_server()
        self._setup_account()
        self._setup_device()
        self._setup_org()
    def _start_server(self):
        """Attempt to start the flow server."""
        try:
            self.flow.start_up(username=settings.USERNAME)
            LOG.info("local account '%s' started", settings.USERNAME)
        except Flow.FlowError as start_up_err:
            # NOTE(review): presumably fails when the account does not exist
            # yet; _setup_account() below creates it — confirm.
            LOG.debug("start_up failed: '%s'", str(start_up_err))
    def _setup_account(self):
        """Create an account, if it doesn't already exist."""
        try:
            self.flow.create_account(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
        except Flow.FlowError as create_account_err:
            # failure is non-fatal and only logged at debug level
            LOG.debug("Create account failed: '%s'", str(create_account_err))
    def _setup_device(self):
        """Create a device if it doesn't already exist."""
        try:
            self.flow.create_device(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
            LOG.info("local Device for '%s' created", settings.USERNAME)
        except Flow.FlowError as create_device_err:
            LOG.debug("create_device failed: '%s'", str(create_device_err))
    def _setup_org(self):
        """Join the org if not already a member."""
        try:
            self.flow.new_org_join_request(oid=settings.ORG_ID)
        except Flow.FlowError as org_join_err:
            LOG.debug("org join failed: '%s'", str(org_join_err))
Remove create device step (not needed?) | from flow import Flow
import logging
from . import settings
LOG = logging.getLogger("flowbot.server")
class Server(object):
    """A connection to Flow."""
    def __init__(self):
        """Initialize a flow server instance: start it, then best-effort create the account and org membership."""
        self.flow = Flow()
        self._start_server()
        self._setup_account()
        self._setup_org()
    def _start_server(self):
        """Attempt to start the flow server."""
        try:
            self.flow.start_up(username=settings.USERNAME)
            LOG.info("local account '%s' started", settings.USERNAME)
        except Flow.FlowError as start_up_err:
            # NOTE(review): presumably fails when the account does not exist
            # yet; _setup_account() below creates it — confirm.
            LOG.debug("start_up failed: '%s'", str(start_up_err))
    def _setup_account(self):
        """Create an account, if it doesn't already exist."""
        try:
            self.flow.create_account(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
        except Flow.FlowError as create_account_err:
            # failure is non-fatal and only logged at debug level
            LOG.debug("Create account failed: '%s'", str(create_account_err))
    def _setup_org(self):
        """Join the org if not already a member."""
        try:
            self.flow.new_org_join_request(oid=settings.ORG_ID)
        except Flow.FlowError as org_join_err:
            LOG.debug("org join failed: '%s'", str(org_join_err))
| <commit_before>from flow import Flow
import logging
from . import settings
LOG = logging.getLogger("flowbot.server")
class Server(object):
    """A connection to Flow."""
    def __init__(self):
        """Initialize a flow server instance: start it, then best-effort create account, device, and org membership."""
        self.flow = Flow()
        self._start_server()
        self._setup_account()
        self._setup_device()
        self._setup_org()
    def _start_server(self):
        """Attempt to start the flow server."""
        try:
            self.flow.start_up(username=settings.USERNAME)
            LOG.info("local account '%s' started", settings.USERNAME)
        except Flow.FlowError as start_up_err:
            # NOTE(review): presumably fails when the account does not exist
            # yet; _setup_account() below creates it — confirm.
            LOG.debug("start_up failed: '%s'", str(start_up_err))
    def _setup_account(self):
        """Create an account, if it doesn't already exist."""
        try:
            self.flow.create_account(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
        except Flow.FlowError as create_account_err:
            # failure is non-fatal and only logged at debug level
            LOG.debug("Create account failed: '%s'", str(create_account_err))
    def _setup_device(self):
        """Create a device if it doesn't already exist."""
        try:
            self.flow.create_device(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
            LOG.info("local Device for '%s' created", settings.USERNAME)
        except Flow.FlowError as create_device_err:
            LOG.debug("create_device failed: '%s'", str(create_device_err))
    def _setup_org(self):
        """Join the org if not already a member."""
        try:
            self.flow.new_org_join_request(oid=settings.ORG_ID)
        except Flow.FlowError as org_join_err:
            LOG.debug("org join failed: '%s'", str(org_join_err))
<commit_msg>Remove create device step (not needed?)<commit_after> | from flow import Flow
import logging
from . import settings
LOG = logging.getLogger("flowbot.server")
class Server(object):
    """A connection to Flow."""
    def __init__(self):
        """Initialize a flow server instance: start it, then best-effort create the account and org membership."""
        self.flow = Flow()
        self._start_server()
        self._setup_account()
        self._setup_org()
    def _start_server(self):
        """Attempt to start the flow server."""
        try:
            self.flow.start_up(username=settings.USERNAME)
            LOG.info("local account '%s' started", settings.USERNAME)
        except Flow.FlowError as start_up_err:
            # NOTE(review): presumably fails when the account does not exist
            # yet; _setup_account() below creates it — confirm.
            LOG.debug("start_up failed: '%s'", str(start_up_err))
    def _setup_account(self):
        """Create an account, if it doesn't already exist."""
        try:
            self.flow.create_account(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
        except Flow.FlowError as create_account_err:
            # failure is non-fatal and only logged at debug level
            LOG.debug("Create account failed: '%s'", str(create_account_err))
    def _setup_org(self):
        """Join the org if not already a member."""
        try:
            self.flow.new_org_join_request(oid=settings.ORG_ID)
        except Flow.FlowError as org_join_err:
            LOG.debug("org join failed: '%s'", str(org_join_err))
| from flow import Flow
import logging
from . import settings
LOG = logging.getLogger("flowbot.server")
class Server(object):
    """A connection to Flow."""
    def __init__(self):
        """Initialize a flow server instance: start it, then best-effort create account, device, and org membership."""
        self.flow = Flow()
        self._start_server()
        self._setup_account()
        self._setup_device()
        self._setup_org()
    def _start_server(self):
        """Attempt to start the flow server."""
        try:
            self.flow.start_up(username=settings.USERNAME)
            LOG.info("local account '%s' started", settings.USERNAME)
        except Flow.FlowError as start_up_err:
            # NOTE(review): presumably fails when the account does not exist
            # yet; _setup_account() below creates it — confirm.
            LOG.debug("start_up failed: '%s'", str(start_up_err))
    def _setup_account(self):
        """Create an account, if it doesn't already exist."""
        try:
            self.flow.create_account(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
        except Flow.FlowError as create_account_err:
            # failure is non-fatal and only logged at debug level
            LOG.debug("Create account failed: '%s'", str(create_account_err))
    def _setup_device(self):
        """Create a device if it doesn't already exist."""
        try:
            self.flow.create_device(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
            LOG.info("local Device for '%s' created", settings.USERNAME)
        except Flow.FlowError as create_device_err:
            LOG.debug("create_device failed: '%s'", str(create_device_err))
    def _setup_org(self):
        """Join the org if not already a member."""
        try:
            self.flow.new_org_join_request(oid=settings.ORG_ID)
        except Flow.FlowError as org_join_err:
            LOG.debug("org join failed: '%s'", str(org_join_err))
Remove create device step (not needed?)from flow import Flow
import logging
from . import settings
LOG = logging.getLogger("flowbot.server")
class Server(object):
    """A connection to Flow."""
    def __init__(self):
        """Initialize a flow server instance: start it, then best-effort create the account and org membership."""
        self.flow = Flow()
        self._start_server()
        self._setup_account()
        self._setup_org()
    def _start_server(self):
        """Attempt to start the flow server."""
        try:
            self.flow.start_up(username=settings.USERNAME)
            LOG.info("local account '%s' started", settings.USERNAME)
        except Flow.FlowError as start_up_err:
            # NOTE(review): presumably fails when the account does not exist
            # yet; _setup_account() below creates it — confirm.
            LOG.debug("start_up failed: '%s'", str(start_up_err))
    def _setup_account(self):
        """Create an account, if it doesn't already exist."""
        try:
            self.flow.create_account(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
        except Flow.FlowError as create_account_err:
            # failure is non-fatal and only logged at debug level
            LOG.debug("Create account failed: '%s'", str(create_account_err))
    def _setup_org(self):
        """Join the org if not already a member."""
        try:
            self.flow.new_org_join_request(oid=settings.ORG_ID)
        except Flow.FlowError as org_join_err:
            LOG.debug("org join failed: '%s'", str(org_join_err))
| <commit_before>from flow import Flow
import logging
from . import settings
LOG = logging.getLogger("flowbot.server")
class Server(object):
    """A connection to Flow."""
    def __init__(self):
        """Initialize a flow server instance: start it, then best-effort create account, device, and org membership."""
        self.flow = Flow()
        self._start_server()
        self._setup_account()
        self._setup_device()
        self._setup_org()
    def _start_server(self):
        """Attempt to start the flow server."""
        try:
            self.flow.start_up(username=settings.USERNAME)
            LOG.info("local account '%s' started", settings.USERNAME)
        except Flow.FlowError as start_up_err:
            # NOTE(review): presumably fails when the account does not exist
            # yet; _setup_account() below creates it — confirm.
            LOG.debug("start_up failed: '%s'", str(start_up_err))
    def _setup_account(self):
        """Create an account, if it doesn't already exist."""
        try:
            self.flow.create_account(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
        except Flow.FlowError as create_account_err:
            # failure is non-fatal and only logged at debug level
            LOG.debug("Create account failed: '%s'", str(create_account_err))
    def _setup_device(self):
        """Create a device if it doesn't already exist."""
        try:
            self.flow.create_device(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
            LOG.info("local Device for '%s' created", settings.USERNAME)
        except Flow.FlowError as create_device_err:
            LOG.debug("create_device failed: '%s'", str(create_device_err))
    def _setup_org(self):
        """Join the org if not already a member."""
        try:
            self.flow.new_org_join_request(oid=settings.ORG_ID)
        except Flow.FlowError as org_join_err:
            LOG.debug("org join failed: '%s'", str(org_join_err))
<commit_msg>Remove create device step (not needed?)<commit_after>from flow import Flow
import logging
from . import settings
LOG = logging.getLogger("flowbot.server")
class Server(object):
    """A connection to Flow."""
    def __init__(self):
        """Initialize a flow server instance: start it, then best-effort create the account and org membership."""
        self.flow = Flow()
        self._start_server()
        self._setup_account()
        self._setup_org()
    def _start_server(self):
        """Attempt to start the flow server."""
        try:
            self.flow.start_up(username=settings.USERNAME)
            LOG.info("local account '%s' started", settings.USERNAME)
        except Flow.FlowError as start_up_err:
            # NOTE(review): presumably fails when the account does not exist
            # yet; _setup_account() below creates it — confirm.
            LOG.debug("start_up failed: '%s'", str(start_up_err))
    def _setup_account(self):
        """Create an account, if it doesn't already exist."""
        try:
            self.flow.create_account(
                username=settings.USERNAME,
                password=settings.PASSWORD
            )
        except Flow.FlowError as create_account_err:
            # failure is non-fatal and only logged at debug level
            LOG.debug("Create account failed: '%s'", str(create_account_err))
    def _setup_org(self):
        """Join the org if not already a member."""
        try:
            self.flow.new_org_join_request(oid=settings.ORG_ID)
        except Flow.FlowError as org_join_err:
            LOG.debug("org join failed: '%s'", str(org_join_err))
|
dcb62a352b7473779f1cc907c920c9b42ee9ceac | django_extensions/management/commands/print_settings.py | django_extensions/management/commands/print_settings.py | from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
    """Management command that prints every active Django setting."""
    help = "Print the active Django settings."

    def handle(self, *args, **options):
        """Write one "NAME : value" line per non-dunder settings attribute."""
        for name in dir(settings):
            if name.startswith('__'):
                continue
            print('%-40s : %s' % (name, getattr(settings, name)))
| """
print_settings
==============
Django command similar to 'diffsettings' but shows all active Django settings.
"""
from django.core.management.base import NoArgsCommand
from django.conf import settings
from optparse import make_option
class Command(NoArgsCommand):
"""print_settings command"""
help = "Print the active Django settings."
option_list = NoArgsCommand.option_list + (
make_option('--format', default='simple', dest='format',
help='Specifies output format.'),
make_option('--indent', default=4, dest='indent', type='int',
help='Specifies indent level for JSON and YAML'),
)
def handle_noargs(self, **options):
a_dict = {}
for attr in dir(settings):
if self.include_attr(attr):
value = getattr(settings, attr)
a_dict[attr] = value
output_format = options.get('format', 'json')
indent = options.get('indent', 4)
if output_format == 'json':
json = self.import_json()
print json.dumps(a_dict, indent=indent)
elif output_format == 'yaml':
import yaml # requires PyYAML
print yaml.dump(a_dict, indent=indent)
elif output_format == 'pprint':
from pprint import pprint
pprint(a_dict)
else:
self.print_simple(a_dict)
@staticmethod
def include_attr(attr):
"""Whether or not to include attribute in output"""
if attr.startswith('__'):
return False
else:
return True
@staticmethod
def print_simple(a_dict):
"""A very simple output format"""
for key, value in a_dict.items():
print('%-40s = %r' % (key, value))
@staticmethod
def import_json():
"""Import a module for JSON"""
try:
import json
except ImportError:
import simplejson as json
else:
return json
| Make output format configurable (simple, pprint, json, yaml) | Make output format configurable (simple, pprint, json, yaml)
$ pylint django-extensions/django_extensions/management/commands/print_settings.py | grep rated
No config file found, using default configuration
Your code has been rated at 10.00/10 (previous run: 10.00/10)
| Python | mit | lamby/django-extensions,barseghyanartur/django-extensions,dpetzold/django-extensions,jpadilla/django-extensions,Moulde/django-extensions,marctc/django-extensions,levic/django-extensions,kevgathuku/django-extensions,bionikspoon/django-extensions,atchariya/django-extensions,maroux/django-extensions,frewsxcv/django-extensions,maroux/django-extensions,JoseTomasTocino/django-extensions,jpadilla/django-extensions,levic/django-extensions,github-account-because-they-want-it/django-extensions,ctrl-alt-d/django-extensions,django-extensions/django-extensions,ctrl-alt-d/django-extensions,VishvajitP/django-extensions,helenst/django-extensions,mandx/django-extensions,joeyespo/django-extensions,joeyespo/django-extensions,artscoop/django-extensions,zefciu/django-extensions,atchariya/django-extensions,artscoop/django-extensions,ewjoachim/django-extensions,artscoop/django-extensions,linuxmaniac/django-extensions,haakenlid/django-extensions,maroux/django-extensions,rodo/django-extensions,github-account-because-they-want-it/django-extensions,haakenlid/django-extensions,JoseTomasTocino/django-extensions,rodo/django-extensions,levic/django-extensions,bionikspoon/django-extensions,dpetzold/django-extensions,lamby/django-extensions,nikolas/django-extensions,t1m0thy/django-extensions,fusionbox/django-extensions,mandx/django-extensions,frewsxcv/django-extensions,linuxmaniac/django-extensions,nikolas/django-extensions,barseghyanartur/django-extensions,dpetzold/django-extensions,Moulde/django-extensions,t1m0thy/django-extensions,zefciu/django-extensions,kevgathuku/django-extensions,gvangool/django-extensions,ctrl-alt-d/django-extensions,django-extensions/django-extensions,marctc/django-extensions,atchariya/django-extensions,lamby/django-extensions,Moulde/django-extensions,barseghyanartur/django-extensions,frewsxcv/django-extensions,JoseTomasTocino/django-extensions,t1m0thy/django-extensions,linuxmaniac/django-extensions,fusionbox/django-extensions,haakenlid/django-extensions,h
elenst/django-extensions,marctc/django-extensions,zefciu/django-extensions,helenst/django-extensions,gvangool/django-extensions,joeyespo/django-extensions,gvangool/django-extensions,nikolas/django-extensions,bionikspoon/django-extensions,rodo/django-extensions,django-extensions/django-extensions,jpadilla/django-extensions,github-account-because-they-want-it/django-extensions,VishvajitP/django-extensions,ewjoachim/django-extensions,mandx/django-extensions,VishvajitP/django-extensions,kevgathuku/django-extensions,ewjoachim/django-extensions | from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
    """Print every non-dunder attribute of the settings module, one per line."""
    help = "Print the active Django settings."
    def handle(self, *args, **options):
        """Dump all active settings as "NAME : value" lines."""
        for key in dir(settings):
            if key.startswith('__'):
                # skip python internals such as __doc__/__name__
                continue
            value = getattr(settings, key)
            print('%-40s : %s' % (key, value))
Make output format configurable (simple, pprint, json, yaml)
$ pylint django-extensions/django_extensions/management/commands/print_settings.py | grep rated
No config file found, using default configuration
Your code has been rated at 10.00/10 (previous run: 10.00/10) | """
print_settings
==============
Django command similar to 'diffsettings' but shows all active Django settings.
"""
from django.core.management.base import NoArgsCommand
from django.conf import settings
from optparse import make_option
class Command(NoArgsCommand):
    """print_settings command: dump every active Django setting.

    --format selects the output style (simple, pprint, json, yaml);
    --indent sets the indent level for JSON and YAML output.
    """
    help = "Print the active Django settings."
    option_list = NoArgsCommand.option_list + (
        make_option('--format', default='simple', dest='format',
                    help='Specifies output format.'),
        make_option('--indent', default=4, dest='indent', type='int',
                    help='Specifies indent level for JSON and YAML'),
    )
    def handle_noargs(self, **options):
        # collect every non-dunder attribute of the settings object
        a_dict = {}
        for attr in dir(settings):
            if self.include_attr(attr):
                value = getattr(settings, attr)
                a_dict[attr] = value
        # NOTE(review): fallback default 'json' disagrees with the declared
        # option default 'simple'; only reachable when 'format' is absent.
        output_format = options.get('format', 'json')
        indent = options.get('indent', 4)
        if output_format == 'json':
            json = self.import_json()
            print json.dumps(a_dict, indent=indent)
        elif output_format == 'yaml':
            import yaml # requires PyYAML
            print yaml.dump(a_dict, indent=indent)
        elif output_format == 'pprint':
            from pprint import pprint
            pprint(a_dict)
        else:
            self.print_simple(a_dict)
    @staticmethod
    def include_attr(attr):
        """Whether or not to include attribute in output"""
        if attr.startswith('__'):
            return False
        else:
            return True
    @staticmethod
    def print_simple(a_dict):
        """A very simple output format"""
        for key, value in a_dict.items():
            print('%-40s = %r' % (key, value))
    @staticmethod
    def import_json():
        """Import a module for JSON"""
        # NOTE(review): if the simplejson fallback is taken, the `else:`
        # clause is skipped and this returns None — likely a bug; confirm.
        try:
            import json
        except ImportError:
            import simplejson as json
        else:
            return json
| <commit_before>from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
    """Print every non-dunder attribute of the settings module, one per line."""
    help = "Print the active Django settings."
    def handle(self, *args, **options):
        """Dump all active settings as "NAME : value" lines."""
        for key in dir(settings):
            if key.startswith('__'):
                # skip python internals such as __doc__/__name__
                continue
            value = getattr(settings, key)
            print('%-40s : %s' % (key, value))
<commit_msg>Make output format configurable (simple, pprint, json, yaml)
$ pylint django-extensions/django_extensions/management/commands/print_settings.py | grep rated
No config file found, using default configuration
Your code has been rated at 10.00/10 (previous run: 10.00/10)<commit_after> | """
print_settings
==============
Django command similar to 'diffsettings' but shows all active Django settings.
"""
from django.core.management.base import NoArgsCommand
from django.conf import settings
from optparse import make_option
class Command(NoArgsCommand):
    """print_settings command: dump every active Django setting.

    --format selects the output style (simple, pprint, json, yaml);
    --indent sets the indent level for JSON and YAML output.
    """
    help = "Print the active Django settings."
    option_list = NoArgsCommand.option_list + (
        make_option('--format', default='simple', dest='format',
                    help='Specifies output format.'),
        make_option('--indent', default=4, dest='indent', type='int',
                    help='Specifies indent level for JSON and YAML'),
    )
    def handle_noargs(self, **options):
        # collect every non-dunder attribute of the settings object
        a_dict = {}
        for attr in dir(settings):
            if self.include_attr(attr):
                value = getattr(settings, attr)
                a_dict[attr] = value
        # NOTE(review): fallback default 'json' disagrees with the declared
        # option default 'simple'; only reachable when 'format' is absent.
        output_format = options.get('format', 'json')
        indent = options.get('indent', 4)
        if output_format == 'json':
            json = self.import_json()
            print json.dumps(a_dict, indent=indent)
        elif output_format == 'yaml':
            import yaml # requires PyYAML
            print yaml.dump(a_dict, indent=indent)
        elif output_format == 'pprint':
            from pprint import pprint
            pprint(a_dict)
        else:
            self.print_simple(a_dict)
    @staticmethod
    def include_attr(attr):
        """Whether or not to include attribute in output"""
        if attr.startswith('__'):
            return False
        else:
            return True
    @staticmethod
    def print_simple(a_dict):
        """A very simple output format"""
        for key, value in a_dict.items():
            print('%-40s = %r' % (key, value))
    @staticmethod
    def import_json():
        """Import a module for JSON"""
        # NOTE(review): if the simplejson fallback is taken, the `else:`
        # clause is skipped and this returns None — likely a bug; confirm.
        try:
            import json
        except ImportError:
            import simplejson as json
        else:
            return json
| from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
    """Print every non-dunder attribute of the settings module, one per line."""
    help = "Print the active Django settings."
    def handle(self, *args, **options):
        """Dump all active settings as "NAME : value" lines."""
        for key in dir(settings):
            if key.startswith('__'):
                # skip python internals such as __doc__/__name__
                continue
            value = getattr(settings, key)
            print('%-40s : %s' % (key, value))
Make output format configurable (simple, pprint, json, yaml)
$ pylint django-extensions/django_extensions/management/commands/print_settings.py | grep rated
No config file found, using default configuration
Your code has been rated at 10.00/10 (previous run: 10.00/10)"""
print_settings
==============
Django command similar to 'diffsettings' but shows all active Django settings.
"""
from django.core.management.base import NoArgsCommand
from django.conf import settings
from optparse import make_option
class Command(NoArgsCommand):
    """print_settings command: dump every active Django setting.

    --format selects the output style (simple, pprint, json, yaml);
    --indent sets the indent level for JSON and YAML output.
    """
    help = "Print the active Django settings."
    option_list = NoArgsCommand.option_list + (
        make_option('--format', default='simple', dest='format',
                    help='Specifies output format.'),
        make_option('--indent', default=4, dest='indent', type='int',
                    help='Specifies indent level for JSON and YAML'),
    )
    def handle_noargs(self, **options):
        # collect every non-dunder attribute of the settings object
        a_dict = {}
        for attr in dir(settings):
            if self.include_attr(attr):
                value = getattr(settings, attr)
                a_dict[attr] = value
        # NOTE(review): fallback default 'json' disagrees with the declared
        # option default 'simple'; only reachable when 'format' is absent.
        output_format = options.get('format', 'json')
        indent = options.get('indent', 4)
        if output_format == 'json':
            json = self.import_json()
            print json.dumps(a_dict, indent=indent)
        elif output_format == 'yaml':
            import yaml # requires PyYAML
            print yaml.dump(a_dict, indent=indent)
        elif output_format == 'pprint':
            from pprint import pprint
            pprint(a_dict)
        else:
            self.print_simple(a_dict)
    @staticmethod
    def include_attr(attr):
        """Whether or not to include attribute in output"""
        if attr.startswith('__'):
            return False
        else:
            return True
    @staticmethod
    def print_simple(a_dict):
        """A very simple output format"""
        for key, value in a_dict.items():
            print('%-40s = %r' % (key, value))
    @staticmethod
    def import_json():
        """Import a module for JSON"""
        # NOTE(review): if the simplejson fallback is taken, the `else:`
        # clause is skipped and this returns None — likely a bug; confirm.
        try:
            import json
        except ImportError:
            import simplejson as json
        else:
            return json
| <commit_before>from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
help = "Print the active Django settings."
def handle(self, *args, **options):
for key in dir(settings):
if key.startswith('__'):
continue
value = getattr(settings, key)
print('%-40s : %s' % (key, value))
<commit_msg>Make output format configurable (simple, pprint, json, yaml)
$ pylint django-extensions/django_extensions/management/commands/print_settings.py | grep rated
No config file found, using default configuration
Your code has been rated at 10.00/10 (previous run: 10.00/10)<commit_after>"""
print_settings
==============
Django command similar to 'diffsettings' but shows all active Django settings.
"""
from django.core.management.base import NoArgsCommand
from django.conf import settings
from optparse import make_option
class Command(NoArgsCommand):
"""print_settings command"""
help = "Print the active Django settings."
option_list = NoArgsCommand.option_list + (
make_option('--format', default='simple', dest='format',
help='Specifies output format.'),
make_option('--indent', default=4, dest='indent', type='int',
help='Specifies indent level for JSON and YAML'),
)
def handle_noargs(self, **options):
a_dict = {}
for attr in dir(settings):
if self.include_attr(attr):
value = getattr(settings, attr)
a_dict[attr] = value
output_format = options.get('format', 'json')
indent = options.get('indent', 4)
if output_format == 'json':
json = self.import_json()
print json.dumps(a_dict, indent=indent)
elif output_format == 'yaml':
import yaml # requires PyYAML
print yaml.dump(a_dict, indent=indent)
elif output_format == 'pprint':
from pprint import pprint
pprint(a_dict)
else:
self.print_simple(a_dict)
@staticmethod
def include_attr(attr):
"""Whether or not to include attribute in output"""
if attr.startswith('__'):
return False
else:
return True
@staticmethod
def print_simple(a_dict):
"""A very simple output format"""
for key, value in a_dict.items():
print('%-40s = %r' % (key, value))
@staticmethod
def import_json():
"""Import a module for JSON"""
try:
import json
except ImportError:
import simplejson as json
else:
return json
|
a1ec7fbf4bb00d2a24dfba0acf6baf18d1b016ee | froide/comments/forms.py | froide/comments/forms.py | from django import forms
from django.utils.translation import gettext_lazy as _
from django_comments.forms import (
CommentForm as DjangoCommentForm,
COMMENT_MAX_LENGTH
)
class CommentForm(DjangoCommentForm):
name = forms.CharField(
label=_('Name'),
required=True,
help_text=_('Your name will only be visible to logged in users.'),
widget=forms.TextInput(
attrs={
'class': 'form-control'
}
)
)
comment = forms.CharField(
label=_('Comment'),
widget=forms.Textarea(
attrs={
'class': 'form-control',
'rows': '4'
}
),
max_length=COMMENT_MAX_LENGTH
)
| from django import forms
from django.utils.translation import gettext_lazy as _
from django_comments.forms import (
CommentForm as DjangoCommentForm,
COMMENT_MAX_LENGTH
)
class CommentForm(DjangoCommentForm):
name = forms.CharField(
label=_('Name'),
required=True,
max_length=50,
help_text=_('Your name will only be visible to logged in users.'),
widget=forms.TextInput(
attrs={
'class': 'form-control'
}
)
)
comment = forms.CharField(
label=_('Comment'),
widget=forms.Textarea(
attrs={
'class': 'form-control',
'rows': '4'
}
),
max_length=COMMENT_MAX_LENGTH
)
| Add max length to comment field | Add max length to comment field | Python | mit | fin/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide | from django import forms
from django.utils.translation import gettext_lazy as _
from django_comments.forms import (
CommentForm as DjangoCommentForm,
COMMENT_MAX_LENGTH
)
class CommentForm(DjangoCommentForm):
name = forms.CharField(
label=_('Name'),
required=True,
help_text=_('Your name will only be visible to logged in users.'),
widget=forms.TextInput(
attrs={
'class': 'form-control'
}
)
)
comment = forms.CharField(
label=_('Comment'),
widget=forms.Textarea(
attrs={
'class': 'form-control',
'rows': '4'
}
),
max_length=COMMENT_MAX_LENGTH
)
Add max length to comment field | from django import forms
from django.utils.translation import gettext_lazy as _
from django_comments.forms import (
CommentForm as DjangoCommentForm,
COMMENT_MAX_LENGTH
)
class CommentForm(DjangoCommentForm):
name = forms.CharField(
label=_('Name'),
required=True,
max_length=50,
help_text=_('Your name will only be visible to logged in users.'),
widget=forms.TextInput(
attrs={
'class': 'form-control'
}
)
)
comment = forms.CharField(
label=_('Comment'),
widget=forms.Textarea(
attrs={
'class': 'form-control',
'rows': '4'
}
),
max_length=COMMENT_MAX_LENGTH
)
| <commit_before>from django import forms
from django.utils.translation import gettext_lazy as _
from django_comments.forms import (
CommentForm as DjangoCommentForm,
COMMENT_MAX_LENGTH
)
class CommentForm(DjangoCommentForm):
name = forms.CharField(
label=_('Name'),
required=True,
help_text=_('Your name will only be visible to logged in users.'),
widget=forms.TextInput(
attrs={
'class': 'form-control'
}
)
)
comment = forms.CharField(
label=_('Comment'),
widget=forms.Textarea(
attrs={
'class': 'form-control',
'rows': '4'
}
),
max_length=COMMENT_MAX_LENGTH
)
<commit_msg>Add max length to comment field<commit_after> | from django import forms
from django.utils.translation import gettext_lazy as _
from django_comments.forms import (
CommentForm as DjangoCommentForm,
COMMENT_MAX_LENGTH
)
class CommentForm(DjangoCommentForm):
name = forms.CharField(
label=_('Name'),
required=True,
max_length=50,
help_text=_('Your name will only be visible to logged in users.'),
widget=forms.TextInput(
attrs={
'class': 'form-control'
}
)
)
comment = forms.CharField(
label=_('Comment'),
widget=forms.Textarea(
attrs={
'class': 'form-control',
'rows': '4'
}
),
max_length=COMMENT_MAX_LENGTH
)
| from django import forms
from django.utils.translation import gettext_lazy as _
from django_comments.forms import (
CommentForm as DjangoCommentForm,
COMMENT_MAX_LENGTH
)
class CommentForm(DjangoCommentForm):
name = forms.CharField(
label=_('Name'),
required=True,
help_text=_('Your name will only be visible to logged in users.'),
widget=forms.TextInput(
attrs={
'class': 'form-control'
}
)
)
comment = forms.CharField(
label=_('Comment'),
widget=forms.Textarea(
attrs={
'class': 'form-control',
'rows': '4'
}
),
max_length=COMMENT_MAX_LENGTH
)
Add max length to comment fieldfrom django import forms
from django.utils.translation import gettext_lazy as _
from django_comments.forms import (
CommentForm as DjangoCommentForm,
COMMENT_MAX_LENGTH
)
class CommentForm(DjangoCommentForm):
name = forms.CharField(
label=_('Name'),
required=True,
max_length=50,
help_text=_('Your name will only be visible to logged in users.'),
widget=forms.TextInput(
attrs={
'class': 'form-control'
}
)
)
comment = forms.CharField(
label=_('Comment'),
widget=forms.Textarea(
attrs={
'class': 'form-control',
'rows': '4'
}
),
max_length=COMMENT_MAX_LENGTH
)
| <commit_before>from django import forms
from django.utils.translation import gettext_lazy as _
from django_comments.forms import (
CommentForm as DjangoCommentForm,
COMMENT_MAX_LENGTH
)
class CommentForm(DjangoCommentForm):
name = forms.CharField(
label=_('Name'),
required=True,
help_text=_('Your name will only be visible to logged in users.'),
widget=forms.TextInput(
attrs={
'class': 'form-control'
}
)
)
comment = forms.CharField(
label=_('Comment'),
widget=forms.Textarea(
attrs={
'class': 'form-control',
'rows': '4'
}
),
max_length=COMMENT_MAX_LENGTH
)
<commit_msg>Add max length to comment field<commit_after>from django import forms
from django.utils.translation import gettext_lazy as _
from django_comments.forms import (
CommentForm as DjangoCommentForm,
COMMENT_MAX_LENGTH
)
class CommentForm(DjangoCommentForm):
name = forms.CharField(
label=_('Name'),
required=True,
max_length=50,
help_text=_('Your name will only be visible to logged in users.'),
widget=forms.TextInput(
attrs={
'class': 'form-control'
}
)
)
comment = forms.CharField(
label=_('Comment'),
widget=forms.Textarea(
attrs={
'class': 'form-control',
'rows': '4'
}
),
max_length=COMMENT_MAX_LENGTH
)
|
06458ef8dd3db840c37127a6a4c0c41ed2ffe6f4 | pybossa/sentinel/__init__.py | pybossa/sentinel/__init__.py | from redis import sentinel, StrictRedis
class Sentinel(object):
def __init__(self, app=None):
self.app = app
self.master = StrictRedis()
self.slave = self.master
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1)
redis_db = app.config.get('REDIS_DB') or 0
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
| from redis import sentinel, StrictRedis
class Sentinel(object):
def __init__(self, app=None):
self.app = app
self.master = StrictRedis()
self.slave = self.master
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1,
retry_on_timeout=True)
redis_db = app.config.get('REDIS_DB') or 0
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
| Add retry on timeout option to sentinel connection | Add retry on timeout option to sentinel connection
| Python | agpl-3.0 | geotagx/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa,geotagx/pybossa,OpenNewsLabs/pybossa,PyBossa/pybossa,OpenNewsLabs/pybossa,jean/pybossa,PyBossa/pybossa,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa,Scifabric/pybossa | from redis import sentinel, StrictRedis
class Sentinel(object):
def __init__(self, app=None):
self.app = app
self.master = StrictRedis()
self.slave = self.master
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1)
redis_db = app.config.get('REDIS_DB') or 0
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
Add retry on timeout option to sentinel connection | from redis import sentinel, StrictRedis
class Sentinel(object):
def __init__(self, app=None):
self.app = app
self.master = StrictRedis()
self.slave = self.master
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1,
retry_on_timeout=True)
redis_db = app.config.get('REDIS_DB') or 0
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
| <commit_before>from redis import sentinel, StrictRedis
class Sentinel(object):
def __init__(self, app=None):
self.app = app
self.master = StrictRedis()
self.slave = self.master
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1)
redis_db = app.config.get('REDIS_DB') or 0
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
<commit_msg>Add retry on timeout option to sentinel connection<commit_after> | from redis import sentinel, StrictRedis
class Sentinel(object):
def __init__(self, app=None):
self.app = app
self.master = StrictRedis()
self.slave = self.master
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1,
retry_on_timeout=True)
redis_db = app.config.get('REDIS_DB') or 0
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
| from redis import sentinel, StrictRedis
class Sentinel(object):
def __init__(self, app=None):
self.app = app
self.master = StrictRedis()
self.slave = self.master
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1)
redis_db = app.config.get('REDIS_DB') or 0
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
Add retry on timeout option to sentinel connectionfrom redis import sentinel, StrictRedis
class Sentinel(object):
def __init__(self, app=None):
self.app = app
self.master = StrictRedis()
self.slave = self.master
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1,
retry_on_timeout=True)
redis_db = app.config.get('REDIS_DB') or 0
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
| <commit_before>from redis import sentinel, StrictRedis
class Sentinel(object):
def __init__(self, app=None):
self.app = app
self.master = StrictRedis()
self.slave = self.master
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1)
redis_db = app.config.get('REDIS_DB') or 0
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
<commit_msg>Add retry on timeout option to sentinel connection<commit_after>from redis import sentinel, StrictRedis
class Sentinel(object):
def __init__(self, app=None):
self.app = app
self.master = StrictRedis()
self.slave = self.master
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1,
retry_on_timeout=True)
redis_db = app.config.get('REDIS_DB') or 0
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
|
ec92c0cedb6da3180284273926ccfe05ac334729 | snippets/base/middleware.py | snippets/base/middleware.py | from django.core.urlresolvers import resolve
from snippets.base.views import fetch_snippets
class FetchSnippetsMiddleware(object):
"""
If the incoming request is for the fetch_snippets view, execute the view
and return it before other middleware can run.
fetch_snippets is a very very basic view that doesn't need any of the
middleware that the rest of the site needs, such as the session or csrf
middlewares. To avoid unintended issues (such as headers we don't want
being added to the response) this middleware detects requests to that view
and executes the view early, bypassing the rest of the middleware.
"""
def process_request(self, request):
result = resolve(request.path)
if result.func == fetch_snippets:
return fetch_snippets(request, *result.args, **result.kwargs)
| from django.core.urlresolvers import resolve
from snippets.base.views import fetch_snippets
class FetchSnippetsMiddleware(object):
"""
If the incoming request is for the fetch_snippets view, execute the view
and return it before other middleware can run.
fetch_snippets is a very very basic view that doesn't need any of the
middleware that the rest of the site needs, such as the session or csrf
middlewares. To avoid unintended issues (such as headers we don't want
being added to the response) this middleware detects requests to that view
and executes the view early, bypassing the rest of the middleware.
Also disables New Relic's apdex for views that aren't the
fetch_snippets, as we only really care about the apdex for
fetch_snippets.
"""
def process_request(self, request):
result = resolve(request.path)
if result.func == fetch_snippets:
return fetch_snippets(request, *result.args, **result.kwargs)
else:
# Not fetch_snippets? Then no New Relic for you!
try:
import newrelic.agent
except ImportError:
pass
else:
newrelic.agent.suppress_apdex_metric()
| Disable New Relic's apdex metric on non-fetch_snippets views. | Disable New Relic's apdex metric on non-fetch_snippets views.
New Relic doesn't allow us to set different thresholds for different
pages across the site, so in order to get valuable metrics on the main
view for snippets, fetch_snippets, we need to disable apdex for the
admin interface and public snippets views, which don't need as close
monitoring. | Python | mpl-2.0 | akatsoulas/snippets-service,schalkneethling/snippets-service,mozilla/snippets-service,mozilla/snippets-service,mozilla/snippets-service,mozmar/snippets-service,schalkneethling/snippets-service,Osmose/snippets-service,bensternthal/snippets-service,akatsoulas/snippets-service,mozmar/snippets-service,glogiotatidis/snippets-service,mozilla/snippets-service,mozmar/snippets-service,bensternthal/snippets-service,glogiotatidis/snippets-service,glogiotatidis/snippets-service,Osmose/snippets-service,Osmose/snippets-service,glogiotatidis/snippets-service,Osmose/snippets-service,schalkneethling/snippets-service,mozmar/snippets-service,bensternthal/snippets-service,akatsoulas/snippets-service,schalkneethling/snippets-service,bensternthal/snippets-service,akatsoulas/snippets-service | from django.core.urlresolvers import resolve
from snippets.base.views import fetch_snippets
class FetchSnippetsMiddleware(object):
"""
If the incoming request is for the fetch_snippets view, execute the view
and return it before other middleware can run.
fetch_snippets is a very very basic view that doesn't need any of the
middleware that the rest of the site needs, such as the session or csrf
middlewares. To avoid unintended issues (such as headers we don't want
being added to the response) this middleware detects requests to that view
and executes the view early, bypassing the rest of the middleware.
"""
def process_request(self, request):
result = resolve(request.path)
if result.func == fetch_snippets:
return fetch_snippets(request, *result.args, **result.kwargs)
Disable New Relic's apdex metric on non-fetch_snippets views.
New Relic doesn't allow us to set different thresholds for different
pages across the site, so in order to get valuable metrics on the main
view for snippets, fetch_snippets, we need to disable apdex for the
admin interface and public snippets views, which don't need as close
monitoring. | from django.core.urlresolvers import resolve
from snippets.base.views import fetch_snippets
class FetchSnippetsMiddleware(object):
"""
If the incoming request is for the fetch_snippets view, execute the view
and return it before other middleware can run.
fetch_snippets is a very very basic view that doesn't need any of the
middleware that the rest of the site needs, such as the session or csrf
middlewares. To avoid unintended issues (such as headers we don't want
being added to the response) this middleware detects requests to that view
and executes the view early, bypassing the rest of the middleware.
Also disables New Relic's apdex for views that aren't the
fetch_snippets, as we only really care about the apdex for
fetch_snippets.
"""
def process_request(self, request):
result = resolve(request.path)
if result.func == fetch_snippets:
return fetch_snippets(request, *result.args, **result.kwargs)
else:
# Not fetch_snippets? Then no New Relic for you!
try:
import newrelic.agent
except ImportError:
pass
else:
newrelic.agent.suppress_apdex_metric()
| <commit_before>from django.core.urlresolvers import resolve
from snippets.base.views import fetch_snippets
class FetchSnippetsMiddleware(object):
"""
If the incoming request is for the fetch_snippets view, execute the view
and return it before other middleware can run.
fetch_snippets is a very very basic view that doesn't need any of the
middleware that the rest of the site needs, such as the session or csrf
middlewares. To avoid unintended issues (such as headers we don't want
being added to the response) this middleware detects requests to that view
and executes the view early, bypassing the rest of the middleware.
"""
def process_request(self, request):
result = resolve(request.path)
if result.func == fetch_snippets:
return fetch_snippets(request, *result.args, **result.kwargs)
<commit_msg>Disable New Relic's apdex metric on non-fetch_snippets views.
New Relic doesn't allow us to set different thresholds for different
pages across the site, so in order to get valuable metrics on the main
view for snippets, fetch_snippets, we need to disable apdex for the
admin interface and public snippets views, which don't need as close
monitoring.<commit_after> | from django.core.urlresolvers import resolve
from snippets.base.views import fetch_snippets
class FetchSnippetsMiddleware(object):
"""
If the incoming request is for the fetch_snippets view, execute the view
and return it before other middleware can run.
fetch_snippets is a very very basic view that doesn't need any of the
middleware that the rest of the site needs, such as the session or csrf
middlewares. To avoid unintended issues (such as headers we don't want
being added to the response) this middleware detects requests to that view
and executes the view early, bypassing the rest of the middleware.
Also disables New Relic's apdex for views that aren't the
fetch_snippets, as we only really care about the apdex for
fetch_snippets.
"""
def process_request(self, request):
result = resolve(request.path)
if result.func == fetch_snippets:
return fetch_snippets(request, *result.args, **result.kwargs)
else:
# Not fetch_snippets? Then no New Relic for you!
try:
import newrelic.agent
except ImportError:
pass
else:
newrelic.agent.suppress_apdex_metric()
| from django.core.urlresolvers import resolve
from snippets.base.views import fetch_snippets
class FetchSnippetsMiddleware(object):
"""
If the incoming request is for the fetch_snippets view, execute the view
and return it before other middleware can run.
fetch_snippets is a very very basic view that doesn't need any of the
middleware that the rest of the site needs, such as the session or csrf
middlewares. To avoid unintended issues (such as headers we don't want
being added to the response) this middleware detects requests to that view
and executes the view early, bypassing the rest of the middleware.
"""
def process_request(self, request):
result = resolve(request.path)
if result.func == fetch_snippets:
return fetch_snippets(request, *result.args, **result.kwargs)
Disable New Relic's apdex metric on non-fetch_snippets views.
New Relic doesn't allow us to set different thresholds for different
pages across the site, so in order to get valuable metrics on the main
view for snippets, fetch_snippets, we need to disable apdex for the
admin interface and public snippets views, which don't need as close
monitoring.from django.core.urlresolvers import resolve
from snippets.base.views import fetch_snippets
class FetchSnippetsMiddleware(object):
"""
If the incoming request is for the fetch_snippets view, execute the view
and return it before other middleware can run.
fetch_snippets is a very very basic view that doesn't need any of the
middleware that the rest of the site needs, such as the session or csrf
middlewares. To avoid unintended issues (such as headers we don't want
being added to the response) this middleware detects requests to that view
and executes the view early, bypassing the rest of the middleware.
Also disables New Relic's apdex for views that aren't the
fetch_snippets, as we only really care about the apdex for
fetch_snippets.
"""
def process_request(self, request):
result = resolve(request.path)
if result.func == fetch_snippets:
return fetch_snippets(request, *result.args, **result.kwargs)
else:
# Not fetch_snippets? Then no New Relic for you!
try:
import newrelic.agent
except ImportError:
pass
else:
newrelic.agent.suppress_apdex_metric()
| <commit_before>from django.core.urlresolvers import resolve
from snippets.base.views import fetch_snippets
class FetchSnippetsMiddleware(object):
"""
If the incoming request is for the fetch_snippets view, execute the view
and return it before other middleware can run.
fetch_snippets is a very very basic view that doesn't need any of the
middleware that the rest of the site needs, such as the session or csrf
middlewares. To avoid unintended issues (such as headers we don't want
being added to the response) this middleware detects requests to that view
and executes the view early, bypassing the rest of the middleware.
"""
def process_request(self, request):
result = resolve(request.path)
if result.func == fetch_snippets:
return fetch_snippets(request, *result.args, **result.kwargs)
<commit_msg>Disable New Relic's apdex metric on non-fetch_snippets views.
New Relic doesn't allow us to set different thresholds for different
pages across the site, so in order to get valuable metrics on the main
view for snippets, fetch_snippets, we need to disable apdex for the
admin interface and public snippets views, which don't need as close
monitoring.<commit_after>from django.core.urlresolvers import resolve
from snippets.base.views import fetch_snippets
class FetchSnippetsMiddleware(object):
"""
If the incoming request is for the fetch_snippets view, execute the view
and return it before other middleware can run.
fetch_snippets is a very very basic view that doesn't need any of the
middleware that the rest of the site needs, such as the session or csrf
middlewares. To avoid unintended issues (such as headers we don't want
being added to the response) this middleware detects requests to that view
and executes the view early, bypassing the rest of the middleware.
Also disables New Relic's apdex for views that aren't the
fetch_snippets, as we only really care about the apdex for
fetch_snippets.
"""
def process_request(self, request):
result = resolve(request.path)
if result.func == fetch_snippets:
return fetch_snippets(request, *result.args, **result.kwargs)
else:
# Not fetch_snippets? Then no New Relic for you!
try:
import newrelic.agent
except ImportError:
pass
else:
newrelic.agent.suppress_apdex_metric()
|
439f0326cc71cf19e41137745aedeec391727207 | src/sentry/web/frontend/organization_api_keys.py | src/sentry/web/frontend/organization_api_keys.py | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.frontend.base import OrganizationView
class OrganizationApiKeysView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization):
if request.POST.get('op') == 'newkey':
key = ApiKey.objects.create(organization=organization)
redirect_uri = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
return HttpResponseRedirect(redirect_uri)
key_list = sorted(ApiKey.objects.filter(
organization=organization,
), key=lambda x: x.label)
context = {
'key_list': key_list,
}
return self.respond('sentry/organization-api-keys.html', context)
| from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from operator import or_
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.frontend.base import OrganizationView
DEFAULT_SCOPES = [
'project:read',
'event:read',
'team:read',
'org:read',
]
class OrganizationApiKeysView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization):
if request.POST.get('op') == 'newkey':
key = ApiKey.objects.create(
organization=organization,
scopes=reduce(or_, [getattr(ApiKey.scopes, s) for s in DEFAULT_SCOPES])
)
redirect_uri = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
return HttpResponseRedirect(redirect_uri)
key_list = sorted(ApiKey.objects.filter(
organization=organization,
), key=lambda x: x.label)
context = {
'key_list': key_list,
}
return self.respond('sentry/organization-api-keys.html', context)
| Set default scopes to read | Set default scopes to read
| Python | bsd-3-clause | pauloschilling/sentry,llonchj/sentry,JamesMura/sentry,hongliang5623/sentry,gg7/sentry,gencer/sentry,BuildingLink/sentry,llonchj/sentry,ifduyue/sentry,kevinastone/sentry,drcapulet/sentry,BayanGroup/sentry,Kryz/sentry,1tush/sentry,kevinlondon/sentry,beeftornado/sentry,beeftornado/sentry,felixbuenemann/sentry,JamesMura/sentry,nicholasserra/sentry,fotinakis/sentry,zenefits/sentry,beeftornado/sentry,gencer/sentry,BuildingLink/sentry,JamesMura/sentry,jean/sentry,kevinastone/sentry,BuildingLink/sentry,ewdurbin/sentry,Natim/sentry,kevinastone/sentry,ifduyue/sentry,nicholasserra/sentry,JackDanger/sentry,drcapulet/sentry,Kryz/sentry,vperron/sentry,alexm92/sentry,pauloschilling/sentry,songyi199111/sentry,zenefits/sentry,boneyao/sentry,mvaled/sentry,looker/sentry,fotinakis/sentry,1tush/sentry,mitsuhiko/sentry,vperron/sentry,boneyao/sentry,gg7/sentry,kevinlondon/sentry,fuziontech/sentry,argonemyth/sentry,jean/sentry,looker/sentry,BayanGroup/sentry,gg7/sentry,mvaled/sentry,ngonzalvez/sentry,songyi199111/sentry,korealerts1/sentry,TedaLIEz/sentry,Kryz/sentry,felixbuenemann/sentry,fotinakis/sentry,zenefits/sentry,mvaled/sentry,korealerts1/sentry,1tush/sentry,pauloschilling/sentry,imankulov/sentry,daevaorn/sentry,fotinakis/sentry,BuildingLink/sentry,JTCunning/sentry,alexm92/sentry,ifduyue/sentry,daevaorn/sentry,Natim/sentry,JamesMura/sentry,felixbuenemann/sentry,alexm92/sentry,ewdurbin/sentry,argonemyth/sentry,TedaLIEz/sentry,hongliang5623/sentry,ngonzalvez/sentry,wong2/sentry,wujuguang/sentry,JackDanger/sentry,fuziontech/sentry,looker/sentry,looker/sentry,jean/sentry,TedaLIEz/sentry,JackDanger/sentry,daevaorn/sentry,drcapulet/sentry,mitsuhiko/sentry,gencer/sentry,fuziontech/sentry,kevinlondon/sentry,nicholasserra/sentry,looker/sentry,wujuguang/sentry,BayanGroup/sentry,hongliang5623/sentry,wujuguang/sentry,mvaled/sentry,songyi199111/sentry,jean/sentry,ewdurbin/sentry,zenefits/sentry,mvaled/sentry,JTCunning/sentry,ngonzalvez/sentry,argonemyth/sentry,JTCunning
/sentry,JamesMura/sentry,wong2/sentry,imankulov/sentry,boneyao/sentry,korealerts1/sentry,gencer/sentry,imankulov/sentry,llonchj/sentry,wong2/sentry,daevaorn/sentry,ifduyue/sentry,BuildingLink/sentry,gencer/sentry,ifduyue/sentry,jean/sentry,mvaled/sentry,vperron/sentry,Natim/sentry,zenefits/sentry | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.frontend.base import OrganizationView
class OrganizationApiKeysView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization):
if request.POST.get('op') == 'newkey':
key = ApiKey.objects.create(organization=organization)
redirect_uri = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
return HttpResponseRedirect(redirect_uri)
key_list = sorted(ApiKey.objects.filter(
organization=organization,
), key=lambda x: x.label)
context = {
'key_list': key_list,
}
return self.respond('sentry/organization-api-keys.html', context)
Set default scopes to read | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from operator import or_
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.frontend.base import OrganizationView
DEFAULT_SCOPES = [
'project:read',
'event:read',
'team:read',
'org:read',
]
class OrganizationApiKeysView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization):
if request.POST.get('op') == 'newkey':
key = ApiKey.objects.create(
organization=organization,
scopes=reduce(or_, [getattr(ApiKey.scopes, s) for s in DEFAULT_SCOPES])
)
redirect_uri = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
return HttpResponseRedirect(redirect_uri)
key_list = sorted(ApiKey.objects.filter(
organization=organization,
), key=lambda x: x.label)
context = {
'key_list': key_list,
}
return self.respond('sentry/organization-api-keys.html', context)
| <commit_before>from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.frontend.base import OrganizationView
class OrganizationApiKeysView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization):
if request.POST.get('op') == 'newkey':
key = ApiKey.objects.create(organization=organization)
redirect_uri = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
return HttpResponseRedirect(redirect_uri)
key_list = sorted(ApiKey.objects.filter(
organization=organization,
), key=lambda x: x.label)
context = {
'key_list': key_list,
}
return self.respond('sentry/organization-api-keys.html', context)
<commit_msg>Set default scopes to read<commit_after> | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from operator import or_
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.frontend.base import OrganizationView
DEFAULT_SCOPES = [
'project:read',
'event:read',
'team:read',
'org:read',
]
class OrganizationApiKeysView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization):
if request.POST.get('op') == 'newkey':
key = ApiKey.objects.create(
organization=organization,
scopes=reduce(or_, [getattr(ApiKey.scopes, s) for s in DEFAULT_SCOPES])
)
redirect_uri = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
return HttpResponseRedirect(redirect_uri)
key_list = sorted(ApiKey.objects.filter(
organization=organization,
), key=lambda x: x.label)
context = {
'key_list': key_list,
}
return self.respond('sentry/organization-api-keys.html', context)
| from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.frontend.base import OrganizationView
class OrganizationApiKeysView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization):
if request.POST.get('op') == 'newkey':
key = ApiKey.objects.create(organization=organization)
redirect_uri = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
return HttpResponseRedirect(redirect_uri)
key_list = sorted(ApiKey.objects.filter(
organization=organization,
), key=lambda x: x.label)
context = {
'key_list': key_list,
}
return self.respond('sentry/organization-api-keys.html', context)
Set default scopes to readfrom __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from operator import or_
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.frontend.base import OrganizationView
DEFAULT_SCOPES = [
'project:read',
'event:read',
'team:read',
'org:read',
]
class OrganizationApiKeysView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization):
if request.POST.get('op') == 'newkey':
key = ApiKey.objects.create(
organization=organization,
scopes=reduce(or_, [getattr(ApiKey.scopes, s) for s in DEFAULT_SCOPES])
)
redirect_uri = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
return HttpResponseRedirect(redirect_uri)
key_list = sorted(ApiKey.objects.filter(
organization=organization,
), key=lambda x: x.label)
context = {
'key_list': key_list,
}
return self.respond('sentry/organization-api-keys.html', context)
| <commit_before>from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.frontend.base import OrganizationView
class OrganizationApiKeysView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization):
if request.POST.get('op') == 'newkey':
key = ApiKey.objects.create(organization=organization)
redirect_uri = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
return HttpResponseRedirect(redirect_uri)
key_list = sorted(ApiKey.objects.filter(
organization=organization,
), key=lambda x: x.label)
context = {
'key_list': key_list,
}
return self.respond('sentry/organization-api-keys.html', context)
<commit_msg>Set default scopes to read<commit_after>from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from operator import or_
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.frontend.base import OrganizationView
DEFAULT_SCOPES = [
'project:read',
'event:read',
'team:read',
'org:read',
]
class OrganizationApiKeysView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization):
if request.POST.get('op') == 'newkey':
key = ApiKey.objects.create(
organization=organization,
scopes=reduce(or_, [getattr(ApiKey.scopes, s) for s in DEFAULT_SCOPES])
)
redirect_uri = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
return HttpResponseRedirect(redirect_uri)
key_list = sorted(ApiKey.objects.filter(
organization=organization,
), key=lambda x: x.label)
context = {
'key_list': key_list,
}
return self.respond('sentry/organization-api-keys.html', context)
|
f8bb6849aaf82ed74a37d12eaa14d111a85e5e50 | lms/djangoapps/philu_overrides/migrations/0001_initial.py | lms/djangoapps/philu_overrides/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
#TODO: find a more better way of handling history of builtin packages
class Migration(migrations.Migration):
dependencies = [
('enterprise', '0009_auto_20161130_1651'),
]
def add_history_columns(apps, schema_editor):
cursor = connection.cursor()
query = 'ALTER TABLE enterprise_historicalenterprisecustomer ADD start_date DATETIME NULL, ADD end_date DATETIME NULL;'
cursor.execute(query)
operations = [
migrations.RunPython(add_history_columns),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
#TODO: find a more better way of handling history of builtin packages
class Migration(migrations.Migration):
dependencies = [
('enterprise', '0009_auto_20161130_1651'),
]
def add_history_columns(apps, schema_editor):
cursor = connection.cursor()
tables = [
'enterprise_historicalenterprisecustomer',
'enterprise_historicalenterprisecustomerentitlement',
'enterprise_historicalenterprisecourseenrollment',
'enterprise_historicalenterprisecustomercatalog',
'enterprise_historicalenrollmentnotificationemailtemplate',
'consent_historicaldatasharingconsent',
'degreed_historicaldegreedenterprisecustomerconfiguration'
]
for t in tables:
query = 'ALTER TABLE %s ADD start_date DATETIME NULL, ADD end_date DATETIME NULL;' % t
cursor.execute(query)
operations = [
migrations.RunPython(add_history_columns),
]
| Add history columns `start_date` and `end_date` to edx builtin packages | Add history columns `start_date` and `end_date` to edx builtin packages
| Python | agpl-3.0 | philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
#TODO: find a more better way of handling history of builtin packages
class Migration(migrations.Migration):
dependencies = [
('enterprise', '0009_auto_20161130_1651'),
]
def add_history_columns(apps, schema_editor):
cursor = connection.cursor()
query = 'ALTER TABLE enterprise_historicalenterprisecustomer ADD start_date DATETIME NULL, ADD end_date DATETIME NULL;'
cursor.execute(query)
operations = [
migrations.RunPython(add_history_columns),
]
Add history columns `start_date` and `end_date` to edx builtin packages | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
#TODO: find a more better way of handling history of builtin packages
class Migration(migrations.Migration):
dependencies = [
('enterprise', '0009_auto_20161130_1651'),
]
def add_history_columns(apps, schema_editor):
cursor = connection.cursor()
tables = [
'enterprise_historicalenterprisecustomer',
'enterprise_historicalenterprisecustomerentitlement',
'enterprise_historicalenterprisecourseenrollment',
'enterprise_historicalenterprisecustomercatalog',
'enterprise_historicalenrollmentnotificationemailtemplate',
'consent_historicaldatasharingconsent',
'degreed_historicaldegreedenterprisecustomerconfiguration'
]
for t in tables:
query = 'ALTER TABLE %s ADD start_date DATETIME NULL, ADD end_date DATETIME NULL;' % t
cursor.execute(query)
operations = [
migrations.RunPython(add_history_columns),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
#TODO: find a more better way of handling history of builtin packages
class Migration(migrations.Migration):
dependencies = [
('enterprise', '0009_auto_20161130_1651'),
]
def add_history_columns(apps, schema_editor):
cursor = connection.cursor()
query = 'ALTER TABLE enterprise_historicalenterprisecustomer ADD start_date DATETIME NULL, ADD end_date DATETIME NULL;'
cursor.execute(query)
operations = [
migrations.RunPython(add_history_columns),
]
<commit_msg>Add history columns `start_date` and `end_date` to edx builtin packages<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
#TODO: find a more better way of handling history of builtin packages
class Migration(migrations.Migration):
dependencies = [
('enterprise', '0009_auto_20161130_1651'),
]
def add_history_columns(apps, schema_editor):
cursor = connection.cursor()
tables = [
'enterprise_historicalenterprisecustomer',
'enterprise_historicalenterprisecustomerentitlement',
'enterprise_historicalenterprisecourseenrollment',
'enterprise_historicalenterprisecustomercatalog',
'enterprise_historicalenrollmentnotificationemailtemplate',
'consent_historicaldatasharingconsent',
'degreed_historicaldegreedenterprisecustomerconfiguration'
]
for t in tables:
query = 'ALTER TABLE %s ADD start_date DATETIME NULL, ADD end_date DATETIME NULL;' % t
cursor.execute(query)
operations = [
migrations.RunPython(add_history_columns),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
#TODO: find a more better way of handling history of builtin packages
class Migration(migrations.Migration):
dependencies = [
('enterprise', '0009_auto_20161130_1651'),
]
def add_history_columns(apps, schema_editor):
cursor = connection.cursor()
query = 'ALTER TABLE enterprise_historicalenterprisecustomer ADD start_date DATETIME NULL, ADD end_date DATETIME NULL;'
cursor.execute(query)
operations = [
migrations.RunPython(add_history_columns),
]
Add history columns `start_date` and `end_date` to edx builtin packages# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
#TODO: find a more better way of handling history of builtin packages
class Migration(migrations.Migration):
dependencies = [
('enterprise', '0009_auto_20161130_1651'),
]
def add_history_columns(apps, schema_editor):
cursor = connection.cursor()
tables = [
'enterprise_historicalenterprisecustomer',
'enterprise_historicalenterprisecustomerentitlement',
'enterprise_historicalenterprisecourseenrollment',
'enterprise_historicalenterprisecustomercatalog',
'enterprise_historicalenrollmentnotificationemailtemplate',
'consent_historicaldatasharingconsent',
'degreed_historicaldegreedenterprisecustomerconfiguration'
]
for t in tables:
query = 'ALTER TABLE %s ADD start_date DATETIME NULL, ADD end_date DATETIME NULL;' % t
cursor.execute(query)
operations = [
migrations.RunPython(add_history_columns),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
#TODO: find a more better way of handling history of builtin packages
class Migration(migrations.Migration):
dependencies = [
('enterprise', '0009_auto_20161130_1651'),
]
def add_history_columns(apps, schema_editor):
cursor = connection.cursor()
query = 'ALTER TABLE enterprise_historicalenterprisecustomer ADD start_date DATETIME NULL, ADD end_date DATETIME NULL;'
cursor.execute(query)
operations = [
migrations.RunPython(add_history_columns),
]
<commit_msg>Add history columns `start_date` and `end_date` to edx builtin packages<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
#TODO: find a more better way of handling history of builtin packages
class Migration(migrations.Migration):
dependencies = [
('enterprise', '0009_auto_20161130_1651'),
]
def add_history_columns(apps, schema_editor):
cursor = connection.cursor()
tables = [
'enterprise_historicalenterprisecustomer',
'enterprise_historicalenterprisecustomerentitlement',
'enterprise_historicalenterprisecourseenrollment',
'enterprise_historicalenterprisecustomercatalog',
'enterprise_historicalenrollmentnotificationemailtemplate',
'consent_historicaldatasharingconsent',
'degreed_historicaldegreedenterprisecustomerconfiguration'
]
for t in tables:
query = 'ALTER TABLE %s ADD start_date DATETIME NULL, ADD end_date DATETIME NULL;' % t
cursor.execute(query)
operations = [
migrations.RunPython(add_history_columns),
]
|
b9a25c0bf991c0579a9c9f1ed371251337173214 | namestand/patterns.py | namestand/patterns.py | import re
non_alphanumeric = re.compile(r"[^0-9a-z]+", re.I)
non_namey = re.compile(r"[^\w\-' ]+", re.UNICODE)
comma_suffix = re.compile(r", *(JR|SR|I+|IV|VI*)\b")
last_first = re.compile(r"([^,]*), +([^,]*)")
starts_with_num = re.compile(r"^(\d)")
name_cruft = re.compile(r"\b(MR|MS|MRS|ESQ|SIR|HON)\b")
company_cruft = re.compile(r"\b(LLC|LTD|INC)\b")
whitespace = re.compile(r"\s+")
| import re
non_alphanumeric = re.compile(r"[^0-9a-z]+", re.I)
non_namey = re.compile(r"[^\w\-' ]+", re.UNICODE)
comma_suffix = re.compile(r", *(JR|SR|I+|IV|VI*)\b")
last_first = re.compile(r"([^,]*), +([^,]*)")
starts_with_num = re.compile(r"^(\d)")
name_cruft = re.compile(r"\b(MR|MS|MRS|ESQ|SIR|HON)\b")
company_cruft = re.compile(r"\b(LLC|LTD|INC|LLP)\b")
whitespace = re.compile(r"\s+")
| Add "LLP" to company cruft | Add "LLP" to company cruft
| Python | mit | BuzzFeedNews/namestand | import re
non_alphanumeric = re.compile(r"[^0-9a-z]+", re.I)
non_namey = re.compile(r"[^\w\-' ]+", re.UNICODE)
comma_suffix = re.compile(r", *(JR|SR|I+|IV|VI*)\b")
last_first = re.compile(r"([^,]*), +([^,]*)")
starts_with_num = re.compile(r"^(\d)")
name_cruft = re.compile(r"\b(MR|MS|MRS|ESQ|SIR|HON)\b")
company_cruft = re.compile(r"\b(LLC|LTD|INC)\b")
whitespace = re.compile(r"\s+")
Add "LLP" to company cruft | import re
non_alphanumeric = re.compile(r"[^0-9a-z]+", re.I)
non_namey = re.compile(r"[^\w\-' ]+", re.UNICODE)
comma_suffix = re.compile(r", *(JR|SR|I+|IV|VI*)\b")
last_first = re.compile(r"([^,]*), +([^,]*)")
starts_with_num = re.compile(r"^(\d)")
name_cruft = re.compile(r"\b(MR|MS|MRS|ESQ|SIR|HON)\b")
company_cruft = re.compile(r"\b(LLC|LTD|INC|LLP)\b")
whitespace = re.compile(r"\s+")
| <commit_before>import re
non_alphanumeric = re.compile(r"[^0-9a-z]+", re.I)
non_namey = re.compile(r"[^\w\-' ]+", re.UNICODE)
comma_suffix = re.compile(r", *(JR|SR|I+|IV|VI*)\b")
last_first = re.compile(r"([^,]*), +([^,]*)")
starts_with_num = re.compile(r"^(\d)")
name_cruft = re.compile(r"\b(MR|MS|MRS|ESQ|SIR|HON)\b")
company_cruft = re.compile(r"\b(LLC|LTD|INC)\b")
whitespace = re.compile(r"\s+")
<commit_msg>Add "LLP" to company cruft<commit_after> | import re
non_alphanumeric = re.compile(r"[^0-9a-z]+", re.I)
non_namey = re.compile(r"[^\w\-' ]+", re.UNICODE)
comma_suffix = re.compile(r", *(JR|SR|I+|IV|VI*)\b")
last_first = re.compile(r"([^,]*), +([^,]*)")
starts_with_num = re.compile(r"^(\d)")
name_cruft = re.compile(r"\b(MR|MS|MRS|ESQ|SIR|HON)\b")
company_cruft = re.compile(r"\b(LLC|LTD|INC|LLP)\b")
whitespace = re.compile(r"\s+")
| import re
non_alphanumeric = re.compile(r"[^0-9a-z]+", re.I)
non_namey = re.compile(r"[^\w\-' ]+", re.UNICODE)
comma_suffix = re.compile(r", *(JR|SR|I+|IV|VI*)\b")
last_first = re.compile(r"([^,]*), +([^,]*)")
starts_with_num = re.compile(r"^(\d)")
name_cruft = re.compile(r"\b(MR|MS|MRS|ESQ|SIR|HON)\b")
company_cruft = re.compile(r"\b(LLC|LTD|INC)\b")
whitespace = re.compile(r"\s+")
Add "LLP" to company cruftimport re
non_alphanumeric = re.compile(r"[^0-9a-z]+", re.I)
non_namey = re.compile(r"[^\w\-' ]+", re.UNICODE)
comma_suffix = re.compile(r", *(JR|SR|I+|IV|VI*)\b")
last_first = re.compile(r"([^,]*), +([^,]*)")
starts_with_num = re.compile(r"^(\d)")
name_cruft = re.compile(r"\b(MR|MS|MRS|ESQ|SIR|HON)\b")
company_cruft = re.compile(r"\b(LLC|LTD|INC|LLP)\b")
whitespace = re.compile(r"\s+")
| <commit_before>import re
non_alphanumeric = re.compile(r"[^0-9a-z]+", re.I)
non_namey = re.compile(r"[^\w\-' ]+", re.UNICODE)
comma_suffix = re.compile(r", *(JR|SR|I+|IV|VI*)\b")
last_first = re.compile(r"([^,]*), +([^,]*)")
starts_with_num = re.compile(r"^(\d)")
name_cruft = re.compile(r"\b(MR|MS|MRS|ESQ|SIR|HON)\b")
company_cruft = re.compile(r"\b(LLC|LTD|INC)\b")
whitespace = re.compile(r"\s+")
<commit_msg>Add "LLP" to company cruft<commit_after>import re
non_alphanumeric = re.compile(r"[^0-9a-z]+", re.I)
non_namey = re.compile(r"[^\w\-' ]+", re.UNICODE)
comma_suffix = re.compile(r", *(JR|SR|I+|IV|VI*)\b")
last_first = re.compile(r"([^,]*), +([^,]*)")
starts_with_num = re.compile(r"^(\d)")
name_cruft = re.compile(r"\b(MR|MS|MRS|ESQ|SIR|HON)\b")
company_cruft = re.compile(r"\b(LLC|LTD|INC|LLP)\b")
whitespace = re.compile(r"\s+")
|
2c26434b7dcd71530d453989372b8d67d90ad3c7 | rwt/scripts.py | rwt/scripts.py | import sys
import tokenize
def run(cmdline):
"""
Execute the script as if it had been invoked naturally.
"""
namespace = dict()
filename = cmdline[0]
namespace['__file__'] = filename
namespace['__name__'] = '__main__'
sys.argv[:] = cmdline
open_ = getattr(tokenize, 'open', open)
script = open_(filename).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, filename, 'exec')
exec(code, namespace)
| import sys
import ast
import tokenize
def read_deps(script, var_name='__requires__'):
"""
Given a script path, read the dependencies from the
indicated variable (default __requires__). Does not
execute the script, so expects the var_name to be
assigned a static list of strings.
"""
with open(script) as stream:
return _read_deps(stream.read())
def _read_deps(script, var_name='__requires__'):
"""
>>> _read_deps("__requires__=['foo']")
['foo']
"""
mod = ast.parse(script)
node, = (
node
for node in mod.body
if isinstance(node, ast.Assign)
and len(node.targets) == 1
and node.targets[0].id == var_name
)
return ast.literal_eval(node.value)
def run(cmdline):
"""
Execute the script as if it had been invoked naturally.
"""
namespace = dict()
filename = cmdline[0]
namespace['__file__'] = filename
namespace['__name__'] = '__main__'
sys.argv[:] = cmdline
open_ = getattr(tokenize, 'open', open)
script = open_(filename).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, filename, 'exec')
exec(code, namespace)
| Add routine for loading deps from a script. | Add routine for loading deps from a script.
| Python | mit | jaraco/rwt | import sys
import tokenize
def run(cmdline):
"""
Execute the script as if it had been invoked naturally.
"""
namespace = dict()
filename = cmdline[0]
namespace['__file__'] = filename
namespace['__name__'] = '__main__'
sys.argv[:] = cmdline
open_ = getattr(tokenize, 'open', open)
script = open_(filename).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, filename, 'exec')
exec(code, namespace)
Add routine for loading deps from a script. | import sys
import ast
import tokenize
def read_deps(script, var_name='__requires__'):
"""
Given a script path, read the dependencies from the
indicated variable (default __requires__). Does not
execute the script, so expects the var_name to be
assigned a static list of strings.
"""
with open(script) as stream:
return _read_deps(stream.read())
def _read_deps(script, var_name='__requires__'):
"""
>>> _read_deps("__requires__=['foo']")
['foo']
"""
mod = ast.parse(script)
node, = (
node
for node in mod.body
if isinstance(node, ast.Assign)
and len(node.targets) == 1
and node.targets[0].id == var_name
)
return ast.literal_eval(node.value)
def run(cmdline):
"""
Execute the script as if it had been invoked naturally.
"""
namespace = dict()
filename = cmdline[0]
namespace['__file__'] = filename
namespace['__name__'] = '__main__'
sys.argv[:] = cmdline
open_ = getattr(tokenize, 'open', open)
script = open_(filename).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, filename, 'exec')
exec(code, namespace)
| <commit_before>import sys
import tokenize
def run(cmdline):
"""
Execute the script as if it had been invoked naturally.
"""
namespace = dict()
filename = cmdline[0]
namespace['__file__'] = filename
namespace['__name__'] = '__main__'
sys.argv[:] = cmdline
open_ = getattr(tokenize, 'open', open)
script = open_(filename).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, filename, 'exec')
exec(code, namespace)
<commit_msg>Add routine for loading deps from a script.<commit_after> | import sys
import ast
import tokenize
def read_deps(script, var_name='__requires__'):
"""
Given a script path, read the dependencies from the
indicated variable (default __requires__). Does not
execute the script, so expects the var_name to be
assigned a static list of strings.
"""
with open(script) as stream:
return _read_deps(stream.read())
def _read_deps(script, var_name='__requires__'):
"""
>>> _read_deps("__requires__=['foo']")
['foo']
"""
mod = ast.parse(script)
node, = (
node
for node in mod.body
if isinstance(node, ast.Assign)
and len(node.targets) == 1
and node.targets[0].id == var_name
)
return ast.literal_eval(node.value)
def run(cmdline):
"""
Execute the script as if it had been invoked naturally.
"""
namespace = dict()
filename = cmdline[0]
namespace['__file__'] = filename
namespace['__name__'] = '__main__'
sys.argv[:] = cmdline
open_ = getattr(tokenize, 'open', open)
script = open_(filename).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, filename, 'exec')
exec(code, namespace)
| import sys
import tokenize
def run(cmdline):
"""
Execute the script as if it had been invoked naturally.
"""
namespace = dict()
filename = cmdline[0]
namespace['__file__'] = filename
namespace['__name__'] = '__main__'
sys.argv[:] = cmdline
open_ = getattr(tokenize, 'open', open)
script = open_(filename).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, filename, 'exec')
exec(code, namespace)
Add routine for loading deps from a script.import sys
import ast
import tokenize
def read_deps(script, var_name='__requires__'):
"""
Given a script path, read the dependencies from the
indicated variable (default __requires__). Does not
execute the script, so expects the var_name to be
assigned a static list of strings.
"""
with open(script) as stream:
return _read_deps(stream.read())
def _read_deps(script, var_name='__requires__'):
"""
>>> _read_deps("__requires__=['foo']")
['foo']
"""
mod = ast.parse(script)
node, = (
node
for node in mod.body
if isinstance(node, ast.Assign)
and len(node.targets) == 1
and node.targets[0].id == var_name
)
return ast.literal_eval(node.value)
def run(cmdline):
"""
Execute the script as if it had been invoked naturally.
"""
namespace = dict()
filename = cmdline[0]
namespace['__file__'] = filename
namespace['__name__'] = '__main__'
sys.argv[:] = cmdline
open_ = getattr(tokenize, 'open', open)
script = open_(filename).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, filename, 'exec')
exec(code, namespace)
| <commit_before>import sys
import tokenize
def run(cmdline):
"""
Execute the script as if it had been invoked naturally.
"""
namespace = dict()
filename = cmdline[0]
namespace['__file__'] = filename
namespace['__name__'] = '__main__'
sys.argv[:] = cmdline
open_ = getattr(tokenize, 'open', open)
script = open_(filename).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, filename, 'exec')
exec(code, namespace)
<commit_msg>Add routine for loading deps from a script.<commit_after>import sys
import ast
import tokenize
def read_deps(script, var_name='__requires__'):
"""
Given a script path, read the dependencies from the
indicated variable (default __requires__). Does not
execute the script, so expects the var_name to be
assigned a static list of strings.
"""
with open(script) as stream:
return _read_deps(stream.read())
def _read_deps(script, var_name='__requires__'):
"""
>>> _read_deps("__requires__=['foo']")
['foo']
"""
mod = ast.parse(script)
node, = (
node
for node in mod.body
if isinstance(node, ast.Assign)
and len(node.targets) == 1
and node.targets[0].id == var_name
)
return ast.literal_eval(node.value)
def run(cmdline):
"""
Execute the script as if it had been invoked naturally.
"""
namespace = dict()
filename = cmdline[0]
namespace['__file__'] = filename
namespace['__name__'] = '__main__'
sys.argv[:] = cmdline
open_ = getattr(tokenize, 'open', open)
script = open_(filename).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, filename, 'exec')
exec(code, namespace)
|
dec97fd68509cabfd53dcf588952b3b25d3e0e17 | normandy/base/urls.py | normandy/base/urls.py | from django.conf.urls import include, url
from normandy.base import views
from normandy.base.api import views as api_views
from normandy.base.api.routers import MixedViewRouter
# API Router
router = MixedViewRouter()
router.register("user", api_views.UserViewSet)
router.register("group", api_views.GroupViewSet)
urlpatterns = [
url(r"^$", views.index, name="index"),
url(r"^favicon.ico", views.favicon),
url(r"^api/v2/service_info/", api_views.ServiceInfoView.as_view(), name="service-info"),
url(r"^api/v1/user/me/", api_views.CurrentUserView.as_view(), name="current-user"),
url(r"^api/v3/", include(router.urls)),
]
| from django.conf.urls import include, url
from normandy.base import views
from normandy.base.api import views as api_views
from normandy.base.api.routers import MixedViewRouter
# API Router
router = MixedViewRouter()
router.register("user", api_views.UserViewSet)
router.register("group", api_views.GroupViewSet)
urlpatterns = [
url(r"^$", views.index, name="index"),
url(r"^favicon.ico", views.favicon),
url(r"^api/v2/service_info/", api_views.ServiceInfoView.as_view(), name="service-info-v2"),
url(r"^api/v3/service_info/", api_views.ServiceInfoView.as_view(), name="service-info"),
url(r"^api/v1/user/me/", api_views.CurrentUserView.as_view(), name="current-user"),
url(r"^api/v3/", include(router.urls)),
]
| Make service info available on v3 API | Make service info available on v3 API
| Python | mpl-2.0 | mozilla/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy | from django.conf.urls import include, url
from normandy.base import views
from normandy.base.api import views as api_views
from normandy.base.api.routers import MixedViewRouter
# API Router
router = MixedViewRouter()
router.register("user", api_views.UserViewSet)
router.register("group", api_views.GroupViewSet)
urlpatterns = [
url(r"^$", views.index, name="index"),
url(r"^favicon.ico", views.favicon),
url(r"^api/v2/service_info/", api_views.ServiceInfoView.as_view(), name="service-info"),
url(r"^api/v1/user/me/", api_views.CurrentUserView.as_view(), name="current-user"),
url(r"^api/v3/", include(router.urls)),
]
Make service info available on v3 API | from django.conf.urls import include, url
from normandy.base import views
from normandy.base.api import views as api_views
from normandy.base.api.routers import MixedViewRouter
# API Router
router = MixedViewRouter()
router.register("user", api_views.UserViewSet)
router.register("group", api_views.GroupViewSet)
urlpatterns = [
url(r"^$", views.index, name="index"),
url(r"^favicon.ico", views.favicon),
url(r"^api/v2/service_info/", api_views.ServiceInfoView.as_view(), name="service-info-v2"),
url(r"^api/v3/service_info/", api_views.ServiceInfoView.as_view(), name="service-info"),
url(r"^api/v1/user/me/", api_views.CurrentUserView.as_view(), name="current-user"),
url(r"^api/v3/", include(router.urls)),
]
| <commit_before>from django.conf.urls import include, url
from normandy.base import views
from normandy.base.api import views as api_views
from normandy.base.api.routers import MixedViewRouter
# API Router
router = MixedViewRouter()
router.register("user", api_views.UserViewSet)
router.register("group", api_views.GroupViewSet)
urlpatterns = [
url(r"^$", views.index, name="index"),
url(r"^favicon.ico", views.favicon),
url(r"^api/v2/service_info/", api_views.ServiceInfoView.as_view(), name="service-info"),
url(r"^api/v1/user/me/", api_views.CurrentUserView.as_view(), name="current-user"),
url(r"^api/v3/", include(router.urls)),
]
<commit_msg>Make service info available on v3 API<commit_after> | from django.conf.urls import include, url
from normandy.base import views
from normandy.base.api import views as api_views
from normandy.base.api.routers import MixedViewRouter
# API Router
router = MixedViewRouter()
router.register("user", api_views.UserViewSet)
router.register("group", api_views.GroupViewSet)
urlpatterns = [
url(r"^$", views.index, name="index"),
url(r"^favicon.ico", views.favicon),
url(r"^api/v2/service_info/", api_views.ServiceInfoView.as_view(), name="service-info-v2"),
url(r"^api/v3/service_info/", api_views.ServiceInfoView.as_view(), name="service-info"),
url(r"^api/v1/user/me/", api_views.CurrentUserView.as_view(), name="current-user"),
url(r"^api/v3/", include(router.urls)),
]
| from django.conf.urls import include, url
from normandy.base import views
from normandy.base.api import views as api_views
from normandy.base.api.routers import MixedViewRouter
# API Router
router = MixedViewRouter()
router.register("user", api_views.UserViewSet)
router.register("group", api_views.GroupViewSet)
urlpatterns = [
url(r"^$", views.index, name="index"),
url(r"^favicon.ico", views.favicon),
url(r"^api/v2/service_info/", api_views.ServiceInfoView.as_view(), name="service-info"),
url(r"^api/v1/user/me/", api_views.CurrentUserView.as_view(), name="current-user"),
url(r"^api/v3/", include(router.urls)),
]
Make service info available on v3 APIfrom django.conf.urls import include, url
from normandy.base import views
from normandy.base.api import views as api_views
from normandy.base.api.routers import MixedViewRouter
# API Router
router = MixedViewRouter()
router.register("user", api_views.UserViewSet)
router.register("group", api_views.GroupViewSet)
urlpatterns = [
url(r"^$", views.index, name="index"),
url(r"^favicon.ico", views.favicon),
url(r"^api/v2/service_info/", api_views.ServiceInfoView.as_view(), name="service-info-v2"),
url(r"^api/v3/service_info/", api_views.ServiceInfoView.as_view(), name="service-info"),
url(r"^api/v1/user/me/", api_views.CurrentUserView.as_view(), name="current-user"),
url(r"^api/v3/", include(router.urls)),
]
| <commit_before>from django.conf.urls import include, url
from normandy.base import views
from normandy.base.api import views as api_views
from normandy.base.api.routers import MixedViewRouter
# API Router
router = MixedViewRouter()
router.register("user", api_views.UserViewSet)
router.register("group", api_views.GroupViewSet)
urlpatterns = [
url(r"^$", views.index, name="index"),
url(r"^favicon.ico", views.favicon),
url(r"^api/v2/service_info/", api_views.ServiceInfoView.as_view(), name="service-info"),
url(r"^api/v1/user/me/", api_views.CurrentUserView.as_view(), name="current-user"),
url(r"^api/v3/", include(router.urls)),
]
<commit_msg>Make service info available on v3 API<commit_after>from django.conf.urls import include, url
from normandy.base import views
from normandy.base.api import views as api_views
from normandy.base.api.routers import MixedViewRouter
# API Router
router = MixedViewRouter()
router.register("user", api_views.UserViewSet)
router.register("group", api_views.GroupViewSet)
urlpatterns = [
url(r"^$", views.index, name="index"),
url(r"^favicon.ico", views.favicon),
url(r"^api/v2/service_info/", api_views.ServiceInfoView.as_view(), name="service-info-v2"),
url(r"^api/v3/service_info/", api_views.ServiceInfoView.as_view(), name="service-info"),
url(r"^api/v1/user/me/", api_views.CurrentUserView.as_view(), name="current-user"),
url(r"^api/v3/", include(router.urls)),
]
|
767b9867a1e28063fae33ea46478372818b5a129 | cla_backend/apps/core/views.py | cla_backend/apps/core/views.py | from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("type", "404")
scope.set_extra("path", request.path)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
| from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("path", request.path)
for i, part in enumerate(request.path.strip("/").split("/")):
scope.set_tag("path_{}".format(i), part)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
| Tag sentry event with each part of path | Tag sentry event with each part of path
| Python | mit | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend | from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("type", "404")
scope.set_extra("path", request.path)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
Tag sentry event with each part of path | from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("path", request.path)
for i, part in enumerate(request.path.strip("/").split("/")):
scope.set_tag("path_{}".format(i), part)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
| <commit_before>from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("type", "404")
scope.set_extra("path", request.path)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
<commit_msg>Tag sentry event with each part of path<commit_after> | from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("path", request.path)
for i, part in enumerate(request.path.strip("/").split("/")):
scope.set_tag("path_{}".format(i), part)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
| from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("type", "404")
scope.set_extra("path", request.path)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
Tag sentry event with each part of pathfrom django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("path", request.path)
for i, part in enumerate(request.path.strip("/").split("/")):
scope.set_tag("path_{}".format(i), part)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
| <commit_before>from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("type", "404")
scope.set_extra("path", request.path)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
<commit_msg>Tag sentry event with each part of path<commit_after>from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("path", request.path)
for i, part in enumerate(request.path.strip("/").split("/")):
scope.set_tag("path_{}".format(i), part)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
|
e08c7352fc5de7e098e434bfc1f2df4384c3405a | tests/base.py | tests/base.py | import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
continue
| import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
@property
def files(self):
return glob.glob(os.path.join(self.path, 'test*.mmstats'))
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
continue
| Refactor test harness file discovery | Refactor test harness file discovery
| Python | bsd-3-clause | schmichael/mmstats,schmichael/mmstats,schmichael/mmstats,schmichael/mmstats | import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
continue
Refactor test harness file discovery | import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
@property
def files(self):
return glob.glob(os.path.join(self.path, 'test*.mmstats'))
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
continue
| <commit_before>import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
continue
<commit_msg>Refactor test harness file discovery<commit_after> | import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
@property
def files(self):
return glob.glob(os.path.join(self.path, 'test*.mmstats'))
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
continue
| import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
continue
Refactor test harness file discoveryimport unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
@property
def files(self):
return glob.glob(os.path.join(self.path, 'test*.mmstats'))
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
continue
| <commit_before>import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in glob.glob(os.path.join(self.path, 'test*.mmstats')):
try:
os.remove(fn)
pass
except OSError:
continue
<commit_msg>Refactor test harness file discovery<commit_after>import unittest
import glob
import os
import mmstats
class MmstatsTestCase(unittest.TestCase):
@property
def files(self):
return glob.glob(os.path.join(self.path, 'test*.mmstats'))
def setUp(self):
super(MmstatsTestCase, self).setUp()
self.path = mmstats.DEFAULT_PATH
# Clean out stale mmstats files
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
print 'Could not remove: %s' % fn
def tearDown(self):
# clean the dir after tests
for fn in self.files:
try:
os.remove(fn)
pass
except OSError:
continue
|
35b08e6e7e60a440fe33b7120843766b9f2592c6 | tests/urls.py | tests/urls.py | from django.conf.urls import include, url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url('foo/', views.MyView.as_view(), name='class-view'),
url('bar/', views.MyAPIView.as_view(), name='api-view'),
url('', views.my_view, name='function-view'),
]
| from django.conf.urls import include, url
from django.contrib import admin
from . import views
from incuna_test_utils.compat import DJANGO_LT_17
if DJANGO_LT_17:
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url('foo/', views.MyView.as_view(), name='class-view'),
url('bar/', views.MyAPIView.as_view(), name='api-view'),
url('', views.my_view, name='function-view'),
]
| Add compatibility with django < 1.7 | Add compatibility with django < 1.7
| Python | bsd-2-clause | incuna/incuna-test-utils,incuna/incuna-test-utils | from django.conf.urls import include, url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url('foo/', views.MyView.as_view(), name='class-view'),
url('bar/', views.MyAPIView.as_view(), name='api-view'),
url('', views.my_view, name='function-view'),
]
Add compatibility with django < 1.7 | from django.conf.urls import include, url
from django.contrib import admin
from . import views
from incuna_test_utils.compat import DJANGO_LT_17
if DJANGO_LT_17:
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url('foo/', views.MyView.as_view(), name='class-view'),
url('bar/', views.MyAPIView.as_view(), name='api-view'),
url('', views.my_view, name='function-view'),
]
| <commit_before>from django.conf.urls import include, url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url('foo/', views.MyView.as_view(), name='class-view'),
url('bar/', views.MyAPIView.as_view(), name='api-view'),
url('', views.my_view, name='function-view'),
]
<commit_msg>Add compatibility with django < 1.7<commit_after> | from django.conf.urls import include, url
from django.contrib import admin
from . import views
from incuna_test_utils.compat import DJANGO_LT_17
if DJANGO_LT_17:
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url('foo/', views.MyView.as_view(), name='class-view'),
url('bar/', views.MyAPIView.as_view(), name='api-view'),
url('', views.my_view, name='function-view'),
]
| from django.conf.urls import include, url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url('foo/', views.MyView.as_view(), name='class-view'),
url('bar/', views.MyAPIView.as_view(), name='api-view'),
url('', views.my_view, name='function-view'),
]
Add compatibility with django < 1.7from django.conf.urls import include, url
from django.contrib import admin
from . import views
from incuna_test_utils.compat import DJANGO_LT_17
if DJANGO_LT_17:
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url('foo/', views.MyView.as_view(), name='class-view'),
url('bar/', views.MyAPIView.as_view(), name='api-view'),
url('', views.my_view, name='function-view'),
]
| <commit_before>from django.conf.urls import include, url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url('foo/', views.MyView.as_view(), name='class-view'),
url('bar/', views.MyAPIView.as_view(), name='api-view'),
url('', views.my_view, name='function-view'),
]
<commit_msg>Add compatibility with django < 1.7<commit_after>from django.conf.urls import include, url
from django.contrib import admin
from . import views
from incuna_test_utils.compat import DJANGO_LT_17
if DJANGO_LT_17:
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url('foo/', views.MyView.as_view(), name='class-view'),
url('bar/', views.MyAPIView.as_view(), name='api-view'),
url('', views.my_view, name='function-view'),
]
|
c0de670c5b8b78280ee588ee31a75cf8b3f44799 | lms/djangoapps/commerce/migrations/0001_data__add_ecommerce_service_user.py | lms/djangoapps/commerce/migrations/0001_data__add_ecommerce_service_user.py | # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import migrations, models
USERNAME = settings.ECOMMERCE_SERVICE_WORKER_USERNAME
EMAIL = USERNAME + '@fake.email'
def forwards(apps, schema_editor):
"""Add the service user."""
User = get_user_model()
user, created = User.objects.get_or_create(username=USERNAME, email=EMAIL)
if created:
user.set_unusable_password()
user.save()
def backwards(apps, schema_editor):
"""Remove the service user."""
User.objects.get(username=USERNAME, email=EMAIL).delete()
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user_api', '0002_retirementstate_userretirementstatus'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
| # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import migrations, models
USERNAME = settings.ECOMMERCE_SERVICE_WORKER_USERNAME
EMAIL = USERNAME + '@fake.email'
def forwards(apps, schema_editor):
"""Add the service user."""
User = get_user_model()
user, created = User.objects.get_or_create(username=USERNAME, email=EMAIL)
if created:
user.set_unusable_password()
user.save()
def backwards(apps, schema_editor):
"""Remove the service user."""
User.objects.get(username=USERNAME, email=EMAIL).delete()
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user_api', '0002_retirementstate_userretirementstatus'),
('student', '0033_userprofile_state'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
| Fix sandboax builds, because of signal chains, UserProfile table must be predent before adding a User | Fix sandboax builds, because of signal chains, UserProfile table must be predent before adding a User
| Python | agpl-3.0 | arbrandes/edx-platform,eduNEXT/edx-platform,stvstnfrd/edx-platform,stvstnfrd/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,eduNEXT/edunext-platform,EDUlib/edx-platform,stvstnfrd/edx-platform,edx/edx-platform,EDUlib/edx-platform,edx/edx-platform,stvstnfrd/edx-platform,eduNEXT/edunext-platform,arbrandes/edx-platform,angelapper/edx-platform,eduNEXT/edunext-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,edx/edx-platform,angelapper/edx-platform,edx/edx-platform,EDUlib/edx-platform,angelapper/edx-platform,eduNEXT/edunext-platform,eduNEXT/edx-platform | # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import migrations, models
USERNAME = settings.ECOMMERCE_SERVICE_WORKER_USERNAME
EMAIL = USERNAME + '@fake.email'
def forwards(apps, schema_editor):
"""Add the service user."""
User = get_user_model()
user, created = User.objects.get_or_create(username=USERNAME, email=EMAIL)
if created:
user.set_unusable_password()
user.save()
def backwards(apps, schema_editor):
"""Remove the service user."""
User.objects.get(username=USERNAME, email=EMAIL).delete()
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user_api', '0002_retirementstate_userretirementstatus'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
Fix sandboax builds, because of signal chains, UserProfile table must be predent before adding a User | # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import migrations, models
USERNAME = settings.ECOMMERCE_SERVICE_WORKER_USERNAME
EMAIL = USERNAME + '@fake.email'
def forwards(apps, schema_editor):
"""Add the service user."""
User = get_user_model()
user, created = User.objects.get_or_create(username=USERNAME, email=EMAIL)
if created:
user.set_unusable_password()
user.save()
def backwards(apps, schema_editor):
"""Remove the service user."""
User.objects.get(username=USERNAME, email=EMAIL).delete()
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user_api', '0002_retirementstate_userretirementstatus'),
('student', '0033_userprofile_state'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
| <commit_before># -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import migrations, models
USERNAME = settings.ECOMMERCE_SERVICE_WORKER_USERNAME
EMAIL = USERNAME + '@fake.email'
def forwards(apps, schema_editor):
"""Add the service user."""
User = get_user_model()
user, created = User.objects.get_or_create(username=USERNAME, email=EMAIL)
if created:
user.set_unusable_password()
user.save()
def backwards(apps, schema_editor):
"""Remove the service user."""
User.objects.get(username=USERNAME, email=EMAIL).delete()
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user_api', '0002_retirementstate_userretirementstatus'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
<commit_msg>Fix sandboax builds, because of signal chains, UserProfile table must be predent before adding a User<commit_after> | # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import migrations, models
USERNAME = settings.ECOMMERCE_SERVICE_WORKER_USERNAME
EMAIL = USERNAME + '@fake.email'
def forwards(apps, schema_editor):
"""Add the service user."""
User = get_user_model()
user, created = User.objects.get_or_create(username=USERNAME, email=EMAIL)
if created:
user.set_unusable_password()
user.save()
def backwards(apps, schema_editor):
"""Remove the service user."""
User.objects.get(username=USERNAME, email=EMAIL).delete()
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user_api', '0002_retirementstate_userretirementstatus'),
('student', '0033_userprofile_state'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
| # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import migrations, models
USERNAME = settings.ECOMMERCE_SERVICE_WORKER_USERNAME
EMAIL = USERNAME + '@fake.email'
def forwards(apps, schema_editor):
"""Add the service user."""
User = get_user_model()
user, created = User.objects.get_or_create(username=USERNAME, email=EMAIL)
if created:
user.set_unusable_password()
user.save()
def backwards(apps, schema_editor):
"""Remove the service user."""
User.objects.get(username=USERNAME, email=EMAIL).delete()
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user_api', '0002_retirementstate_userretirementstatus'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
Fix sandboax builds, because of signal chains, UserProfile table must be predent before adding a User# -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import migrations, models
USERNAME = settings.ECOMMERCE_SERVICE_WORKER_USERNAME
EMAIL = USERNAME + '@fake.email'
def forwards(apps, schema_editor):
"""Add the service user."""
User = get_user_model()
user, created = User.objects.get_or_create(username=USERNAME, email=EMAIL)
if created:
user.set_unusable_password()
user.save()
def backwards(apps, schema_editor):
"""Remove the service user."""
User.objects.get(username=USERNAME, email=EMAIL).delete()
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user_api', '0002_retirementstate_userretirementstatus'),
('student', '0033_userprofile_state'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
| <commit_before># -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import migrations, models
USERNAME = settings.ECOMMERCE_SERVICE_WORKER_USERNAME
EMAIL = USERNAME + '@fake.email'
def forwards(apps, schema_editor):
"""Add the service user."""
User = get_user_model()
user, created = User.objects.get_or_create(username=USERNAME, email=EMAIL)
if created:
user.set_unusable_password()
user.save()
def backwards(apps, schema_editor):
"""Remove the service user."""
User.objects.get(username=USERNAME, email=EMAIL).delete()
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user_api', '0002_retirementstate_userretirementstatus'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
<commit_msg>Fix sandboax builds, because of signal chains, UserProfile table must be predent before adding a User<commit_after># -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import migrations, models
USERNAME = settings.ECOMMERCE_SERVICE_WORKER_USERNAME
EMAIL = USERNAME + '@fake.email'
def forwards(apps, schema_editor):
"""Add the service user."""
User = get_user_model()
user, created = User.objects.get_or_create(username=USERNAME, email=EMAIL)
if created:
user.set_unusable_password()
user.save()
def backwards(apps, schema_editor):
"""Remove the service user."""
User.objects.get(username=USERNAME, email=EMAIL).delete()
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('user_api', '0002_retirementstate_userretirementstatus'),
('student', '0033_userprofile_state'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
47ddf999dd7ef8cd7600710ad6ad7611dd55a218 | bin/testNetwork.py | bin/testNetwork.py | #!/usr/bin/python3
import subprocess
import os
from time import sleep
env = {}
HOME = os.environ.get("HOME", "/root")
scannerConf = open(HOME+"/scanner.conf", "rt")
while True:
in_line = scannerConf.readline()
if not in_line:
break
in_line = in_line[:-1]
key, value = in_line.split("=")
env[key] = value
scannerConf.close()
GATEWAY = '192.168.1.1'
if env['GATEWAY']:
GATEWAY = env['GATEWAY']
IFACE = 'wlan0'
if env['IFACE']:
IFACE = env['IFACE']
print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE))
while True:
ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY])
if ret == 0:
print("Network appears to be up")
else:
print("Network appears to be down, restarting...")
ret = subprocess.call(['/sbin/ifdown', '--force', IFACE])
ret = subprocess.call(['/sbin/ifup', IFACE])
sleep(60)
| #!/usr/bin/python3
import subprocess
import os
from time import sleep
env = {}
HOME = os.environ.get("HOME", "/root")
scannerConf = open(HOME+"/scanner.conf", "rt")
while True:
in_line = scannerConf.readline()
if not in_line:
break
in_line = in_line[:-1]
key, value = in_line.split("=")
env[key] = value
scannerConf.close()
GATEWAY = '192.168.1.1'
if 'GATEWAY' in env:
GATEWAY = env['GATEWAY']
IFACE = 'wlan0'
if 'IFACE' in env:
IFACE = env['IFACE']
print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE))
while True:
ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY])
if ret == 0:
print("Network appears to be up")
else:
print("Network appears to be down, restarting...")
ret = subprocess.call(['/sbin/ifdown', '--force', IFACE])
ret = subprocess.call(['/sbin/ifup', IFACE])
sleep(60)
| Change the config dictionary key validation | Change the config dictionary key validation
| Python | apache-2.0 | starksm64/NativeRaspberryPiBeaconParser,starksm64/NativeRaspberryPiBeaconParser,starksm64/NativeRaspberryPiBeaconParser,starksm64/NativeRaspberryPiBeaconParser,starksm64/NativeRaspberryPiBeaconParser | #!/usr/bin/python3
import subprocess
import os
from time import sleep
env = {}
HOME = os.environ.get("HOME", "/root")
scannerConf = open(HOME+"/scanner.conf", "rt")
while True:
in_line = scannerConf.readline()
if not in_line:
break
in_line = in_line[:-1]
key, value = in_line.split("=")
env[key] = value
scannerConf.close()
GATEWAY = '192.168.1.1'
if env['GATEWAY']:
GATEWAY = env['GATEWAY']
IFACE = 'wlan0'
if env['IFACE']:
IFACE = env['IFACE']
print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE))
while True:
ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY])
if ret == 0:
print("Network appears to be up")
else:
print("Network appears to be down, restarting...")
ret = subprocess.call(['/sbin/ifdown', '--force', IFACE])
ret = subprocess.call(['/sbin/ifup', IFACE])
sleep(60)
Change the config dictionary key validation | #!/usr/bin/python3
import subprocess
import os
from time import sleep
env = {}
HOME = os.environ.get("HOME", "/root")
scannerConf = open(HOME+"/scanner.conf", "rt")
while True:
in_line = scannerConf.readline()
if not in_line:
break
in_line = in_line[:-1]
key, value = in_line.split("=")
env[key] = value
scannerConf.close()
GATEWAY = '192.168.1.1'
if 'GATEWAY' in env:
GATEWAY = env['GATEWAY']
IFACE = 'wlan0'
if 'IFACE' in env:
IFACE = env['IFACE']
print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE))
while True:
ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY])
if ret == 0:
print("Network appears to be up")
else:
print("Network appears to be down, restarting...")
ret = subprocess.call(['/sbin/ifdown', '--force', IFACE])
ret = subprocess.call(['/sbin/ifup', IFACE])
sleep(60)
| <commit_before>#!/usr/bin/python3
import subprocess
import os
from time import sleep
env = {}
HOME = os.environ.get("HOME", "/root")
scannerConf = open(HOME+"/scanner.conf", "rt")
while True:
in_line = scannerConf.readline()
if not in_line:
break
in_line = in_line[:-1]
key, value = in_line.split("=")
env[key] = value
scannerConf.close()
GATEWAY = '192.168.1.1'
if env['GATEWAY']:
GATEWAY = env['GATEWAY']
IFACE = 'wlan0'
if env['IFACE']:
IFACE = env['IFACE']
print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE))
while True:
ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY])
if ret == 0:
print("Network appears to be up")
else:
print("Network appears to be down, restarting...")
ret = subprocess.call(['/sbin/ifdown', '--force', IFACE])
ret = subprocess.call(['/sbin/ifup', IFACE])
sleep(60)
<commit_msg>Change the config dictionary key validation<commit_after> | #!/usr/bin/python3
import subprocess
import os
from time import sleep
env = {}
HOME = os.environ.get("HOME", "/root")
scannerConf = open(HOME+"/scanner.conf", "rt")
while True:
in_line = scannerConf.readline()
if not in_line:
break
in_line = in_line[:-1]
key, value = in_line.split("=")
env[key] = value
scannerConf.close()
GATEWAY = '192.168.1.1'
if 'GATEWAY' in env:
GATEWAY = env['GATEWAY']
IFACE = 'wlan0'
if 'IFACE' in env:
IFACE = env['IFACE']
print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE))
while True:
ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY])
if ret == 0:
print("Network appears to be up")
else:
print("Network appears to be down, restarting...")
ret = subprocess.call(['/sbin/ifdown', '--force', IFACE])
ret = subprocess.call(['/sbin/ifup', IFACE])
sleep(60)
| #!/usr/bin/python3
import subprocess
import os
from time import sleep
env = {}
HOME = os.environ.get("HOME", "/root")
scannerConf = open(HOME+"/scanner.conf", "rt")
while True:
in_line = scannerConf.readline()
if not in_line:
break
in_line = in_line[:-1]
key, value = in_line.split("=")
env[key] = value
scannerConf.close()
GATEWAY = '192.168.1.1'
if env['GATEWAY']:
GATEWAY = env['GATEWAY']
IFACE = 'wlan0'
if env['IFACE']:
IFACE = env['IFACE']
print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE))
while True:
ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY])
if ret == 0:
print("Network appears to be up")
else:
print("Network appears to be down, restarting...")
ret = subprocess.call(['/sbin/ifdown', '--force', IFACE])
ret = subprocess.call(['/sbin/ifup', IFACE])
sleep(60)
Change the config dictionary key validation#!/usr/bin/python3
import subprocess
import os
from time import sleep
env = {}
HOME = os.environ.get("HOME", "/root")
scannerConf = open(HOME+"/scanner.conf", "rt")
while True:
in_line = scannerConf.readline()
if not in_line:
break
in_line = in_line[:-1]
key, value = in_line.split("=")
env[key] = value
scannerConf.close()
GATEWAY = '192.168.1.1'
if 'GATEWAY' in env:
GATEWAY = env['GATEWAY']
IFACE = 'wlan0'
if 'IFACE' in env:
IFACE = env['IFACE']
print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE))
while True:
ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY])
if ret == 0:
print("Network appears to be up")
else:
print("Network appears to be down, restarting...")
ret = subprocess.call(['/sbin/ifdown', '--force', IFACE])
ret = subprocess.call(['/sbin/ifup', IFACE])
sleep(60)
| <commit_before>#!/usr/bin/python3
import subprocess
import os
from time import sleep
env = {}
HOME = os.environ.get("HOME", "/root")
scannerConf = open(HOME+"/scanner.conf", "rt")
while True:
in_line = scannerConf.readline()
if not in_line:
break
in_line = in_line[:-1]
key, value = in_line.split("=")
env[key] = value
scannerConf.close()
GATEWAY = '192.168.1.1'
if env['GATEWAY']:
GATEWAY = env['GATEWAY']
IFACE = 'wlan0'
if env['IFACE']:
IFACE = env['IFACE']
print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE))
while True:
ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY])
if ret == 0:
print("Network appears to be up")
else:
print("Network appears to be down, restarting...")
ret = subprocess.call(['/sbin/ifdown', '--force', IFACE])
ret = subprocess.call(['/sbin/ifup', IFACE])
sleep(60)
<commit_msg>Change the config dictionary key validation<commit_after>#!/usr/bin/python3
import subprocess
import os
from time import sleep
env = {}
HOME = os.environ.get("HOME", "/root")
scannerConf = open(HOME+"/scanner.conf", "rt")
while True:
in_line = scannerConf.readline()
if not in_line:
break
in_line = in_line[:-1]
key, value = in_line.split("=")
env[key] = value
scannerConf.close()
GATEWAY = '192.168.1.1'
if 'GATEWAY' in env:
GATEWAY = env['GATEWAY']
IFACE = 'wlan0'
if 'IFACE' in env:
IFACE = env['IFACE']
print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE))
while True:
ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY])
if ret == 0:
print("Network appears to be up")
else:
print("Network appears to be down, restarting...")
ret = subprocess.call(['/sbin/ifdown', '--force', IFACE])
ret = subprocess.call(['/sbin/ifup', IFACE])
sleep(60)
|
22e16ba6e2bf7135933895162744424e89ca514d | article/tests/article_admin_tests.py | article/tests/article_admin_tests.py | from django.contrib.auth.models import User
from django.test import TestCase, override_settings, Client
from django.test import RequestFactory
from mock import MagicMock
from wagtail.wagtailsearch.backends.elasticsearch import Elasticsearch
from django.db import DataError, IntegrityError
from django.core.exceptions import ValidationError
from article.models import Article
from functional_tests.factory import ArticleFactory, AuthorFactory
class ArticleAdminFormTest(TestCase):
def setUp(self):
self.client = Client()
self.test_author = AuthorFactory(name='xyz', slug="xyz")
self.article = ArticleFactory(title="english_article", authors=(self.test_author,), language='en')
def login_admin(self):
User.objects.create_superuser('pari', 'pari@test.com', "pari")
self.client.login(username="pari", password="pari")
def test_no_article_can_be_stored_without_a_title(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"): # Slug and title fields cannot be null.
ArticleFactory(title="")
def test_article_cannot_be_stored_without_content(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', content='')
def test_article_cannot_be_stored_without_language(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', language='')
| from django.contrib.auth.models import User
from django.test import TestCase, override_settings, Client
from django.test import RequestFactory
from mock import MagicMock
from wagtail.wagtailsearch.backends.elasticsearch import Elasticsearch
from django.db import DataError, IntegrityError
from django.core.exceptions import ValidationError
from article.models import Article
from functional_tests.factory import ArticleFactory, AuthorFactory
class ArticleAdminFormTest(TestCase):
def setUp(self):
self.client = Client()
self.test_author = AuthorFactory(name='xyz', slug="xyz")
self.article = ArticleFactory(title="english_article", authors=(self.test_author,), language='en')
def login_admin(self):
User.objects.create_superuser('pari', 'pari@test.com', "pari")
self.client.login(username="pari", password="pari")
def test_no_article_can_be_stored_without_a_title(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"): # Slug and title fields cannot be null.
ArticleFactory(title="")
def test_article_can_be_stored_without_content(self):
article = ArticleFactory(title='Test', content='')
self.assertEqual(article.title, 'Test')
def test_article_cannot_be_stored_without_language(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', language='')
| Fix test for storing article without any content | Fix test for storing article without any content
| Python | bsd-3-clause | PARINetwork/pari,PARINetwork/pari,PARINetwork/pari,PARINetwork/pari | from django.contrib.auth.models import User
from django.test import TestCase, override_settings, Client
from django.test import RequestFactory
from mock import MagicMock
from wagtail.wagtailsearch.backends.elasticsearch import Elasticsearch
from django.db import DataError, IntegrityError
from django.core.exceptions import ValidationError
from article.models import Article
from functional_tests.factory import ArticleFactory, AuthorFactory
class ArticleAdminFormTest(TestCase):
def setUp(self):
self.client = Client()
self.test_author = AuthorFactory(name='xyz', slug="xyz")
self.article = ArticleFactory(title="english_article", authors=(self.test_author,), language='en')
def login_admin(self):
User.objects.create_superuser('pari', 'pari@test.com', "pari")
self.client.login(username="pari", password="pari")
def test_no_article_can_be_stored_without_a_title(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"): # Slug and title fields cannot be null.
ArticleFactory(title="")
def test_article_cannot_be_stored_without_content(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', content='')
def test_article_cannot_be_stored_without_language(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', language='')
Fix test for storing article without any content | from django.contrib.auth.models import User
from django.test import TestCase, override_settings, Client
from django.test import RequestFactory
from mock import MagicMock
from wagtail.wagtailsearch.backends.elasticsearch import Elasticsearch
from django.db import DataError, IntegrityError
from django.core.exceptions import ValidationError
from article.models import Article
from functional_tests.factory import ArticleFactory, AuthorFactory
class ArticleAdminFormTest(TestCase):
def setUp(self):
self.client = Client()
self.test_author = AuthorFactory(name='xyz', slug="xyz")
self.article = ArticleFactory(title="english_article", authors=(self.test_author,), language='en')
def login_admin(self):
User.objects.create_superuser('pari', 'pari@test.com', "pari")
self.client.login(username="pari", password="pari")
def test_no_article_can_be_stored_without_a_title(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"): # Slug and title fields cannot be null.
ArticleFactory(title="")
def test_article_can_be_stored_without_content(self):
article = ArticleFactory(title='Test', content='')
self.assertEqual(article.title, 'Test')
def test_article_cannot_be_stored_without_language(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', language='')
| <commit_before>from django.contrib.auth.models import User
from django.test import TestCase, override_settings, Client
from django.test import RequestFactory
from mock import MagicMock
from wagtail.wagtailsearch.backends.elasticsearch import Elasticsearch
from django.db import DataError, IntegrityError
from django.core.exceptions import ValidationError
from article.models import Article
from functional_tests.factory import ArticleFactory, AuthorFactory
class ArticleAdminFormTest(TestCase):
def setUp(self):
self.client = Client()
self.test_author = AuthorFactory(name='xyz', slug="xyz")
self.article = ArticleFactory(title="english_article", authors=(self.test_author,), language='en')
def login_admin(self):
User.objects.create_superuser('pari', 'pari@test.com', "pari")
self.client.login(username="pari", password="pari")
def test_no_article_can_be_stored_without_a_title(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"): # Slug and title fields cannot be null.
ArticleFactory(title="")
def test_article_cannot_be_stored_without_content(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', content='')
def test_article_cannot_be_stored_without_language(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', language='')
<commit_msg>Fix test for storing article without any content<commit_after> | from django.contrib.auth.models import User
from django.test import TestCase, override_settings, Client
from django.test import RequestFactory
from mock import MagicMock
from wagtail.wagtailsearch.backends.elasticsearch import Elasticsearch
from django.db import DataError, IntegrityError
from django.core.exceptions import ValidationError
from article.models import Article
from functional_tests.factory import ArticleFactory, AuthorFactory
class ArticleAdminFormTest(TestCase):
def setUp(self):
self.client = Client()
self.test_author = AuthorFactory(name='xyz', slug="xyz")
self.article = ArticleFactory(title="english_article", authors=(self.test_author,), language='en')
def login_admin(self):
User.objects.create_superuser('pari', 'pari@test.com', "pari")
self.client.login(username="pari", password="pari")
def test_no_article_can_be_stored_without_a_title(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"): # Slug and title fields cannot be null.
ArticleFactory(title="")
def test_article_can_be_stored_without_content(self):
article = ArticleFactory(title='Test', content='')
self.assertEqual(article.title, 'Test')
def test_article_cannot_be_stored_without_language(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', language='')
| from django.contrib.auth.models import User
from django.test import TestCase, override_settings, Client
from django.test import RequestFactory
from mock import MagicMock
from wagtail.wagtailsearch.backends.elasticsearch import Elasticsearch
from django.db import DataError, IntegrityError
from django.core.exceptions import ValidationError
from article.models import Article
from functional_tests.factory import ArticleFactory, AuthorFactory
class ArticleAdminFormTest(TestCase):
def setUp(self):
self.client = Client()
self.test_author = AuthorFactory(name='xyz', slug="xyz")
self.article = ArticleFactory(title="english_article", authors=(self.test_author,), language='en')
def login_admin(self):
User.objects.create_superuser('pari', 'pari@test.com', "pari")
self.client.login(username="pari", password="pari")
def test_no_article_can_be_stored_without_a_title(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"): # Slug and title fields cannot be null.
ArticleFactory(title="")
def test_article_cannot_be_stored_without_content(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', content='')
def test_article_cannot_be_stored_without_language(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', language='')
Fix test for storing article without any contentfrom django.contrib.auth.models import User
from django.test import TestCase, override_settings, Client
from django.test import RequestFactory
from mock import MagicMock
from wagtail.wagtailsearch.backends.elasticsearch import Elasticsearch
from django.db import DataError, IntegrityError
from django.core.exceptions import ValidationError
from article.models import Article
from functional_tests.factory import ArticleFactory, AuthorFactory
class ArticleAdminFormTest(TestCase):
def setUp(self):
self.client = Client()
self.test_author = AuthorFactory(name='xyz', slug="xyz")
self.article = ArticleFactory(title="english_article", authors=(self.test_author,), language='en')
def login_admin(self):
User.objects.create_superuser('pari', 'pari@test.com', "pari")
self.client.login(username="pari", password="pari")
def test_no_article_can_be_stored_without_a_title(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"): # Slug and title fields cannot be null.
ArticleFactory(title="")
def test_article_can_be_stored_without_content(self):
article = ArticleFactory(title='Test', content='')
self.assertEqual(article.title, 'Test')
def test_article_cannot_be_stored_without_language(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', language='')
| <commit_before>from django.contrib.auth.models import User
from django.test import TestCase, override_settings, Client
from django.test import RequestFactory
from mock import MagicMock
from wagtail.wagtailsearch.backends.elasticsearch import Elasticsearch
from django.db import DataError, IntegrityError
from django.core.exceptions import ValidationError
from article.models import Article
from functional_tests.factory import ArticleFactory, AuthorFactory
class ArticleAdminFormTest(TestCase):
def setUp(self):
self.client = Client()
self.test_author = AuthorFactory(name='xyz', slug="xyz")
self.article = ArticleFactory(title="english_article", authors=(self.test_author,), language='en')
def login_admin(self):
User.objects.create_superuser('pari', 'pari@test.com', "pari")
self.client.login(username="pari", password="pari")
def test_no_article_can_be_stored_without_a_title(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"): # Slug and title fields cannot be null.
ArticleFactory(title="")
def test_article_cannot_be_stored_without_content(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', content='')
def test_article_cannot_be_stored_without_language(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', language='')
<commit_msg>Fix test for storing article without any content<commit_after>from django.contrib.auth.models import User
from django.test import TestCase, override_settings, Client
from django.test import RequestFactory
from mock import MagicMock
from wagtail.wagtailsearch.backends.elasticsearch import Elasticsearch
from django.db import DataError, IntegrityError
from django.core.exceptions import ValidationError
from article.models import Article
from functional_tests.factory import ArticleFactory, AuthorFactory
class ArticleAdminFormTest(TestCase):
def setUp(self):
self.client = Client()
self.test_author = AuthorFactory(name='xyz', slug="xyz")
self.article = ArticleFactory(title="english_article", authors=(self.test_author,), language='en')
def login_admin(self):
User.objects.create_superuser('pari', 'pari@test.com', "pari")
self.client.login(username="pari", password="pari")
def test_no_article_can_be_stored_without_a_title(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"): # Slug and title fields cannot be null.
ArticleFactory(title="")
def test_article_can_be_stored_without_content(self):
article = ArticleFactory(title='Test', content='')
self.assertEqual(article.title, 'Test')
def test_article_cannot_be_stored_without_language(self):
with self.assertRaisesRegexp(ValidationError,
"This field cannot be blank"):
ArticleFactory(title='Test', language='')
|
d12be3446cad8b3fa3ae9a1860b3bd6ed20a1d9e | cass-prototype/reddit/api/serializers.py | cass-prototype/reddit/api/serializers.py | from reddit.models import Blog
from rest_framework import serializers
class BlogSerializer(serializers.Serializer):
blog_id = serializers.UUIDField()
created_at = serializers.DateTimeField()
user = serializers.CharField()
description = serializers.CharField()
def create(self, validated_data):
# Taken from: http://www.cdrf.co/3.3/rest_framework.serializers/ModelSerializer.html
return Blog.objects.create(**validated_data)
def _validate_created_at(self, validated_data):
"""
Cassandra creates a timezone unaware DateTime while DRF automatically
sets the timezone to UTC. We clean the data by removing the UTC
"""
validated_data['created_at'] = validated_data['created_at'].replace(tzinfo=None)
return validated_data
def update(self, instance, validated_data):
validated_data = self._validate_created_at(validated_data) # Clean data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
| from reddit.models import Blog
from rest_framework import serializers
class BlogSerializer(serializers.Serializer):
blog_id = serializers.UUIDField()
created_at = serializers.DateTimeField()
user = serializers.CharField()
description = serializers.CharField()
def create(self, validated_data):
# Taken from: http://www.cdrf.co/3.3/rest_framework.serializers/ModelSerializer.html
return Blog.objects.create(**validated_data)
def _validate_created_at(self, validated_data):
"""
Cassandra creates a timezone unaware DateTime while DRF automatically
sets the timezone to UTC. We clean the data by removing the UTC
"""
validated_data['created_at'] = validated_data['created_at'].replace(tzinfo=None)
return validated_data
def update(self, instance, validated_data):
if validated_data.get('created_at', None):
validated_data = self._validate_created_at(validated_data) # Clean data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
| Fix to API Update to allow PATCH | Fix to API Update to allow PATCH
| Python | mit | WilliamQLiu/django-cassandra-prototype,WilliamQLiu/django-cassandra-prototype | from reddit.models import Blog
from rest_framework import serializers
class BlogSerializer(serializers.Serializer):
blog_id = serializers.UUIDField()
created_at = serializers.DateTimeField()
user = serializers.CharField()
description = serializers.CharField()
def create(self, validated_data):
# Taken from: http://www.cdrf.co/3.3/rest_framework.serializers/ModelSerializer.html
return Blog.objects.create(**validated_data)
def _validate_created_at(self, validated_data):
"""
Cassandra creates a timezone unaware DateTime while DRF automatically
sets the timezone to UTC. We clean the data by removing the UTC
"""
validated_data['created_at'] = validated_data['created_at'].replace(tzinfo=None)
return validated_data
def update(self, instance, validated_data):
validated_data = self._validate_created_at(validated_data) # Clean data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
Fix to API Update to allow PATCH | from reddit.models import Blog
from rest_framework import serializers
class BlogSerializer(serializers.Serializer):
blog_id = serializers.UUIDField()
created_at = serializers.DateTimeField()
user = serializers.CharField()
description = serializers.CharField()
def create(self, validated_data):
# Taken from: http://www.cdrf.co/3.3/rest_framework.serializers/ModelSerializer.html
return Blog.objects.create(**validated_data)
def _validate_created_at(self, validated_data):
"""
Cassandra creates a timezone unaware DateTime while DRF automatically
sets the timezone to UTC. We clean the data by removing the UTC
"""
validated_data['created_at'] = validated_data['created_at'].replace(tzinfo=None)
return validated_data
def update(self, instance, validated_data):
if validated_data.get('created_at', None):
validated_data = self._validate_created_at(validated_data) # Clean data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
| <commit_before>from reddit.models import Blog
from rest_framework import serializers
class BlogSerializer(serializers.Serializer):
blog_id = serializers.UUIDField()
created_at = serializers.DateTimeField()
user = serializers.CharField()
description = serializers.CharField()
def create(self, validated_data):
# Taken from: http://www.cdrf.co/3.3/rest_framework.serializers/ModelSerializer.html
return Blog.objects.create(**validated_data)
def _validate_created_at(self, validated_data):
"""
Cassandra creates a timezone unaware DateTime while DRF automatically
sets the timezone to UTC. We clean the data by removing the UTC
"""
validated_data['created_at'] = validated_data['created_at'].replace(tzinfo=None)
return validated_data
def update(self, instance, validated_data):
validated_data = self._validate_created_at(validated_data) # Clean data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
<commit_msg>Fix to API Update to allow PATCH<commit_after> | from reddit.models import Blog
from rest_framework import serializers
class BlogSerializer(serializers.Serializer):
blog_id = serializers.UUIDField()
created_at = serializers.DateTimeField()
user = serializers.CharField()
description = serializers.CharField()
def create(self, validated_data):
# Taken from: http://www.cdrf.co/3.3/rest_framework.serializers/ModelSerializer.html
return Blog.objects.create(**validated_data)
def _validate_created_at(self, validated_data):
"""
Cassandra creates a timezone unaware DateTime while DRF automatically
sets the timezone to UTC. We clean the data by removing the UTC
"""
validated_data['created_at'] = validated_data['created_at'].replace(tzinfo=None)
return validated_data
def update(self, instance, validated_data):
if validated_data.get('created_at', None):
validated_data = self._validate_created_at(validated_data) # Clean data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
| from reddit.models import Blog
from rest_framework import serializers
class BlogSerializer(serializers.Serializer):
blog_id = serializers.UUIDField()
created_at = serializers.DateTimeField()
user = serializers.CharField()
description = serializers.CharField()
def create(self, validated_data):
# Taken from: http://www.cdrf.co/3.3/rest_framework.serializers/ModelSerializer.html
return Blog.objects.create(**validated_data)
def _validate_created_at(self, validated_data):
"""
Cassandra creates a timezone unaware DateTime while DRF automatically
sets the timezone to UTC. We clean the data by removing the UTC
"""
validated_data['created_at'] = validated_data['created_at'].replace(tzinfo=None)
return validated_data
def update(self, instance, validated_data):
validated_data = self._validate_created_at(validated_data) # Clean data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
Fix to API Update to allow PATCHfrom reddit.models import Blog
from rest_framework import serializers
class BlogSerializer(serializers.Serializer):
blog_id = serializers.UUIDField()
created_at = serializers.DateTimeField()
user = serializers.CharField()
description = serializers.CharField()
def create(self, validated_data):
# Taken from: http://www.cdrf.co/3.3/rest_framework.serializers/ModelSerializer.html
return Blog.objects.create(**validated_data)
def _validate_created_at(self, validated_data):
"""
Cassandra creates a timezone unaware DateTime while DRF automatically
sets the timezone to UTC. We clean the data by removing the UTC
"""
validated_data['created_at'] = validated_data['created_at'].replace(tzinfo=None)
return validated_data
def update(self, instance, validated_data):
if validated_data.get('created_at', None):
validated_data = self._validate_created_at(validated_data) # Clean data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
| <commit_before>from reddit.models import Blog
from rest_framework import serializers
class BlogSerializer(serializers.Serializer):
blog_id = serializers.UUIDField()
created_at = serializers.DateTimeField()
user = serializers.CharField()
description = serializers.CharField()
def create(self, validated_data):
# Taken from: http://www.cdrf.co/3.3/rest_framework.serializers/ModelSerializer.html
return Blog.objects.create(**validated_data)
def _validate_created_at(self, validated_data):
"""
Cassandra creates a timezone unaware DateTime while DRF automatically
sets the timezone to UTC. We clean the data by removing the UTC
"""
validated_data['created_at'] = validated_data['created_at'].replace(tzinfo=None)
return validated_data
def update(self, instance, validated_data):
validated_data = self._validate_created_at(validated_data) # Clean data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
<commit_msg>Fix to API Update to allow PATCH<commit_after>from reddit.models import Blog
from rest_framework import serializers
class BlogSerializer(serializers.Serializer):
blog_id = serializers.UUIDField()
created_at = serializers.DateTimeField()
user = serializers.CharField()
description = serializers.CharField()
def create(self, validated_data):
# Taken from: http://www.cdrf.co/3.3/rest_framework.serializers/ModelSerializer.html
return Blog.objects.create(**validated_data)
def _validate_created_at(self, validated_data):
"""
Cassandra creates a timezone unaware DateTime while DRF automatically
sets the timezone to UTC. We clean the data by removing the UTC
"""
validated_data['created_at'] = validated_data['created_at'].replace(tzinfo=None)
return validated_data
def update(self, instance, validated_data):
if validated_data.get('created_at', None):
validated_data = self._validate_created_at(validated_data) # Clean data
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
|
bbd98c7da097d4962d371cd5ff75d9f67a0e3fd1 | renderMenu.py | renderMenu.py | #!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
app = Flask(__name__)
@app.route("/")
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
currentDatetime = nowWaterloo.strftime('%I:%M %p on %A, %B %d, %Y')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
r = requests.get('http://s3.amazonaws.com/uwfoodmenu/multi.txt', auth=S3Auth(ACCESS_KEY, SECRET_KEY))
menu = r.json()['response']['data']
return render_template('index.html', menu=menu, nowWaterloo=nowWaterloo, currentDatetime=currentDatetime, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
| #!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
app = Flask(__name__)
@app.route("/")
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
currentDatetime = nowWaterloo.strftime('%I:%M %p on %A, %B %d, %Y')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
r = requests.get('http://s3.amazonaws.com/uwfoodmenu/response.txt', auth=S3Auth(ACCESS_KEY, SECRET_KEY))
menu = r.json()['response']['data']
return render_template('index.html', menu=menu, nowWaterloo=nowWaterloo, currentDatetime=currentDatetime, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
| Switch from development to actual data. | Switch from development to actual data.
| Python | mit | alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu | #!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
app = Flask(__name__)
@app.route("/")
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
currentDatetime = nowWaterloo.strftime('%I:%M %p on %A, %B %d, %Y')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
r = requests.get('http://s3.amazonaws.com/uwfoodmenu/multi.txt', auth=S3Auth(ACCESS_KEY, SECRET_KEY))
menu = r.json()['response']['data']
return render_template('index.html', menu=menu, nowWaterloo=nowWaterloo, currentDatetime=currentDatetime, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
Switch from development to actual data. | #!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
app = Flask(__name__)
@app.route("/")
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
currentDatetime = nowWaterloo.strftime('%I:%M %p on %A, %B %d, %Y')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
r = requests.get('http://s3.amazonaws.com/uwfoodmenu/response.txt', auth=S3Auth(ACCESS_KEY, SECRET_KEY))
menu = r.json()['response']['data']
return render_template('index.html', menu=menu, nowWaterloo=nowWaterloo, currentDatetime=currentDatetime, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
| <commit_before>#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
app = Flask(__name__)
@app.route("/")
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
currentDatetime = nowWaterloo.strftime('%I:%M %p on %A, %B %d, %Y')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
r = requests.get('http://s3.amazonaws.com/uwfoodmenu/multi.txt', auth=S3Auth(ACCESS_KEY, SECRET_KEY))
menu = r.json()['response']['data']
return render_template('index.html', menu=menu, nowWaterloo=nowWaterloo, currentDatetime=currentDatetime, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
<commit_msg>Switch from development to actual data.<commit_after> | #!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
app = Flask(__name__)
@app.route("/")
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
currentDatetime = nowWaterloo.strftime('%I:%M %p on %A, %B %d, %Y')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
r = requests.get('http://s3.amazonaws.com/uwfoodmenu/response.txt', auth=S3Auth(ACCESS_KEY, SECRET_KEY))
menu = r.json()['response']['data']
return render_template('index.html', menu=menu, nowWaterloo=nowWaterloo, currentDatetime=currentDatetime, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
| #!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
app = Flask(__name__)
@app.route("/")
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
currentDatetime = nowWaterloo.strftime('%I:%M %p on %A, %B %d, %Y')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
r = requests.get('http://s3.amazonaws.com/uwfoodmenu/multi.txt', auth=S3Auth(ACCESS_KEY, SECRET_KEY))
menu = r.json()['response']['data']
return render_template('index.html', menu=menu, nowWaterloo=nowWaterloo, currentDatetime=currentDatetime, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
Switch from development to actual data.#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
app = Flask(__name__)
@app.route("/")
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
currentDatetime = nowWaterloo.strftime('%I:%M %p on %A, %B %d, %Y')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
r = requests.get('http://s3.amazonaws.com/uwfoodmenu/response.txt', auth=S3Auth(ACCESS_KEY, SECRET_KEY))
menu = r.json()['response']['data']
return render_template('index.html', menu=menu, nowWaterloo=nowWaterloo, currentDatetime=currentDatetime, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
| <commit_before>#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
app = Flask(__name__)
@app.route("/")
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
currentDatetime = nowWaterloo.strftime('%I:%M %p on %A, %B %d, %Y')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
r = requests.get('http://s3.amazonaws.com/uwfoodmenu/multi.txt', auth=S3Auth(ACCESS_KEY, SECRET_KEY))
menu = r.json()['response']['data']
return render_template('index.html', menu=menu, nowWaterloo=nowWaterloo, currentDatetime=currentDatetime, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
<commit_msg>Switch from development to actual data.<commit_after>#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
app = Flask(__name__)
@app.route("/")
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
currentDatetime = nowWaterloo.strftime('%I:%M %p on %A, %B %d, %Y')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
r = requests.get('http://s3.amazonaws.com/uwfoodmenu/response.txt', auth=S3Auth(ACCESS_KEY, SECRET_KEY))
menu = r.json()['response']['data']
return render_template('index.html', menu=menu, nowWaterloo=nowWaterloo, currentDatetime=currentDatetime, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
9b7f23e02d0f35b27a5a0f1aad47c363b59d48e3 | configuration/configuration.py | configuration/configuration.py | from ConfigParser import ConfigParser
from environment import ENV
import sys
import os
import re
def get_config(environment):
config_dir = "%s/%s" % (re.sub('configuration\.(py|pyc)', '', os.path.abspath(__file__)), environment)
config_files = os.listdir(config_dir)
config_files = ["%s/%s" % (config_dir, file_name) for file_name in config_files]
configuration = ConfigParser()
configuration.read(config_files)
return configuration
config = get_config(ENV)
| from ConfigParser import ConfigParser
from environment import ENV
import sys
import os
import re
def get_config(environment):
config_dir = "%s/%s" % (re.sub('configuration\.(pyc|py)', '', os.path.abspath(__file__)), environment)
config_files = os.listdir(config_dir)
config_files = ["%s/%s" % (config_dir, file_name) for file_name in config_files]
configuration = ConfigParser()
configuration.read(config_files)
return configuration
config = get_config(ENV)
| Fix to the order of py and pyc in the regex | Fix to the order of py and pyc in the regex
| Python | agpl-3.0 | ushahidi/swiftgate,ushahidi/swiftgate | from ConfigParser import ConfigParser
from environment import ENV
import sys
import os
import re
def get_config(environment):
config_dir = "%s/%s" % (re.sub('configuration\.(py|pyc)', '', os.path.abspath(__file__)), environment)
config_files = os.listdir(config_dir)
config_files = ["%s/%s" % (config_dir, file_name) for file_name in config_files]
configuration = ConfigParser()
configuration.read(config_files)
return configuration
config = get_config(ENV)
Fix to the order of py and pyc in the regex | from ConfigParser import ConfigParser
from environment import ENV
import sys
import os
import re
def get_config(environment):
config_dir = "%s/%s" % (re.sub('configuration\.(pyc|py)', '', os.path.abspath(__file__)), environment)
config_files = os.listdir(config_dir)
config_files = ["%s/%s" % (config_dir, file_name) for file_name in config_files]
configuration = ConfigParser()
configuration.read(config_files)
return configuration
config = get_config(ENV)
| <commit_before>from ConfigParser import ConfigParser
from environment import ENV
import sys
import os
import re
def get_config(environment):
config_dir = "%s/%s" % (re.sub('configuration\.(py|pyc)', '', os.path.abspath(__file__)), environment)
config_files = os.listdir(config_dir)
config_files = ["%s/%s" % (config_dir, file_name) for file_name in config_files]
configuration = ConfigParser()
configuration.read(config_files)
return configuration
config = get_config(ENV)
<commit_msg>Fix to the order of py and pyc in the regex<commit_after> | from ConfigParser import ConfigParser
from environment import ENV
import sys
import os
import re
def get_config(environment):
config_dir = "%s/%s" % (re.sub('configuration\.(pyc|py)', '', os.path.abspath(__file__)), environment)
config_files = os.listdir(config_dir)
config_files = ["%s/%s" % (config_dir, file_name) for file_name in config_files]
configuration = ConfigParser()
configuration.read(config_files)
return configuration
config = get_config(ENV)
| from ConfigParser import ConfigParser
from environment import ENV
import sys
import os
import re
def get_config(environment):
config_dir = "%s/%s" % (re.sub('configuration\.(py|pyc)', '', os.path.abspath(__file__)), environment)
config_files = os.listdir(config_dir)
config_files = ["%s/%s" % (config_dir, file_name) for file_name in config_files]
configuration = ConfigParser()
configuration.read(config_files)
return configuration
config = get_config(ENV)
Fix to the order of py and pyc in the regexfrom ConfigParser import ConfigParser
from environment import ENV
import sys
import os
import re
def get_config(environment):
config_dir = "%s/%s" % (re.sub('configuration\.(pyc|py)', '', os.path.abspath(__file__)), environment)
config_files = os.listdir(config_dir)
config_files = ["%s/%s" % (config_dir, file_name) for file_name in config_files]
configuration = ConfigParser()
configuration.read(config_files)
return configuration
config = get_config(ENV)
| <commit_before>from ConfigParser import ConfigParser
from environment import ENV
import sys
import os
import re
def get_config(environment):
config_dir = "%s/%s" % (re.sub('configuration\.(py|pyc)', '', os.path.abspath(__file__)), environment)
config_files = os.listdir(config_dir)
config_files = ["%s/%s" % (config_dir, file_name) for file_name in config_files]
configuration = ConfigParser()
configuration.read(config_files)
return configuration
config = get_config(ENV)
<commit_msg>Fix to the order of py and pyc in the regex<commit_after>from ConfigParser import ConfigParser
from environment import ENV
import sys
import os
import re
def get_config(environment):
config_dir = "%s/%s" % (re.sub('configuration\.(pyc|py)', '', os.path.abspath(__file__)), environment)
config_files = os.listdir(config_dir)
config_files = ["%s/%s" % (config_dir, file_name) for file_name in config_files]
configuration = ConfigParser()
configuration.read(config_files)
return configuration
config = get_config(ENV)
|
d8e0c07363069e664dcb6071bc84e8ecc0706739 | plugins/join_on_invite/plugin.py | plugins/join_on_invite/plugin.py | class InviteJoinPlugin(object):
"""Simple plugin that joins a channel if an invite is given."""
callback_id = None
"""ID generated when callback was added for the irc.invite event"""
def __init__(self, cardinal):
"""Register our callback and save the callback ID"""
self.callback_id = cardinal.event_manager.register_callback("irc.invite", self.join_channel)
def join_channel(self, cardinal, user, channel):
"""Callback for irc.invite that joins a channel"""
cardinal.join(channel)
def close(self, cardinal):
"""When the plugin is closed, removes our callback"""
cardinal.event_manager.remove_callback("irc.invite", self.callback_id)
def setup(cardinal):
return InviteJoinPlugin(cardinal)
| from cardinal.decorators import event
class InviteJoinPlugin(object):
"""Simple plugin that joins a channel if an invite is given."""
@event('irc.invite')
def join_channel(self, cardinal, user, channel):
"""Callback for irc.invite that joins a channel"""
cardinal.join(channel)
def setup(cardinal):
return InviteJoinPlugin()
| Use join_on_invite as @event decorator example | Use join_on_invite as @event decorator example
| Python | mit | JohnMaguire/Cardinal,BiohZn/Cardinal | class InviteJoinPlugin(object):
"""Simple plugin that joins a channel if an invite is given."""
callback_id = None
"""ID generated when callback was added for the irc.invite event"""
def __init__(self, cardinal):
"""Register our callback and save the callback ID"""
self.callback_id = cardinal.event_manager.register_callback("irc.invite", self.join_channel)
def join_channel(self, cardinal, user, channel):
"""Callback for irc.invite that joins a channel"""
cardinal.join(channel)
def close(self, cardinal):
"""When the plugin is closed, removes our callback"""
cardinal.event_manager.remove_callback("irc.invite", self.callback_id)
def setup(cardinal):
return InviteJoinPlugin(cardinal)
Use join_on_invite as @event decorator example | from cardinal.decorators import event
class InviteJoinPlugin(object):
"""Simple plugin that joins a channel if an invite is given."""
@event('irc.invite')
def join_channel(self, cardinal, user, channel):
"""Callback for irc.invite that joins a channel"""
cardinal.join(channel)
def setup(cardinal):
return InviteJoinPlugin()
| <commit_before>class InviteJoinPlugin(object):
"""Simple plugin that joins a channel if an invite is given."""
callback_id = None
"""ID generated when callback was added for the irc.invite event"""
def __init__(self, cardinal):
"""Register our callback and save the callback ID"""
self.callback_id = cardinal.event_manager.register_callback("irc.invite", self.join_channel)
def join_channel(self, cardinal, user, channel):
"""Callback for irc.invite that joins a channel"""
cardinal.join(channel)
def close(self, cardinal):
"""When the plugin is closed, removes our callback"""
cardinal.event_manager.remove_callback("irc.invite", self.callback_id)
def setup(cardinal):
return InviteJoinPlugin(cardinal)
<commit_msg>Use join_on_invite as @event decorator example<commit_after> | from cardinal.decorators import event
class InviteJoinPlugin(object):
"""Simple plugin that joins a channel if an invite is given."""
@event('irc.invite')
def join_channel(self, cardinal, user, channel):
"""Callback for irc.invite that joins a channel"""
cardinal.join(channel)
def setup(cardinal):
return InviteJoinPlugin()
| class InviteJoinPlugin(object):
"""Simple plugin that joins a channel if an invite is given."""
callback_id = None
"""ID generated when callback was added for the irc.invite event"""
def __init__(self, cardinal):
"""Register our callback and save the callback ID"""
self.callback_id = cardinal.event_manager.register_callback("irc.invite", self.join_channel)
def join_channel(self, cardinal, user, channel):
"""Callback for irc.invite that joins a channel"""
cardinal.join(channel)
def close(self, cardinal):
"""When the plugin is closed, removes our callback"""
cardinal.event_manager.remove_callback("irc.invite", self.callback_id)
def setup(cardinal):
return InviteJoinPlugin(cardinal)
Use join_on_invite as @event decorator examplefrom cardinal.decorators import event
class InviteJoinPlugin(object):
"""Simple plugin that joins a channel if an invite is given."""
@event('irc.invite')
def join_channel(self, cardinal, user, channel):
"""Callback for irc.invite that joins a channel"""
cardinal.join(channel)
def setup(cardinal):
return InviteJoinPlugin()
| <commit_before>class InviteJoinPlugin(object):
"""Simple plugin that joins a channel if an invite is given."""
callback_id = None
"""ID generated when callback was added for the irc.invite event"""
def __init__(self, cardinal):
"""Register our callback and save the callback ID"""
self.callback_id = cardinal.event_manager.register_callback("irc.invite", self.join_channel)
def join_channel(self, cardinal, user, channel):
"""Callback for irc.invite that joins a channel"""
cardinal.join(channel)
def close(self, cardinal):
"""When the plugin is closed, removes our callback"""
cardinal.event_manager.remove_callback("irc.invite", self.callback_id)
def setup(cardinal):
return InviteJoinPlugin(cardinal)
<commit_msg>Use join_on_invite as @event decorator example<commit_after>from cardinal.decorators import event
class InviteJoinPlugin(object):
"""Simple plugin that joins a channel if an invite is given."""
@event('irc.invite')
def join_channel(self, cardinal, user, channel):
"""Callback for irc.invite that joins a channel"""
cardinal.join(channel)
def setup(cardinal):
return InviteJoinPlugin()
|
efdb4f57f3fe18f3c9a16df0adb735f13cd7c567 | vigir_synthesis_manager/src/vigir_synthesis_manager/ltl_compilation_client.py | vigir_synthesis_manager/src/vigir_synthesis_manager/ltl_compilation_client.py | #!/usr/bin/env python
import rospy
from vigir_synthesis_msgs.srv import LTLCompilation
# from vigir_synthesis_msgs.msg import LTLSpecification, BSErrorCodes
def ltl_compilation_client(system, goals, initial_conditions, custom_ltl = None):
'''Client'''
rospy.wait_for_service('ltl_compilation')
try:
ltl_compilation_srv = rospy.ServiceProxy('ltl_compilation', LTLCompilation)
response = ltl_compilation_srv(system, goals, initial_conditions)
#DEBUG
print response.ltl_specification.sys_init
print response.ltl_specification.env_init
print response.ltl_specification.sys_trans
print response.ltl_specification.env_trans
print response.ltl_specification.sys_liveness
print response.ltl_specification.env_liveness
print 'LTL Compilation error code: ', response.error_code
return response
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
if __name__ == "__main__":
ltl_compilation_client('atlas', ['pickup'], ['stand'])
| #!/usr/bin/env python
import rospy
from vigir_synthesis_msgs.srv import LTLCompilation
# from vigir_synthesis_msgs.msg import LTLSpecification, BSErrorCodes
def ltl_compilation_client(system, goals, initial_conditions, custom_ltl = None):
'''Client'''
rospy.wait_for_service('ltl_compilation')
try:
ltl_compilation_srv = rospy.ServiceProxy('ltl_compilation', LTLCompilation)
response = ltl_compilation_srv(system, goals, initial_conditions)
#DEBUG
# print response.ltl_specification
print 'LTL Compilation error code: ', response.error_code
return response
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
if __name__ == "__main__":
ltl_compilation_client('atlas', ['pickup'], ['stand'])
| Remove prints from LTL compilation client | [vigir_synthesis_manager] Remove prints from LTL compilation client
| Python | bsd-3-clause | team-vigir/vigir_behavior_synthesis,team-vigir/vigir_behavior_synthesis | #!/usr/bin/env python
import rospy
from vigir_synthesis_msgs.srv import LTLCompilation
# from vigir_synthesis_msgs.msg import LTLSpecification, BSErrorCodes
def ltl_compilation_client(system, goals, initial_conditions, custom_ltl = None):
'''Client'''
rospy.wait_for_service('ltl_compilation')
try:
ltl_compilation_srv = rospy.ServiceProxy('ltl_compilation', LTLCompilation)
response = ltl_compilation_srv(system, goals, initial_conditions)
#DEBUG
print response.ltl_specification.sys_init
print response.ltl_specification.env_init
print response.ltl_specification.sys_trans
print response.ltl_specification.env_trans
print response.ltl_specification.sys_liveness
print response.ltl_specification.env_liveness
print 'LTL Compilation error code: ', response.error_code
return response
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
if __name__ == "__main__":
ltl_compilation_client('atlas', ['pickup'], ['stand'])
[vigir_synthesis_manager] Remove prints from LTL compilation client | #!/usr/bin/env python
import rospy
from vigir_synthesis_msgs.srv import LTLCompilation
# from vigir_synthesis_msgs.msg import LTLSpecification, BSErrorCodes
def ltl_compilation_client(system, goals, initial_conditions, custom_ltl = None):
'''Client'''
rospy.wait_for_service('ltl_compilation')
try:
ltl_compilation_srv = rospy.ServiceProxy('ltl_compilation', LTLCompilation)
response = ltl_compilation_srv(system, goals, initial_conditions)
#DEBUG
# print response.ltl_specification
print 'LTL Compilation error code: ', response.error_code
return response
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
if __name__ == "__main__":
ltl_compilation_client('atlas', ['pickup'], ['stand'])
| <commit_before>#!/usr/bin/env python
import rospy
from vigir_synthesis_msgs.srv import LTLCompilation
# from vigir_synthesis_msgs.msg import LTLSpecification, BSErrorCodes
def ltl_compilation_client(system, goals, initial_conditions, custom_ltl = None):
'''Client'''
rospy.wait_for_service('ltl_compilation')
try:
ltl_compilation_srv = rospy.ServiceProxy('ltl_compilation', LTLCompilation)
response = ltl_compilation_srv(system, goals, initial_conditions)
#DEBUG
print response.ltl_specification.sys_init
print response.ltl_specification.env_init
print response.ltl_specification.sys_trans
print response.ltl_specification.env_trans
print response.ltl_specification.sys_liveness
print response.ltl_specification.env_liveness
print 'LTL Compilation error code: ', response.error_code
return response
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
if __name__ == "__main__":
ltl_compilation_client('atlas', ['pickup'], ['stand'])
<commit_msg>[vigir_synthesis_manager] Remove prints from LTL compilation client<commit_after> | #!/usr/bin/env python
import rospy
from vigir_synthesis_msgs.srv import LTLCompilation
# from vigir_synthesis_msgs.msg import LTLSpecification, BSErrorCodes
def ltl_compilation_client(system, goals, initial_conditions, custom_ltl = None):
'''Client'''
rospy.wait_for_service('ltl_compilation')
try:
ltl_compilation_srv = rospy.ServiceProxy('ltl_compilation', LTLCompilation)
response = ltl_compilation_srv(system, goals, initial_conditions)
#DEBUG
# print response.ltl_specification
print 'LTL Compilation error code: ', response.error_code
return response
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
if __name__ == "__main__":
ltl_compilation_client('atlas', ['pickup'], ['stand'])
| #!/usr/bin/env python
import rospy
from vigir_synthesis_msgs.srv import LTLCompilation
# from vigir_synthesis_msgs.msg import LTLSpecification, BSErrorCodes
def ltl_compilation_client(system, goals, initial_conditions, custom_ltl = None):
'''Client'''
rospy.wait_for_service('ltl_compilation')
try:
ltl_compilation_srv = rospy.ServiceProxy('ltl_compilation', LTLCompilation)
response = ltl_compilation_srv(system, goals, initial_conditions)
#DEBUG
print response.ltl_specification.sys_init
print response.ltl_specification.env_init
print response.ltl_specification.sys_trans
print response.ltl_specification.env_trans
print response.ltl_specification.sys_liveness
print response.ltl_specification.env_liveness
print 'LTL Compilation error code: ', response.error_code
return response
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
if __name__ == "__main__":
ltl_compilation_client('atlas', ['pickup'], ['stand'])
[vigir_synthesis_manager] Remove prints from LTL compilation client#!/usr/bin/env python
import rospy
from vigir_synthesis_msgs.srv import LTLCompilation
# from vigir_synthesis_msgs.msg import LTLSpecification, BSErrorCodes
def ltl_compilation_client(system, goals, initial_conditions, custom_ltl = None):
'''Client'''
rospy.wait_for_service('ltl_compilation')
try:
ltl_compilation_srv = rospy.ServiceProxy('ltl_compilation', LTLCompilation)
response = ltl_compilation_srv(system, goals, initial_conditions)
#DEBUG
# print response.ltl_specification
print 'LTL Compilation error code: ', response.error_code
return response
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
if __name__ == "__main__":
ltl_compilation_client('atlas', ['pickup'], ['stand'])
| <commit_before>#!/usr/bin/env python
import rospy
from vigir_synthesis_msgs.srv import LTLCompilation
# from vigir_synthesis_msgs.msg import LTLSpecification, BSErrorCodes
def ltl_compilation_client(system, goals, initial_conditions, custom_ltl = None):
'''Client'''
rospy.wait_for_service('ltl_compilation')
try:
ltl_compilation_srv = rospy.ServiceProxy('ltl_compilation', LTLCompilation)
response = ltl_compilation_srv(system, goals, initial_conditions)
#DEBUG
print response.ltl_specification.sys_init
print response.ltl_specification.env_init
print response.ltl_specification.sys_trans
print response.ltl_specification.env_trans
print response.ltl_specification.sys_liveness
print response.ltl_specification.env_liveness
print 'LTL Compilation error code: ', response.error_code
return response
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
if __name__ == "__main__":
ltl_compilation_client('atlas', ['pickup'], ['stand'])
<commit_msg>[vigir_synthesis_manager] Remove prints from LTL compilation client<commit_after>#!/usr/bin/env python
import rospy
from vigir_synthesis_msgs.srv import LTLCompilation
# from vigir_synthesis_msgs.msg import LTLSpecification, BSErrorCodes
def ltl_compilation_client(system, goals, initial_conditions, custom_ltl = None):
'''Client'''
rospy.wait_for_service('ltl_compilation')
try:
ltl_compilation_srv = rospy.ServiceProxy('ltl_compilation', LTLCompilation)
response = ltl_compilation_srv(system, goals, initial_conditions)
#DEBUG
# print response.ltl_specification
print 'LTL Compilation error code: ', response.error_code
return response
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
if __name__ == "__main__":
ltl_compilation_client('atlas', ['pickup'], ['stand'])
|
bc32e0aadfe2d83d8acb2f219f2fb6bf5f5bb150 | ehriportal/portal/management/commands/geocode_addresses.py | ehriportal/portal/management/commands/geocode_addresses.py | """Geocode Contact objects."""
import sys
from geopy import geocoders
from django.core.management.base import BaseCommand, CommandError
from portal import models
class Command(BaseCommand):
"""Set lat/long fields on contacts with a street address,
currently just using Google's geocoder."""
def handle(self, *args, **kwargs):
"""Run geocode."""
self.geocoder = geocoders.GeoNames()
for contact in models.Contact.objects.all():
self.geocode_contact(contact)
def geocode_contact(self, contact):
"""Set lat/long fields on contact objects."""
if contact.street_address:
sys.stderr.write("Geocoding: %s: %s\n" % (contact.repository.name, contact.format()))
try:
formaddr, latlon = self.geocoder.geocode(contact.format().encode("utf8"))
except ValueError:
sys.stderr.write(" - More than one value found!\n")
except geocoders.google.GTooManyQueriesError:
raise CommandError("Too many queries for Google Geocode.")
except geocoders.google.GQueryError:
sys.stderr.write(" - Unable to get latlong for address\n")
else:
contact.lat = latlon[0]
contact.lon = latlon[1]
contact.save()
sys.stderr("Set lat/lon: %s, %s\n\n" % latlon)
| """Geocode Contact objects."""
import sys
import time
from geopy import geocoders
from django.core.management.base import BaseCommand, CommandError
from portal import models
class Command(BaseCommand):
"""Set lat/long fields on contacts with a street address,
currently just using Google's geocoder."""
def handle(self, *args, **kwargs):
"""Run geocode."""
self.geocoder = geocoders.Google()
for contact in models.Contact.objects.all():
self.geocode_contact(contact)
def geocode_contact(self, contact):
"""Set lat/long fields on contact objects."""
if contact.street_address and not contact.lat:
sys.stderr.write("Geocoding: %s: %s\n" % (contact.repository.name, contact.format()))
try:
formaddr, latlon = self.geocoder.geocode(contact.format().encode("utf8"))
except ValueError:
sys.stderr.write(" - More than one value found!\n")
except geocoders.google.GTooManyQueriesError:
raise CommandError("Too many queries for Google Geocode.")
except geocoders.google.GQueryError:
sys.stderr.write(" - Unable to get latlong for address\n")
else:
contact.lat = latlon[0]
contact.lon = latlon[1]
contact.save()
sys.stderr.write("Set lat/lon: %s, %s\n\n" % latlon)
# delay to keep Google rate limit happy (hopefully)
time.sleep(0.25)
| Fix logging, add a sleep to molify Google's rate limit. | Fix logging, add a sleep to molify Google's rate limit.
| Python | mit | mikesname/ehri-collections,mikesname/ehri-collections,mikesname/ehri-collections | """Geocode Contact objects."""
import sys
from geopy import geocoders
from django.core.management.base import BaseCommand, CommandError
from portal import models
class Command(BaseCommand):
"""Set lat/long fields on contacts with a street address,
currently just using Google's geocoder."""
def handle(self, *args, **kwargs):
"""Run geocode."""
self.geocoder = geocoders.GeoNames()
for contact in models.Contact.objects.all():
self.geocode_contact(contact)
def geocode_contact(self, contact):
"""Set lat/long fields on contact objects."""
if contact.street_address:
sys.stderr.write("Geocoding: %s: %s\n" % (contact.repository.name, contact.format()))
try:
formaddr, latlon = self.geocoder.geocode(contact.format().encode("utf8"))
except ValueError:
sys.stderr.write(" - More than one value found!\n")
except geocoders.google.GTooManyQueriesError:
raise CommandError("Too many queries for Google Geocode.")
except geocoders.google.GQueryError:
sys.stderr.write(" - Unable to get latlong for address\n")
else:
contact.lat = latlon[0]
contact.lon = latlon[1]
contact.save()
sys.stderr("Set lat/lon: %s, %s\n\n" % latlon)
Fix logging, add a sleep to molify Google's rate limit. | """Geocode Contact objects."""
import sys
import time
from geopy import geocoders
from django.core.management.base import BaseCommand, CommandError
from portal import models
class Command(BaseCommand):
"""Set lat/long fields on contacts with a street address,
currently just using Google's geocoder."""
def handle(self, *args, **kwargs):
"""Run geocode."""
self.geocoder = geocoders.Google()
for contact in models.Contact.objects.all():
self.geocode_contact(contact)
def geocode_contact(self, contact):
"""Set lat/long fields on contact objects."""
if contact.street_address and not contact.lat:
sys.stderr.write("Geocoding: %s: %s\n" % (contact.repository.name, contact.format()))
try:
formaddr, latlon = self.geocoder.geocode(contact.format().encode("utf8"))
except ValueError:
sys.stderr.write(" - More than one value found!\n")
except geocoders.google.GTooManyQueriesError:
raise CommandError("Too many queries for Google Geocode.")
except geocoders.google.GQueryError:
sys.stderr.write(" - Unable to get latlong for address\n")
else:
contact.lat = latlon[0]
contact.lon = latlon[1]
contact.save()
sys.stderr.write("Set lat/lon: %s, %s\n\n" % latlon)
# delay to keep Google rate limit happy (hopefully)
time.sleep(0.25)
| <commit_before>"""Geocode Contact objects."""
import sys
from geopy import geocoders
from django.core.management.base import BaseCommand, CommandError
from portal import models
class Command(BaseCommand):
"""Set lat/long fields on contacts with a street address,
currently just using Google's geocoder."""
def handle(self, *args, **kwargs):
"""Run geocode."""
self.geocoder = geocoders.GeoNames()
for contact in models.Contact.objects.all():
self.geocode_contact(contact)
def geocode_contact(self, contact):
"""Set lat/long fields on contact objects."""
if contact.street_address:
sys.stderr.write("Geocoding: %s: %s\n" % (contact.repository.name, contact.format()))
try:
formaddr, latlon = self.geocoder.geocode(contact.format().encode("utf8"))
except ValueError:
sys.stderr.write(" - More than one value found!\n")
except geocoders.google.GTooManyQueriesError:
raise CommandError("Too many queries for Google Geocode.")
except geocoders.google.GQueryError:
sys.stderr.write(" - Unable to get latlong for address\n")
else:
contact.lat = latlon[0]
contact.lon = latlon[1]
contact.save()
sys.stderr("Set lat/lon: %s, %s\n\n" % latlon)
<commit_msg>Fix logging, add a sleep to molify Google's rate limit.<commit_after> | """Geocode Contact objects."""
import sys
import time
from geopy import geocoders
from django.core.management.base import BaseCommand, CommandError
from portal import models
class Command(BaseCommand):
"""Set lat/long fields on contacts with a street address,
currently just using Google's geocoder."""
def handle(self, *args, **kwargs):
"""Run geocode."""
self.geocoder = geocoders.Google()
for contact in models.Contact.objects.all():
self.geocode_contact(contact)
def geocode_contact(self, contact):
"""Set lat/long fields on contact objects."""
if contact.street_address and not contact.lat:
sys.stderr.write("Geocoding: %s: %s\n" % (contact.repository.name, contact.format()))
try:
formaddr, latlon = self.geocoder.geocode(contact.format().encode("utf8"))
except ValueError:
sys.stderr.write(" - More than one value found!\n")
except geocoders.google.GTooManyQueriesError:
raise CommandError("Too many queries for Google Geocode.")
except geocoders.google.GQueryError:
sys.stderr.write(" - Unable to get latlong for address\n")
else:
contact.lat = latlon[0]
contact.lon = latlon[1]
contact.save()
sys.stderr.write("Set lat/lon: %s, %s\n\n" % latlon)
# delay to keep Google rate limit happy (hopefully)
time.sleep(0.25)
| """Geocode Contact objects."""
import sys
from geopy import geocoders
from django.core.management.base import BaseCommand, CommandError
from portal import models
class Command(BaseCommand):
"""Set lat/long fields on contacts with a street address,
currently just using Google's geocoder."""
def handle(self, *args, **kwargs):
"""Run geocode."""
self.geocoder = geocoders.GeoNames()
for contact in models.Contact.objects.all():
self.geocode_contact(contact)
def geocode_contact(self, contact):
"""Set lat/long fields on contact objects."""
if contact.street_address:
sys.stderr.write("Geocoding: %s: %s\n" % (contact.repository.name, contact.format()))
try:
formaddr, latlon = self.geocoder.geocode(contact.format().encode("utf8"))
except ValueError:
sys.stderr.write(" - More than one value found!\n")
except geocoders.google.GTooManyQueriesError:
raise CommandError("Too many queries for Google Geocode.")
except geocoders.google.GQueryError:
sys.stderr.write(" - Unable to get latlong for address\n")
else:
contact.lat = latlon[0]
contact.lon = latlon[1]
contact.save()
sys.stderr("Set lat/lon: %s, %s\n\n" % latlon)
Fix logging, add a sleep to molify Google's rate limit."""Geocode Contact objects."""
import sys
import time
from geopy import geocoders
from django.core.management.base import BaseCommand, CommandError
from portal import models
class Command(BaseCommand):
"""Set lat/long fields on contacts with a street address,
currently just using Google's geocoder."""
def handle(self, *args, **kwargs):
"""Run geocode."""
self.geocoder = geocoders.Google()
for contact in models.Contact.objects.all():
self.geocode_contact(contact)
def geocode_contact(self, contact):
"""Set lat/long fields on contact objects."""
if contact.street_address and not contact.lat:
sys.stderr.write("Geocoding: %s: %s\n" % (contact.repository.name, contact.format()))
try:
formaddr, latlon = self.geocoder.geocode(contact.format().encode("utf8"))
except ValueError:
sys.stderr.write(" - More than one value found!\n")
except geocoders.google.GTooManyQueriesError:
raise CommandError("Too many queries for Google Geocode.")
except geocoders.google.GQueryError:
sys.stderr.write(" - Unable to get latlong for address\n")
else:
contact.lat = latlon[0]
contact.lon = latlon[1]
contact.save()
sys.stderr.write("Set lat/lon: %s, %s\n\n" % latlon)
# delay to keep Google rate limit happy (hopefully)
time.sleep(0.25)
| <commit_before>"""Geocode Contact objects."""
import sys
from geopy import geocoders
from django.core.management.base import BaseCommand, CommandError
from portal import models
class Command(BaseCommand):
"""Set lat/long fields on contacts with a street address,
currently just using Google's geocoder."""
def handle(self, *args, **kwargs):
"""Run geocode."""
self.geocoder = geocoders.GeoNames()
for contact in models.Contact.objects.all():
self.geocode_contact(contact)
def geocode_contact(self, contact):
"""Set lat/long fields on contact objects."""
if contact.street_address:
sys.stderr.write("Geocoding: %s: %s\n" % (contact.repository.name, contact.format()))
try:
formaddr, latlon = self.geocoder.geocode(contact.format().encode("utf8"))
except ValueError:
sys.stderr.write(" - More than one value found!\n")
except geocoders.google.GTooManyQueriesError:
raise CommandError("Too many queries for Google Geocode.")
except geocoders.google.GQueryError:
sys.stderr.write(" - Unable to get latlong for address\n")
else:
contact.lat = latlon[0]
contact.lon = latlon[1]
contact.save()
sys.stderr("Set lat/lon: %s, %s\n\n" % latlon)
<commit_msg>Fix logging, add a sleep to molify Google's rate limit.<commit_after>"""Geocode Contact objects."""
import sys
import time
from geopy import geocoders
from django.core.management.base import BaseCommand, CommandError
from portal import models
class Command(BaseCommand):
"""Set lat/long fields on contacts with a street address,
currently just using Google's geocoder."""
def handle(self, *args, **kwargs):
"""Run geocode."""
self.geocoder = geocoders.Google()
for contact in models.Contact.objects.all():
self.geocode_contact(contact)
def geocode_contact(self, contact):
"""Set lat/long fields on contact objects."""
if contact.street_address and not contact.lat:
sys.stderr.write("Geocoding: %s: %s\n" % (contact.repository.name, contact.format()))
try:
formaddr, latlon = self.geocoder.geocode(contact.format().encode("utf8"))
except ValueError:
sys.stderr.write(" - More than one value found!\n")
except geocoders.google.GTooManyQueriesError:
raise CommandError("Too many queries for Google Geocode.")
except geocoders.google.GQueryError:
sys.stderr.write(" - Unable to get latlong for address\n")
else:
contact.lat = latlon[0]
contact.lon = latlon[1]
contact.save()
sys.stderr.write("Set lat/lon: %s, %s\n\n" % latlon)
# delay to keep Google rate limit happy (hopefully)
time.sleep(0.25)
|
082076cce996593c9959fc0743f13b62d2e4842b | chared/__init__.py | chared/__init__.py | # Copyright (c) 2011 Vit Suchomel and Jan Pomikalek
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
__version__ = 'r$Rev$'
| # Copyright (c) 2011 Vit Suchomel and Jan Pomikalek
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
import re
__version__ = re.sub('.*(\d+).*', r'rev\1', '$Rev$')
| Make sure the version is displayed as r<revision number> if the information about the package version is not available. | Make sure the version is displayed as r<revision number> if the information about the package version is not available. | Python | bsd-2-clause | gilesbrown/chared,xmichelf/chared | # Copyright (c) 2011 Vit Suchomel and Jan Pomikalek
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
__version__ = 'r$Rev$'
Make sure the version is displayed as r<revision number> if the information about the package version is not available. | # Copyright (c) 2011 Vit Suchomel and Jan Pomikalek
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
import re
__version__ = re.sub('.*(\d+).*', r'rev\1', '$Rev$')
| <commit_before># Copyright (c) 2011 Vit Suchomel and Jan Pomikalek
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
__version__ = 'r$Rev$'
<commit_msg>Make sure the version is displayed as r<revision number> if the information about the package version is not available.<commit_after> | # Copyright (c) 2011 Vit Suchomel and Jan Pomikalek
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
import re
__version__ = re.sub('.*(\d+).*', r'rev\1', '$Rev$')
| # Copyright (c) 2011 Vit Suchomel and Jan Pomikalek
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
__version__ = 'r$Rev$'
Make sure the version is displayed as r<revision number> if the information about the package version is not available.# Copyright (c) 2011 Vit Suchomel and Jan Pomikalek
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
import re
__version__ = re.sub('.*(\d+).*', r'rev\1', '$Rev$')
| <commit_before># Copyright (c) 2011 Vit Suchomel and Jan Pomikalek
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
__version__ = 'r$Rev$'
<commit_msg>Make sure the version is displayed as r<revision number> if the information about the package version is not available.<commit_after># Copyright (c) 2011 Vit Suchomel and Jan Pomikalek
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
__version__ = 'v' + __import__('pkg_resources').get_distribution('chared').version
except:
import re
__version__ = re.sub('.*(\d+).*', r'rev\1', '$Rev$')
|
2db4fda62c2eec2d5424448fcd57bedd91ad2e64 | test_support/test_fdbsql.py | test_support/test_fdbsql.py | DATABASES = {
'default': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_default',
'OPTIONS': {
'supports_sequence_reset': True,
'use_sequence_reset_function': True,
},
},
'other': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_other',
}
}
# And also add 'south' to ALWAYS_INSTALLED_APPS in runtests.py
SKIP_SOUTH_TESTS = False
SOUTH_DATABASE_ADAPTERS = {
'default': 'django_fdbsql.south_fdbsql'
}
SECRET_KEY = "django_tests_secret_key"
PASSWORD_HASHERS = (
# Preferred, faster for tests
'django.contrib.auth.hashers.MD5PasswordHasher',
# Required by 1.4.x tests
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
| # https://docs.djangoproject.com/en/1.7/internals/contributing/writing-code/unit-tests/#using-another-settings-module
DATABASES = {
'default': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_default',
'OPTIONS': {
'supports_sequence_reset': True,
'use_sequence_reset_function': True,
},
},
# Nothing adapter specific, only used to confirm multiple DATABASES work.
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
# And also add 'south' to ALWAYS_INSTALLED_APPS in runtests.py
SKIP_SOUTH_TESTS = False
SOUTH_DATABASE_ADAPTERS = {
'default': 'django_fdbsql.south_fdbsql'
}
SECRET_KEY = "django_tests_secret_key"
PASSWORD_HASHERS = (
# Preferred, faster for tests
'django.contrib.auth.hashers.MD5PasswordHasher',
# Required by 1.4.x tests
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
| Use sqlite3 for 'other' database test config | Use sqlite3 for 'other' database test config
| Python | mit | freyley/sql-layer-adapter-django | DATABASES = {
'default': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_default',
'OPTIONS': {
'supports_sequence_reset': True,
'use_sequence_reset_function': True,
},
},
'other': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_other',
}
}
# And also add 'south' to ALWAYS_INSTALLED_APPS in runtests.py
SKIP_SOUTH_TESTS = False
SOUTH_DATABASE_ADAPTERS = {
'default': 'django_fdbsql.south_fdbsql'
}
SECRET_KEY = "django_tests_secret_key"
PASSWORD_HASHERS = (
# Preferred, faster for tests
'django.contrib.auth.hashers.MD5PasswordHasher',
# Required by 1.4.x tests
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
Use sqlite3 for 'other' database test config | # https://docs.djangoproject.com/en/1.7/internals/contributing/writing-code/unit-tests/#using-another-settings-module
DATABASES = {
'default': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_default',
'OPTIONS': {
'supports_sequence_reset': True,
'use_sequence_reset_function': True,
},
},
# Nothing adapter specific, only used to confirm multiple DATABASES work.
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
# And also add 'south' to ALWAYS_INSTALLED_APPS in runtests.py
SKIP_SOUTH_TESTS = False
SOUTH_DATABASE_ADAPTERS = {
'default': 'django_fdbsql.south_fdbsql'
}
SECRET_KEY = "django_tests_secret_key"
PASSWORD_HASHERS = (
# Preferred, faster for tests
'django.contrib.auth.hashers.MD5PasswordHasher',
# Required by 1.4.x tests
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
| <commit_before>DATABASES = {
'default': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_default',
'OPTIONS': {
'supports_sequence_reset': True,
'use_sequence_reset_function': True,
},
},
'other': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_other',
}
}
# And also add 'south' to ALWAYS_INSTALLED_APPS in runtests.py
SKIP_SOUTH_TESTS = False
SOUTH_DATABASE_ADAPTERS = {
'default': 'django_fdbsql.south_fdbsql'
}
SECRET_KEY = "django_tests_secret_key"
PASSWORD_HASHERS = (
# Preferred, faster for tests
'django.contrib.auth.hashers.MD5PasswordHasher',
# Required by 1.4.x tests
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
<commit_msg>Use sqlite3 for 'other' database test config<commit_after> | # https://docs.djangoproject.com/en/1.7/internals/contributing/writing-code/unit-tests/#using-another-settings-module
DATABASES = {
'default': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_default',
'OPTIONS': {
'supports_sequence_reset': True,
'use_sequence_reset_function': True,
},
},
# Nothing adapter specific, only used to confirm multiple DATABASES work.
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
# And also add 'south' to ALWAYS_INSTALLED_APPS in runtests.py
SKIP_SOUTH_TESTS = False
SOUTH_DATABASE_ADAPTERS = {
'default': 'django_fdbsql.south_fdbsql'
}
SECRET_KEY = "django_tests_secret_key"
PASSWORD_HASHERS = (
# Preferred, faster for tests
'django.contrib.auth.hashers.MD5PasswordHasher',
# Required by 1.4.x tests
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
| DATABASES = {
'default': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_default',
'OPTIONS': {
'supports_sequence_reset': True,
'use_sequence_reset_function': True,
},
},
'other': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_other',
}
}
# And also add 'south' to ALWAYS_INSTALLED_APPS in runtests.py
SKIP_SOUTH_TESTS = False
SOUTH_DATABASE_ADAPTERS = {
'default': 'django_fdbsql.south_fdbsql'
}
SECRET_KEY = "django_tests_secret_key"
PASSWORD_HASHERS = (
# Preferred, faster for tests
'django.contrib.auth.hashers.MD5PasswordHasher',
# Required by 1.4.x tests
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
Use sqlite3 for 'other' database test config# https://docs.djangoproject.com/en/1.7/internals/contributing/writing-code/unit-tests/#using-another-settings-module
DATABASES = {
'default': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_default',
'OPTIONS': {
'supports_sequence_reset': True,
'use_sequence_reset_function': True,
},
},
# Nothing adapter specific, only used to confirm multiple DATABASES work.
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
# And also add 'south' to ALWAYS_INSTALLED_APPS in runtests.py
SKIP_SOUTH_TESTS = False
SOUTH_DATABASE_ADAPTERS = {
'default': 'django_fdbsql.south_fdbsql'
}
SECRET_KEY = "django_tests_secret_key"
PASSWORD_HASHERS = (
# Preferred, faster for tests
'django.contrib.auth.hashers.MD5PasswordHasher',
# Required by 1.4.x tests
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
| <commit_before>DATABASES = {
'default': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_default',
'OPTIONS': {
'supports_sequence_reset': True,
'use_sequence_reset_function': True,
},
},
'other': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_other',
}
}
# And also add 'south' to ALWAYS_INSTALLED_APPS in runtests.py
SKIP_SOUTH_TESTS = False
SOUTH_DATABASE_ADAPTERS = {
'default': 'django_fdbsql.south_fdbsql'
}
SECRET_KEY = "django_tests_secret_key"
PASSWORD_HASHERS = (
# Preferred, faster for tests
'django.contrib.auth.hashers.MD5PasswordHasher',
# Required by 1.4.x tests
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
<commit_msg>Use sqlite3 for 'other' database test config<commit_after># https://docs.djangoproject.com/en/1.7/internals/contributing/writing-code/unit-tests/#using-another-settings-module
DATABASES = {
'default': {
'ENGINE': 'django_fdbsql',
'NAME': 'django_default',
'OPTIONS': {
'supports_sequence_reset': True,
'use_sequence_reset_function': True,
},
},
# Nothing adapter specific, only used to confirm multiple DATABASES work.
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
# And also add 'south' to ALWAYS_INSTALLED_APPS in runtests.py
SKIP_SOUTH_TESTS = False
SOUTH_DATABASE_ADAPTERS = {
'default': 'django_fdbsql.south_fdbsql'
}
SECRET_KEY = "django_tests_secret_key"
PASSWORD_HASHERS = (
# Preferred, faster for tests
'django.contrib.auth.hashers.MD5PasswordHasher',
# Required by 1.4.x tests
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
|
d63463d8dc8acb4445b01fcebeeb6a20ea1d7b9b | tests/unit/test_template.py | tests/unit/test_template.py | import json
import pytest
from path import Path
from formica import cli
@pytest.fixture
def logger(mocker):
return mocker.patch('formica.cli.logger')
def test_template_calls_template(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template'])
logger.info.assert_called()
assert {"Description": "Test"} == json.loads(logger.info.call_args[0][0])
| import json
import yaml
import pytest
from path import Path
from formica import cli
@pytest.fixture
def logger(mocker):
return mocker.patch('formica.cli.logger')
def test_template_calls_template(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template'])
logger.info.assert_called()
assert {"Description": "Test"} == json.loads(logger.info.call_args[0][0])
def test_template_calls_template_with_yaml(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template', '--yaml'])
logger.info.assert_called()
assert {"Description": "Test"} == yaml.load(logger.info.call_args[0][0])
| Test yaml argument for template command | Test yaml argument for template command
| Python | mit | flomotlik/formica | import json
import pytest
from path import Path
from formica import cli
@pytest.fixture
def logger(mocker):
return mocker.patch('formica.cli.logger')
def test_template_calls_template(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template'])
logger.info.assert_called()
assert {"Description": "Test"} == json.loads(logger.info.call_args[0][0])
Test yaml argument for template command | import json
import yaml
import pytest
from path import Path
from formica import cli
@pytest.fixture
def logger(mocker):
return mocker.patch('formica.cli.logger')
def test_template_calls_template(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template'])
logger.info.assert_called()
assert {"Description": "Test"} == json.loads(logger.info.call_args[0][0])
def test_template_calls_template_with_yaml(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template', '--yaml'])
logger.info.assert_called()
assert {"Description": "Test"} == yaml.load(logger.info.call_args[0][0])
| <commit_before>import json
import pytest
from path import Path
from formica import cli
@pytest.fixture
def logger(mocker):
return mocker.patch('formica.cli.logger')
def test_template_calls_template(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template'])
logger.info.assert_called()
assert {"Description": "Test"} == json.loads(logger.info.call_args[0][0])
<commit_msg>Test yaml argument for template command<commit_after> | import json
import yaml
import pytest
from path import Path
from formica import cli
@pytest.fixture
def logger(mocker):
return mocker.patch('formica.cli.logger')
def test_template_calls_template(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template'])
logger.info.assert_called()
assert {"Description": "Test"} == json.loads(logger.info.call_args[0][0])
def test_template_calls_template_with_yaml(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template', '--yaml'])
logger.info.assert_called()
assert {"Description": "Test"} == yaml.load(logger.info.call_args[0][0])
| import json
import pytest
from path import Path
from formica import cli
@pytest.fixture
def logger(mocker):
return mocker.patch('formica.cli.logger')
def test_template_calls_template(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template'])
logger.info.assert_called()
assert {"Description": "Test"} == json.loads(logger.info.call_args[0][0])
Test yaml argument for template commandimport json
import yaml
import pytest
from path import Path
from formica import cli
@pytest.fixture
def logger(mocker):
return mocker.patch('formica.cli.logger')
def test_template_calls_template(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template'])
logger.info.assert_called()
assert {"Description": "Test"} == json.loads(logger.info.call_args[0][0])
def test_template_calls_template_with_yaml(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template', '--yaml'])
logger.info.assert_called()
assert {"Description": "Test"} == yaml.load(logger.info.call_args[0][0])
| <commit_before>import json
import pytest
from path import Path
from formica import cli
@pytest.fixture
def logger(mocker):
return mocker.patch('formica.cli.logger')
def test_template_calls_template(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template'])
logger.info.assert_called()
assert {"Description": "Test"} == json.loads(logger.info.call_args[0][0])
<commit_msg>Test yaml argument for template command<commit_after>import json
import yaml
import pytest
from path import Path
from formica import cli
@pytest.fixture
def logger(mocker):
return mocker.patch('formica.cli.logger')
def test_template_calls_template(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template'])
logger.info.assert_called()
assert {"Description": "Test"} == json.loads(logger.info.call_args[0][0])
def test_template_calls_template_with_yaml(tmpdir, logger):
with Path(tmpdir):
with open('test.template.json', 'w') as f:
f.write('{"Description": "{{ \'test\' | title }}"}')
cli.main(['template', '--yaml'])
logger.info.assert_called()
assert {"Description": "Test"} == yaml.load(logger.info.call_args[0][0])
|
1de93393e402d1387e7d1c8057c6010c12a21848 | tests/window/window_util.py | tests/window/window_util.py | #!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width + 1, window.height - 1)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
| #!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width, window.height)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
| Fix window test border _again_ (more fixed). | Fix window test border _again_ (more fixed).
| Python | bsd-3-clause | shaileshgoogler/pyglet,mpasternak/pyglet-fix-issue-518-522,mpasternak/pyglet-fix-issue-518-522,odyaka341/pyglet,cledio66/pyglet,kmonsoor/pyglet,Alwnikrotikz/pyglet,gdkar/pyglet,Alwnikrotikz/pyglet,mpasternak/pyglet-fix-issue-552,arifgursel/pyglet,Alwnikrotikz/pyglet,google-code-export/pyglet,gdkar/pyglet,cledio66/pyglet,Austin503/pyglet,mpasternak/michaldtz-fix-552,mpasternak/pyglet-fix-issue-518-522,mpasternak/michaldtz-fix-552,mpasternak/michaldtz-fixes-518-522,mpasternak/pyglet-fix-issue-552,shaileshgoogler/pyglet,Alwnikrotikz/pyglet,kmonsoor/pyglet,shaileshgoogler/pyglet,xshotD/pyglet,xshotD/pyglet,odyaka341/pyglet,mpasternak/michaldtz-fixes-518-522,cledio66/pyglet,xshotD/pyglet,xshotD/pyglet,kmonsoor/pyglet,Austin503/pyglet,cledio66/pyglet,google-code-export/pyglet,xshotD/pyglet,google-code-export/pyglet,mpasternak/pyglet-fix-issue-518-522,odyaka341/pyglet,Austin503/pyglet,shaileshgoogler/pyglet,mpasternak/michaldtz-fix-552,Austin503/pyglet,mpasternak/michaldtz-fix-552,kmonsoor/pyglet,qbektrix/pyglet,mpasternak/pyglet-fix-issue-552,shaileshgoogler/pyglet,Austin503/pyglet,arifgursel/pyglet,google-code-export/pyglet,arifgursel/pyglet,cledio66/pyglet,Alwnikrotikz/pyglet,odyaka341/pyglet,gdkar/pyglet,qbektrix/pyglet,odyaka341/pyglet,google-code-export/pyglet,mpasternak/michaldtz-fixes-518-522,mpasternak/michaldtz-fixes-518-522,kmonsoor/pyglet,qbektrix/pyglet,mpasternak/pyglet-fix-issue-552,qbektrix/pyglet,arifgursel/pyglet,arifgursel/pyglet,qbektrix/pyglet,gdkar/pyglet,gdkar/pyglet | #!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width + 1, window.height - 1)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
Fix window test border _again_ (more fixed). | #!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width, window.height)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
| <commit_before>#!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width + 1, window.height - 1)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
<commit_msg>Fix window test border _again_ (more fixed).<commit_after> | #!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width, window.height)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
| #!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width + 1, window.height - 1)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
Fix window test border _again_ (more fixed).#!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width, window.height)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
| <commit_before>#!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width + 1, window.height - 1)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
<commit_msg>Fix window test border _again_ (more fixed).<commit_after>#!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
    """Clear to black and outline the window's client area: a red
    rectangle at its outer edge and a green one one pixel inside it."""
    glClearColor(0, 0, 0, 1)
    glClear(GL_COLOR_BUFFER_BIT)
    # Pixel-aligned orthographic projection covering the client area.
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    glOrtho(0, window.width, 0, window.height, -1, 1)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()

    def outline(left, bottom, right, top):
        # Trace the four corners as a closed line loop.
        glBegin(GL_LINE_LOOP)
        for vx, vy in ((left, bottom), (right, bottom),
                       (right, top), (left, top)):
            glVertex2f(vx, vy)
        glEnd()

    width, height = window.width, window.height
    glColor3f(1, 0, 0)
    outline(-1, -1, width, height)
    glColor3f(0, 1, 0)
    outline(0, 0, width - 1, height - 1)
|
cd71740daaf4f1f770b7d6959e2865ed50b76bd7 | tilequeue/query/__init__.py | tilequeue/query/__init__.py | import tilequeue.query.postgres
# BUG FIX: "postgres" alone is unbound here -- "import tilequeue.query.postgres"
# binds only the top-level "tilequeue" name (flake8 F821); use the fully
# qualified attribute path.
make_db_data_fetcher = tilequeue.query.postgres.make_db_data_fetcher
| import tilequeue.query.postgres
make_db_data_fetcher = tilequeue.query.postgres.make_db_data_fetcher
| Fix error identified by flake8. | Fix error identified by flake8.
| Python | mit | mapzen/tilequeue,tilezen/tilequeue | import tilequeue.query.postgres
# BUG FIX: "postgres" alone is unbound here -- "import tilequeue.query.postgres"
# binds only the top-level "tilequeue" name (flake8 F821); use the fully
# qualified attribute path.
make_db_data_fetcher = tilequeue.query.postgres.make_db_data_fetcher
Fix error identified by flake8. | import tilequeue.query.postgres
make_db_data_fetcher = tilequeue.query.postgres.make_db_data_fetcher
| <commit_before>import tilequeue.query.postgres
# BUG FIX: "postgres" alone is unbound here -- "import tilequeue.query.postgres"
# binds only the top-level "tilequeue" name (flake8 F821); use the fully
# qualified attribute path.
make_db_data_fetcher = tilequeue.query.postgres.make_db_data_fetcher
<commit_msg>Fix error identified by flake8.<commit_after> | import tilequeue.query.postgres
make_db_data_fetcher = tilequeue.query.postgres.make_db_data_fetcher
| import tilequeue.query.postgres
make_db_data_fetcher = postgres.make_db_data_fetcher
Fix error identified by flake8.import tilequeue.query.postgres
make_db_data_fetcher = tilequeue.query.postgres.make_db_data_fetcher
| <commit_before>import tilequeue.query.postgres
make_db_data_fetcher = postgres.make_db_data_fetcher
<commit_msg>Fix error identified by flake8.<commit_after>import tilequeue.query.postgres
make_db_data_fetcher = tilequeue.query.postgres.make_db_data_fetcher
|
0e6646de573dc04360634828cdb3b7da8cc31d2b | cobe/instatrace.py | cobe/instatrace.py | # Copyright (C) 2010 Peter Teichman
import math
import time
def singleton(cls):
    """Class decorator: replace *cls* with a zero-argument factory that
    lazily creates and then always returns one shared instance."""
    instances = {}

    def getinstance():
        if cls not in instances:
            instances[cls] = cls()
        return instances[cls]
    return getinstance


@singleton
class Instatrace:
    """Appends "name value [repr(extra)]" stat lines to a trace file.

    Tracing is disabled until init() is given a filename; while disabled
    every method is a cheap no-op.
    """

    def __init__(self):
        self._fd = None  # open trace file, or None while tracing is off

    def init(self, filename):
        """(Re)open the trace output file.

        BUG FIX: init(None) now disables tracing instead of crashing
        inside open(None, "w").
        """
        if self._fd is not None:
            self._fd.close()
        if filename is None:
            self._fd = None
        else:
            self._fd = open(filename, "w")

    def is_enabled(self):
        return self._fd is not None

    def now(self):
        """High resolution, integer now"""
        # 0 while disabled so callers can subtract timestamps unconditionally.
        if not self.is_enabled():
            return 0
        return int(time.time()*100000)

    def trace(self, statName, statValue, userData=None):
        if not self.is_enabled():
            return
        extra = ""
        if userData is not None:
            extra = " " + repr(userData)
        self._fd.write("%s %d%s\n" % (statName, statValue, extra))
        # Flush per line so output is immediately visible on disk.
        self._fd.flush()
| # Copyright (C) 2010 Peter Teichman
import math
import time
def singleton(cls):
    """Decorator that swaps *cls* for a zero-argument factory returning
    one shared, lazily constructed instance."""
    cache = {}

    def fetch():
        if cls in cache:
            return cache[cls]
        instance = cls()
        cache[cls] = instance
        return instance
    return fetch


@singleton
class Instatrace:
    """Writes "name value [repr(extra)]" stat lines to an optional trace
    file; every method is a no-op while no file is open."""

    def __init__(self):
        # No trace file until init() supplies one.
        self._fd = None

    def init(self, filename):
        """Point tracing at *filename*; passing None turns tracing off."""
        if self._fd is not None:
            self._fd.close()
        self._fd = None if filename is None else open(filename, "w")

    def is_enabled(self):
        return self._fd is not None

    def now(self):
        """High resolution, integer now"""
        return int(time.time() * 100000) if self.is_enabled() else 0

    def trace(self, statName, statValue, userData=None):
        if not self.is_enabled():
            return
        suffix = "" if userData is None else " " + repr(userData)
        self._fd.write("%s %d%s\n" % (statName, statValue, suffix))
        self._fd.flush()
| Allow Instatrace().init(None) to disable tracing at runtime | Allow Instatrace().init(None) to disable tracing at runtime
| Python | mit | meska/cobe,pteichman/cobe,DarkMio/cobe,wodim/cobe-ng,LeMagnesium/cobe,tiagochiavericosta/cobe,wodim/cobe-ng,DarkMio/cobe,LeMagnesium/cobe,meska/cobe,tiagochiavericosta/cobe,pteichman/cobe | # Copyright (C) 2010 Peter Teichman
import math
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""High resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
self._fd.flush()
Allow Instatrace().init(None) to disable tracing at runtime | # Copyright (C) 2010 Peter Teichman
import math
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
if filename is None:
self._fd = None
else:
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""High resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
self._fd.flush()
| <commit_before># Copyright (C) 2010 Peter Teichman
import math
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""High resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
self._fd.flush()
<commit_msg>Allow Instatrace().init(None) to disable tracing at runtime<commit_after> | # Copyright (C) 2010 Peter Teichman
import math
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
if filename is None:
self._fd = None
else:
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""High resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
self._fd.flush()
| # Copyright (C) 2010 Peter Teichman
import math
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""High resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
self._fd.flush()
Allow Instatrace().init(None) to disable tracing at runtime# Copyright (C) 2010 Peter Teichman
import math
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
if filename is None:
self._fd = None
else:
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""High resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
self._fd.flush()
| <commit_before># Copyright (C) 2010 Peter Teichman
import math
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""High resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
self._fd.flush()
<commit_msg>Allow Instatrace().init(None) to disable tracing at runtime<commit_after># Copyright (C) 2010 Peter Teichman
import math
import time
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class Instatrace:
def __init__(self):
self._fd = None
def init(self, filename):
if self._fd is not None:
self._fd.close()
if filename is None:
self._fd = None
else:
self._fd = open(filename, "w")
def is_enabled(self):
return self._fd is not None
def now(self):
"""High resolution, integer now"""
if not self.is_enabled():
return 0
return int(time.time()*100000)
def trace(self, statName, statValue, userData=None):
if not self.is_enabled():
return
extra = ""
if userData is not None:
extra = " " + repr(userData)
self._fd.write("%s %d%s\n" % (statName, statValue, extra))
self._fd.flush()
|
3d2f9087e62006f8a5f19476ae23324a4cfa7793 | regex.py | regex.py | import re
import sys
# Extract the plain text from a meTypeset NLM out.xml file and write a
# whitespace-normalised version for downstream comparison.
f = open('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r")
print("open operation complete")
fd = f.read()
s = ''
# BUG FIX: this assignment was left incomplete ("fd =", a SyntaxError).
# Strip "<...>;" spans before scanning for text runs.
fd = re.sub(r'\<.*?\>\;', ' ', fd)
pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))')
# Collect the text found between tags / numeric character references.
for e in re.findall(pattern, fd):
    s += ' '
    s += e[1]
# Normalise punctuation to spaces (apostrophes are removed outright).
s = re.sub('-', ' ', s)
s = re.sub(r'\,', ' ', s)
s = re.sub(r'\.', ' ', s)
s = re.sub('\'', '', s)
s = re.sub(r'\;', ' ', s)
# NOTE(review): this removes every letter "s", not a punctuation mark --
# kept to match the committed behavior, but it looks unintended; confirm.
s = re.sub('s', ' ', s)
# Drop parenthesised and bracketed spans entirely.
s = re.sub(r'\(.*?\)', ' ', s)
s = re.sub(r'(\[.*?\])', ' ', s)
f.close()
o = open('/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w")
o.write(s)
o.close()
| import re
import sys
# Pull the readable text out of the generated NLM XML and save a
# cleaned-up copy for comparison.
f = open('/var/local/meTypesetTests/tests/testOutput/' + sys.argv[1] + '/nlm/out.xml', "r")
print("open operation complete")
fd = f.read()
s = ''
fd = re.sub(r'\<.*?\>\;', ' ', fd)
pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))')
for match in re.findall(pattern, fd):
    s += ' ' + match[1]
# Run the clean-up substitutions from a table, in the original order.
for pat, repl in (('-', ' '), (r'\,', ' '), (r'\.', ' '), ('\'', ''),
                  (r'\;', ' '), ('s', ' '), (r'\(.*?\)', ' '),
                  (r'(\[.*?\])', ' ')):
    s = re.sub(pat, repl, s)
f.close()
o = open('/var/local/meTypesetTests/tests/regexOutput/' + sys.argv[1], "w")
o.write(s)
o.close()
| Update of work over prior couple weeks. | Update of work over prior couple weeks.
| Python | mit | jnicolls/meTypeset-Test,jnicolls/Joseph | import re
import sys
f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r")
print ("open operation complete")
fd = f.read()
s = ''
fd =
pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))')
for e in re.findall(pattern, fd):
s += ' '
s += e[1]
s = re.sub('-', ' ', s)
s = re.sub(r'\,', ' ', s)
s = re.sub(r'\.', ' ', s)
s = re.sub('\'', '', s)
s = re.sub(r'\;', ' ', s)
s = re.sub('s', ' ', s)
s = re.sub(r'\(.*?\)', ' ', s)
s = re.sub(r'(\[.*?\])', ' ', s)
f.close()
o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w")
o.write(s)
o.close()
Update of work over prior couple weeks. | import re
import sys
f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r")
print ("open operation complete")
fd = f.read()
s = ''
fd = re.sub(r'\<.*?\>\;', ' ', fd)
pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))')
for e in re.findall(pattern, fd):
s += ' '
s += e[1]
s = re.sub('-', ' ', s)
s = re.sub(r'\,', ' ', s)
s = re.sub(r'\.', ' ', s)
s = re.sub('\'', '', s)
s = re.sub(r'\;', ' ', s)
s = re.sub('s', ' ', s)
s = re.sub(r'\(.*?\)', ' ', s)
s = re.sub(r'(\[.*?\])', ' ', s)
f.close()
o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w")
o.write(s)
o.close()
| <commit_before>import re
import sys
f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r")
print ("open operation complete")
fd = f.read()
s = ''
fd =
pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))')
for e in re.findall(pattern, fd):
s += ' '
s += e[1]
s = re.sub('-', ' ', s)
s = re.sub(r'\,', ' ', s)
s = re.sub(r'\.', ' ', s)
s = re.sub('\'', '', s)
s = re.sub(r'\;', ' ', s)
s = re.sub('s', ' ', s)
s = re.sub(r'\(.*?\)', ' ', s)
s = re.sub(r'(\[.*?\])', ' ', s)
f.close()
o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w")
o.write(s)
o.close()
<commit_msg>Update of work over prior couple weeks.<commit_after> | import re
import sys
f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r")
print ("open operation complete")
fd = f.read()
s = ''
fd = re.sub(r'\<.*?\>\;', ' ', fd)
pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))')
for e in re.findall(pattern, fd):
s += ' '
s += e[1]
s = re.sub('-', ' ', s)
s = re.sub(r'\,', ' ', s)
s = re.sub(r'\.', ' ', s)
s = re.sub('\'', '', s)
s = re.sub(r'\;', ' ', s)
s = re.sub('s', ' ', s)
s = re.sub(r'\(.*?\)', ' ', s)
s = re.sub(r'(\[.*?\])', ' ', s)
f.close()
o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w")
o.write(s)
o.close()
| import re
import sys
f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r")
print ("open operation complete")
fd = f.read()
s = ''
fd =
pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))')
for e in re.findall(pattern, fd):
s += ' '
s += e[1]
s = re.sub('-', ' ', s)
s = re.sub(r'\,', ' ', s)
s = re.sub(r'\.', ' ', s)
s = re.sub('\'', '', s)
s = re.sub(r'\;', ' ', s)
s = re.sub('s', ' ', s)
s = re.sub(r'\(.*?\)', ' ', s)
s = re.sub(r'(\[.*?\])', ' ', s)
f.close()
o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w")
o.write(s)
o.close()
Update of work over prior couple weeks.import re
import sys
f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r")
print ("open operation complete")
fd = f.read()
s = ''
fd = re.sub(r'\<.*?\>\;', ' ', fd)
pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))')
for e in re.findall(pattern, fd):
s += ' '
s += e[1]
s = re.sub('-', ' ', s)
s = re.sub(r'\,', ' ', s)
s = re.sub(r'\.', ' ', s)
s = re.sub('\'', '', s)
s = re.sub(r'\;', ' ', s)
s = re.sub('s', ' ', s)
s = re.sub(r'\(.*?\)', ' ', s)
s = re.sub(r'(\[.*?\])', ' ', s)
f.close()
o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w")
o.write(s)
o.close()
| <commit_before>import re
import sys
f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r")
print ("open operation complete")
fd = f.read()
s = ''
fd =
pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))')
for e in re.findall(pattern, fd):
s += ' '
s += e[1]
s = re.sub('-', ' ', s)
s = re.sub(r'\,', ' ', s)
s = re.sub(r'\.', ' ', s)
s = re.sub('\'', '', s)
s = re.sub(r'\;', ' ', s)
s = re.sub('s', ' ', s)
s = re.sub(r'\(.*?\)', ' ', s)
s = re.sub(r'(\[.*?\])', ' ', s)
f.close()
o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w")
o.write(s)
o.close()
<commit_msg>Update of work over prior couple weeks.<commit_after>import re
import sys
f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r")
print ("open operation complete")
fd = f.read()
s = ''
fd = re.sub(r'\<.*?\>\;', ' ', fd)
pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))')
for e in re.findall(pattern, fd):
s += ' '
s += e[1]
s = re.sub('-', ' ', s)
s = re.sub(r'\,', ' ', s)
s = re.sub(r'\.', ' ', s)
s = re.sub('\'', '', s)
s = re.sub(r'\;', ' ', s)
s = re.sub('s', ' ', s)
s = re.sub(r'\(.*?\)', ' ', s)
s = re.sub(r'(\[.*?\])', ' ', s)
f.close()
o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w")
o.write(s)
o.close()
|
0946379d23131aeec07dc29bebd4e57d95298d00 | recipes/sos-notebook/run_test.py | recipes/sos-notebook/run_test.py | # Test that sos kernel is installed
import jupyter_client

# Verify that the 'sos' kernel spec was registered by the install; if it
# is missing, print the specs that *are* present to make the CI failure
# diagnosable.
try:
    jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
    print('sos kernel was not installed')
    print('The following kernels are installed:')
    print('jupyter_client.kernelspec.find_kernel_specs()')
    print(jupyter_client.kernelspec.find_kernel_specs())

# Test that sos kernel is functional
import unittest
# BUG FIX: import sos_notebook.test_utils *before* ipykernel.tests.utils
# so the longer TIMEOUT it defines is the one in effect during the test
# (per the upstream fix note).
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output


class TestSoSKernel(unittest.TestCase):
    def testKernel(self):
        # Run a trivial statement through a live sos kernel and check the
        # captured stdout/stderr streams.
        with sos_kernel() as kc:
            execute(kc=kc, code='a = 1\nprint(a)')
            stdout, stderr = assemble_output(kc.iopub_channel)
            self.assertEqual(stderr, '')
            self.assertEqual(stdout.strip(), '1')


if __name__ == '__main__':
    unittest.main()
| # Test that sos kernel is installed
import jupyter_client

# Check that the 'sos' kernel spec was registered by the install; when it
# is missing, print the specs that *are* present to aid debugging.
try:
    jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
    print('sos kernel was not installed')
    print('The following kernels are installed:')
    print('jupyter_client.kernelspec.find_kernel_specs()')
    print(jupyter_client.kernelspec.find_kernel_specs())

# Test that sos kernel is functional
import unittest
# NOTE: sos_notebook.test_utils is imported before ipykernel.tests.utils
# so the longer TIMEOUT it defines takes effect (per the fix note in the
# surrounding commit) -- keep this import order.
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output

class TestSoSKernel(unittest.TestCase):
    def testKernel(self):
        # Execute a trivial script in a live sos kernel and assert on the
        # captured output streams.
        with sos_kernel() as kc:
            execute(kc=kc, code='a = 1\nprint(a)')
            stdout, stderr = assemble_output(kc.iopub_channel)
            self.assertEqual(stderr, '')
            self.assertEqual(stdout.strip(), '1')

if __name__ == '__main__':
    unittest.main()
| Use longer TIMEOUT defined in sos_notebook.test_utils. | Use longer TIMEOUT defined in sos_notebook.test_utils.
| Python | bsd-3-clause | SylvainCorlay/staged-recipes,synapticarbors/staged-recipes,jochym/staged-recipes,mcs07/staged-recipes,ceholden/staged-recipes,igortg/staged-recipes,goanpeca/staged-recipes,johanneskoester/staged-recipes,isuruf/staged-recipes,scopatz/staged-recipes,chrisburr/staged-recipes,petrushy/staged-recipes,asmeurer/staged-recipes,Juanlu001/staged-recipes,dschreij/staged-recipes,synapticarbors/staged-recipes,mariusvniekerk/staged-recipes,basnijholt/staged-recipes,mariusvniekerk/staged-recipes,mcs07/staged-recipes,conda-forge/staged-recipes,birdsarah/staged-recipes,birdsarah/staged-recipes,jjhelmus/staged-recipes,cpaulik/staged-recipes,petrushy/staged-recipes,jjhelmus/staged-recipes,SylvainCorlay/staged-recipes,jochym/staged-recipes,dschreij/staged-recipes,jakirkham/staged-recipes,hadim/staged-recipes,jakirkham/staged-recipes,patricksnape/staged-recipes,patricksnape/staged-recipes,igortg/staged-recipes,scopatz/staged-recipes,stuertz/staged-recipes,johanneskoester/staged-recipes,ceholden/staged-recipes,ReimarBauer/staged-recipes,hadim/staged-recipes,basnijholt/staged-recipes,kwilcox/staged-recipes,chrisburr/staged-recipes,kwilcox/staged-recipes,cpaulik/staged-recipes,stuertz/staged-recipes,goanpeca/staged-recipes,ocefpaf/staged-recipes,isuruf/staged-recipes,Juanlu001/staged-recipes,conda-forge/staged-recipes,ReimarBauer/staged-recipes,ocefpaf/staged-recipes,asmeurer/staged-recipes | # Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
Use longer TIMEOUT defined in sos_notebook.test_utils. | # Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
| <commit_before># Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
<commit_msg>Use longer TIMEOUT defined in sos_notebook.test_utils.<commit_after> | # Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
| # Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
Use longer TIMEOUT defined in sos_notebook.test_utils.# Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
| <commit_before># Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
<commit_msg>Use longer TIMEOUT defined in sos_notebook.test_utils.<commit_after># Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\nprint(a)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '1')
if __name__ == '__main__':
unittest.main()
|
c54ea6322177f8665173ae0faa2d34d37a70dea6 | setup.py | setup.py | from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
# Skyfield package metadata.  Version and summary line are read straight
# from the package so they cannot drift from the code.
setup(
    name='skyfield',
    version=skyfield.__version__,
    description=skyfield.__doc__.split('\n', 1)[0],
    long_description=open('README.rst', 'rb').read().decode('utf-8'),
    license='MIT',
    author='Brandon Rhodes',
    author_email='brandon@rhodesmill.org',
    url='http://github.com/brandon-rhodes/python-skyfield/',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Education',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Scientific/Engineering :: Astronomy',
    ],
    packages=[
        'skyfield',
        'skyfield.data',
        'skyfield.tests',
    ],
    package_data={
        'skyfield': ['documentation/*.rst'],
        'skyfield.data': ['*.npy', '*.txt'],
    },
    install_requires=[
        'jplephem>=2.3',
        'numpy',
        'sgp4>=1.4',
    ],
    # pytz is needed only by the test suite; "tests_require" is the
    # setuptools keyword (a misspelling would be silently ignored).
    tests_require=[
        'pytz',
    ],
)
| from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst', 'rb').read().decode('utf-8'),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
],
test_requires=[
'pytz',
],
)
| Add pytz as a test requirement | Add pytz as a test requirement
| Python | mit | skyfielders/python-skyfield,skyfielders/python-skyfield | from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst', 'rb').read().decode('utf-8'),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
Add pytz as a test requirement | from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst', 'rb').read().decode('utf-8'),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
],
test_requires=[
'pytz',
],
)
| <commit_before>from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst', 'rb').read().decode('utf-8'),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
<commit_msg>Add pytz as a test requirement<commit_after> | from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst', 'rb').read().decode('utf-8'),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
],
test_requires=[
'pytz',
],
)
| from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst', 'rb').read().decode('utf-8'),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
Add pytz as a test requirementfrom distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst', 'rb').read().decode('utf-8'),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
],
test_requires=[
'pytz',
],
)
| <commit_before>from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst', 'rb').read().decode('utf-8'),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
<commit_msg>Add pytz as a test requirement<commit_after>from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst', 'rb').read().decode('utf-8'),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
],
test_requires=[
'pytz',
],
)
|
3581c3c71bdf3ff84961df4b328f0bfc2adf0bc7 | apps/provider/urls.py | apps/provider/urls.py | from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practitioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
| from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practitioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
url(r'^fhir/practitioner/update$', fhir_practitioner_update, name="fhir_practitioner_update"),
url(r'^fhir/organization/update$', fhir_organization_update, name="fhir_organization_update"),
)
| Add url for update vs. push for pract and org | Add url for update vs. push for pract and org
| Python | apache-2.0 | TransparentHealth/hhs_oauth_client,TransparentHealth/hhs_oauth_client,TransparentHealth/hhs_oauth_client,TransparentHealth/hhs_oauth_client | from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practitioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
Add url for update vs. push for pract and org | from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practitioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
url(r'^fhir/practitioner/update$', fhir_practitioner_update, name="fhir_practitioner_update"),
url(r'^fhir/organization/update$', fhir_organization_update, name="fhir_organization_update"),
)
| <commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practitioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
<commit_msg>Add url for update vs. push for pract and org<commit_after> | from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practitioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
url(r'^fhir/practitioner/update$', fhir_practitioner_update, name="fhir_practitioner_update"),
url(r'^fhir/organization/update$', fhir_organization_update, name="fhir_organization_update"),
)
| from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practitioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
Add url for update vs. push for pract and orgfrom __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practitioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
url(r'^fhir/practitioner/update$', fhir_practitioner_update, name="fhir_practitioner_update"),
url(r'^fhir/organization/update$', fhir_organization_update, name="fhir_organization_update"),
)
| <commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practitioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
<commit_msg>Add url for update vs. push for pract and org<commit_after>from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practitioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
url(r'^fhir/practitioner/update$', fhir_practitioner_update, name="fhir_practitioner_update"),
url(r'^fhir/organization/update$', fhir_organization_update, name="fhir_organization_update"),
)
|
148866c1c2b18afcc6da5c55632d20bfdec4004a | setup.py | setup.py | #!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
from setuptools import setup
import pycybersource
setup(
name='pycybersource',
version=pycybersource.__version__,
description='A light wrapper for Cybersource SOAP Toolkit API',
author='Eric Bartels',
author_email='ebartels@gmail.com',
url='',
packages=['pycybersource'],
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='cybersource payment soap suds api wrapper',
requires=['suds'],
install_requires=['suds-jurko>=0.6'],
test_suite='pycybersource.tests',
)
| #!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
from setuptools import setup
setup(
name='pycybersource',
version='0.1.2alpha',
description='A light wrapper for Cybersource SOAP Toolkit API',
author='Eric Bartels',
author_email='ebartels@gmail.com',
url='',
packages=['pycybersource'],
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='cybersource payment soap suds api wrapper',
requires=['suds'],
install_requires=['suds-jurko>=0.6'],
test_suite='pycybersource.tests',
)
| Fix for installing with PIP | Fix for installing with PIP | Python | bsd-3-clause | SideStudios/pycybersource | #!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
from setuptools import setup
import pycybersource
setup(
name='pycybersource',
version=pycybersource.__version__,
description='A light wrapper for Cybersource SOAP Toolkit API',
author='Eric Bartels',
author_email='ebartels@gmail.com',
url='',
packages=['pycybersource'],
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='cybersource payment soap suds api wrapper',
requires=['suds'],
install_requires=['suds-jurko>=0.6'],
test_suite='pycybersource.tests',
)
Fix for installing with PIP | #!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
from setuptools import setup
setup(
name='pycybersource',
version='0.1.2alpha',
description='A light wrapper for Cybersource SOAP Toolkit API',
author='Eric Bartels',
author_email='ebartels@gmail.com',
url='',
packages=['pycybersource'],
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='cybersource payment soap suds api wrapper',
requires=['suds'],
install_requires=['suds-jurko>=0.6'],
test_suite='pycybersource.tests',
)
| <commit_before>#!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
from setuptools import setup
import pycybersource
setup(
name='pycybersource',
version=pycybersource.__version__,
description='A light wrapper for Cybersource SOAP Toolkit API',
author='Eric Bartels',
author_email='ebartels@gmail.com',
url='',
packages=['pycybersource'],
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='cybersource payment soap suds api wrapper',
requires=['suds'],
install_requires=['suds-jurko>=0.6'],
test_suite='pycybersource.tests',
)
<commit_msg>Fix for installing with PIP<commit_after> | #!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
from setuptools import setup
setup(
name='pycybersource',
version='0.1.2alpha',
description='A light wrapper for Cybersource SOAP Toolkit API',
author='Eric Bartels',
author_email='ebartels@gmail.com',
url='',
packages=['pycybersource'],
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='cybersource payment soap suds api wrapper',
requires=['suds'],
install_requires=['suds-jurko>=0.6'],
test_suite='pycybersource.tests',
)
| #!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
from setuptools import setup
import pycybersource
setup(
name='pycybersource',
version=pycybersource.__version__,
description='A light wrapper for Cybersource SOAP Toolkit API',
author='Eric Bartels',
author_email='ebartels@gmail.com',
url='',
packages=['pycybersource'],
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='cybersource payment soap suds api wrapper',
requires=['suds'],
install_requires=['suds-jurko>=0.6'],
test_suite='pycybersource.tests',
)
Fix for installing with PIP#!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
from setuptools import setup
setup(
name='pycybersource',
version='0.1.2alpha',
description='A light wrapper for Cybersource SOAP Toolkit API',
author='Eric Bartels',
author_email='ebartels@gmail.com',
url='',
packages=['pycybersource'],
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='cybersource payment soap suds api wrapper',
requires=['suds'],
install_requires=['suds-jurko>=0.6'],
test_suite='pycybersource.tests',
)
| <commit_before>#!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
from setuptools import setup
import pycybersource
setup(
name='pycybersource',
version=pycybersource.__version__,
description='A light wrapper for Cybersource SOAP Toolkit API',
author='Eric Bartels',
author_email='ebartels@gmail.com',
url='',
packages=['pycybersource'],
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='cybersource payment soap suds api wrapper',
requires=['suds'],
install_requires=['suds-jurko>=0.6'],
test_suite='pycybersource.tests',
)
<commit_msg>Fix for installing with PIP<commit_after>#!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
from setuptools import setup
setup(
name='pycybersource',
version='0.1.2alpha',
description='A light wrapper for Cybersource SOAP Toolkit API',
author='Eric Bartels',
author_email='ebartels@gmail.com',
url='',
packages=['pycybersource'],
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='cybersource payment soap suds api wrapper',
requires=['suds'],
install_requires=['suds-jurko>=0.6'],
test_suite='pycybersource.tests',
)
|
f13f80db99ed43479336b116e38512e3566e4623 | setup.py | setup.py | import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.1.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
| import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.1.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
| Enable markdown for PyPI README | Enable markdown for PyPI README | Python | bsd-3-clause | consbio/parserutils | import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.1.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
Enable markdown for PyPI README | import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.1.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
| <commit_before>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.1.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
<commit_msg>Enable markdown for PyPI README<commit_after> | import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.1.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
| import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.1.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
Enable markdown for PyPI READMEimport subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.1.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
| <commit_before>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.1.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
<commit_msg>Enable markdown for PyPI README<commit_after>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.1.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
b7da1cac55665b71678abbda144c9f589e6c8b11 | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
version = '1.3'
long_desc = """
nose-perfdump is a Nose plugin that collects per-test performance metrics into an
SQLite3 database and reports the slowest tests, test files, and total time
spent in tests. It is designed to make profiling tests to improve their speed
easier.
[Github](https://github.com/etscrivner/nose-perfdump)
"""
setup(
name='nose-perfdump',
version=version,
description='Dump per-test performance metrics to an SQLite database for querying.',
long_description=long_desc,
author='Eric Scrivner',
keywords='nose,nose plugin,profiler,profiling,tests,unittest',
install_requires=['nose'],
author_email='eric.t.scrivner@gmail.com',
url='https://github.com/etscrivner/nose-perfdump',
license='BSD',
packages=find_packages(),
include_package_data=True,
entry_points={
'nose.plugins.0.10': [
'perfdump = perfdump:PerfDumpPlugin'
],
'console_scripts': [
'perfdump.console:main'
]
}
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
version = '1.3'
long_desc = """
nose-perfdump is a Nose plugin that collects per-test performance metrics into an
SQLite3 database and reports the slowest tests, test files, and total time
spent in tests. It is designed to make profiling tests to improve their speed
easier.
[Github](https://github.com/etscrivner/nose-perfdump)
"""
setup(
name='nose-perfdump',
version=version,
description='Dump per-test performance metrics to an SQLite database for querying.',
long_description=long_desc,
author='Eric Scrivner',
keywords='nose,nose plugin,profiler,profiling,tests,unittest',
install_requires=['nose', 'pyparsing'],
author_email='eric.t.scrivner@gmail.com',
url='https://github.com/etscrivner/nose-perfdump',
license='BSD',
packages=find_packages(),
include_package_data=True,
entry_points={
'nose.plugins.0.10': [
'perfdump = perfdump:PerfDumpPlugin'
],
'console_scripts': [
'perfdump.console:main'
]
}
)
| Add Additional Dependencies To Setup | Add Additional Dependencies To Setup
| Python | bsd-3-clause | etscrivner/nose-perfdump | #!/usr/bin/env python
from setuptools import setup, find_packages
version = '1.3'
long_desc = """
nose-perfdump is a Nose plugin that collects per-test performance metrics into an
SQLite3 database and reports the slowest tests, test files, and total time
spent in tests. It is designed to make profiling tests to improve their speed
easier.
[Github](https://github.com/etscrivner/nose-perfdump)
"""
setup(
name='nose-perfdump',
version=version,
description='Dump per-test performance metrics to an SQLite database for querying.',
long_description=long_desc,
author='Eric Scrivner',
keywords='nose,nose plugin,profiler,profiling,tests,unittest',
install_requires=['nose'],
author_email='eric.t.scrivner@gmail.com',
url='https://github.com/etscrivner/nose-perfdump',
license='BSD',
packages=find_packages(),
include_package_data=True,
entry_points={
'nose.plugins.0.10': [
'perfdump = perfdump:PerfDumpPlugin'
],
'console_scripts': [
'perfdump.console:main'
]
}
)
Add Additional Dependencies To Setup | #!/usr/bin/env python
from setuptools import setup, find_packages
version = '1.3'
long_desc = """
nose-perfdump is a Nose plugin that collects per-test performance metrics into an
SQLite3 database and reports the slowest tests, test files, and total time
spent in tests. It is designed to make profiling tests to improve their speed
easier.
[Github](https://github.com/etscrivner/nose-perfdump)
"""
setup(
name='nose-perfdump',
version=version,
description='Dump per-test performance metrics to an SQLite database for querying.',
long_description=long_desc,
author='Eric Scrivner',
keywords='nose,nose plugin,profiler,profiling,tests,unittest',
install_requires=['nose', 'pyparsing'],
author_email='eric.t.scrivner@gmail.com',
url='https://github.com/etscrivner/nose-perfdump',
license='BSD',
packages=find_packages(),
include_package_data=True,
entry_points={
'nose.plugins.0.10': [
'perfdump = perfdump:PerfDumpPlugin'
],
'console_scripts': [
'perfdump.console:main'
]
}
)
| <commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
version = '1.3'
long_desc = """
nose-perfdump is a Nose plugin that collects per-test performance metrics into an
SQLite3 database and reports the slowest tests, test files, and total time
spent in tests. It is designed to make profiling tests to improve their speed
easier.
[Github](https://github.com/etscrivner/nose-perfdump)
"""
setup(
name='nose-perfdump',
version=version,
description='Dump per-test performance metrics to an SQLite database for querying.',
long_description=long_desc,
author='Eric Scrivner',
keywords='nose,nose plugin,profiler,profiling,tests,unittest',
install_requires=['nose'],
author_email='eric.t.scrivner@gmail.com',
url='https://github.com/etscrivner/nose-perfdump',
license='BSD',
packages=find_packages(),
include_package_data=True,
entry_points={
'nose.plugins.0.10': [
'perfdump = perfdump:PerfDumpPlugin'
],
'console_scripts': [
'perfdump.console:main'
]
}
)
<commit_msg>Add Additional Dependencies To Setup<commit_after> | #!/usr/bin/env python
from setuptools import setup, find_packages
version = '1.3'
long_desc = """
nose-perfdump is a Nose plugin that collects per-test performance metrics into an
SQLite3 database and reports the slowest tests, test files, and total time
spent in tests. It is designed to make profiling tests to improve their speed
easier.
[Github](https://github.com/etscrivner/nose-perfdump)
"""
setup(
name='nose-perfdump',
version=version,
description='Dump per-test performance metrics to an SQLite database for querying.',
long_description=long_desc,
author='Eric Scrivner',
keywords='nose,nose plugin,profiler,profiling,tests,unittest',
install_requires=['nose', 'pyparsing'],
author_email='eric.t.scrivner@gmail.com',
url='https://github.com/etscrivner/nose-perfdump',
license='BSD',
packages=find_packages(),
include_package_data=True,
entry_points={
'nose.plugins.0.10': [
'perfdump = perfdump:PerfDumpPlugin'
],
'console_scripts': [
'perfdump.console:main'
]
}
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
version = '1.3'
long_desc = """
nose-perfdump is a Nose plugin that collects per-test performance metrics into an
SQLite3 database and reports the slowest tests, test files, and total time
spent in tests. It is designed to make profiling tests to improve their speed
easier.
[Github](https://github.com/etscrivner/nose-perfdump)
"""
setup(
name='nose-perfdump',
version=version,
description='Dump per-test performance metrics to an SQLite database for querying.',
long_description=long_desc,
author='Eric Scrivner',
keywords='nose,nose plugin,profiler,profiling,tests,unittest',
install_requires=['nose'],
author_email='eric.t.scrivner@gmail.com',
url='https://github.com/etscrivner/nose-perfdump',
license='BSD',
packages=find_packages(),
include_package_data=True,
entry_points={
'nose.plugins.0.10': [
'perfdump = perfdump:PerfDumpPlugin'
],
'console_scripts': [
'perfdump.console:main'
]
}
)
Add Additional Dependencies To Setup#!/usr/bin/env python
from setuptools import setup, find_packages
version = '1.3'
long_desc = """
nose-perfdump is a Nose plugin that collects per-test performance metrics into an
SQLite3 database and reports the slowest tests, test files, and total time
spent in tests. It is designed to make profiling tests to improve their speed
easier.
[Github](https://github.com/etscrivner/nose-perfdump)
"""
setup(
name='nose-perfdump',
version=version,
description='Dump per-test performance metrics to an SQLite database for querying.',
long_description=long_desc,
author='Eric Scrivner',
keywords='nose,nose plugin,profiler,profiling,tests,unittest',
install_requires=['nose', 'pyparsing'],
author_email='eric.t.scrivner@gmail.com',
url='https://github.com/etscrivner/nose-perfdump',
license='BSD',
packages=find_packages(),
include_package_data=True,
entry_points={
'nose.plugins.0.10': [
'perfdump = perfdump:PerfDumpPlugin'
],
'console_scripts': [
'perfdump.console:main'
]
}
)
| <commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
version = '1.3'
long_desc = """
nose-perfdump is a Nose plugin that collects per-test performance metrics into an
SQLite3 database and reports the slowest tests, test files, and total time
spent in tests. It is designed to make profiling tests to improve their speed
easier.
[Github](https://github.com/etscrivner/nose-perfdump)
"""
setup(
name='nose-perfdump',
version=version,
description='Dump per-test performance metrics to an SQLite database for querying.',
long_description=long_desc,
author='Eric Scrivner',
keywords='nose,nose plugin,profiler,profiling,tests,unittest',
install_requires=['nose'],
author_email='eric.t.scrivner@gmail.com',
url='https://github.com/etscrivner/nose-perfdump',
license='BSD',
packages=find_packages(),
include_package_data=True,
entry_points={
'nose.plugins.0.10': [
'perfdump = perfdump:PerfDumpPlugin'
],
'console_scripts': [
'perfdump.console:main'
]
}
)
<commit_msg>Add Additional Dependencies To Setup<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
version = '1.3'
long_desc = """
nose-perfdump is a Nose plugin that collects per-test performance metrics into an
SQLite3 database and reports the slowest tests, test files, and total time
spent in tests. It is designed to make profiling tests to improve their speed
easier.
[Github](https://github.com/etscrivner/nose-perfdump)
"""
setup(
name='nose-perfdump',
version=version,
description='Dump per-test performance metrics to an SQLite database for querying.',
long_description=long_desc,
author='Eric Scrivner',
keywords='nose,nose plugin,profiler,profiling,tests,unittest',
install_requires=['nose', 'pyparsing'],
author_email='eric.t.scrivner@gmail.com',
url='https://github.com/etscrivner/nose-perfdump',
license='BSD',
packages=find_packages(),
include_package_data=True,
entry_points={
'nose.plugins.0.10': [
'perfdump = perfdump:PerfDumpPlugin'
],
'console_scripts': [
'perfdump.console:main'
]
}
)
|
d04d3ac7eefce9d37e2c62d68a856fafd41d3877 | setup.py | setup.py | from setuptools import setup
setup(
name='djangorestframework-httpsignature',
version='0.2.1',
url='https://github.com/etoccalino/django-rest-framework-httpsignature',
license='LICENSE.txt',
description='HTTP Signature support for Django REST framework',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.6.2,<1.8',
'djangorestframework>=2.3.14,<2.4',
'Django>=1.6.2',
'djangorestframework>=2.3.12',
'pycrypto>=2.6.1',
'httpsig',
],
author='Elvio Toccalino',
author_email='me@etoccalino.com',
packages=['rest_framework_httpsignature'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
]
)
| from setuptools import setup
setup(
name='djangorestframework-httpsignature',
version='0.2.1',
url='https://github.com/etoccalino/django-rest-framework-httpsignature',
license='LICENSE.txt',
description='HTTP Signature support for Django REST framework',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.6.2,<1.8',
'djangorestframework>=2.3.14,<2.4',
'pycrypto>=2.6.1',
'httpsig',
],
author='Elvio Toccalino',
author_email='me@etoccalino.com',
packages=['rest_framework_httpsignature'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
]
)
| Remove duplicate dependency specifications from install_requires | Remove duplicate dependency specifications from install_requires | Python | mit | pombredanne/django-rest-framework-httpsignature,etoccalino/django-rest-framework-httpsignature,etoccalino/django-rest-framework-httpsignature,pombredanne/django-rest-framework-httpsignature | from setuptools import setup
setup(
name='djangorestframework-httpsignature',
version='0.2.1',
url='https://github.com/etoccalino/django-rest-framework-httpsignature',
license='LICENSE.txt',
description='HTTP Signature support for Django REST framework',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.6.2,<1.8',
'djangorestframework>=2.3.14,<2.4',
'Django>=1.6.2',
'djangorestframework>=2.3.12',
'pycrypto>=2.6.1',
'httpsig',
],
author='Elvio Toccalino',
author_email='me@etoccalino.com',
packages=['rest_framework_httpsignature'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
]
)
Remove duplicate dependency specifications from install_requires | from setuptools import setup
setup(
name='djangorestframework-httpsignature',
version='0.2.1',
url='https://github.com/etoccalino/django-rest-framework-httpsignature',
license='LICENSE.txt',
description='HTTP Signature support for Django REST framework',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.6.2,<1.8',
'djangorestframework>=2.3.14,<2.4',
'pycrypto>=2.6.1',
'httpsig',
],
author='Elvio Toccalino',
author_email='me@etoccalino.com',
packages=['rest_framework_httpsignature'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
]
)
| <commit_before>from setuptools import setup
setup(
name='djangorestframework-httpsignature',
version='0.2.1',
url='https://github.com/etoccalino/django-rest-framework-httpsignature',
license='LICENSE.txt',
description='HTTP Signature support for Django REST framework',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.6.2,<1.8',
'djangorestframework>=2.3.14,<2.4',
'Django>=1.6.2',
'djangorestframework>=2.3.12',
'pycrypto>=2.6.1',
'httpsig',
],
author='Elvio Toccalino',
author_email='me@etoccalino.com',
packages=['rest_framework_httpsignature'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
]
)
<commit_msg>Remove duplicate dependency specifications from install_requires<commit_after> | from setuptools import setup
setup(
name='djangorestframework-httpsignature',
version='0.2.1',
url='https://github.com/etoccalino/django-rest-framework-httpsignature',
license='LICENSE.txt',
description='HTTP Signature support for Django REST framework',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.6.2,<1.8',
'djangorestframework>=2.3.14,<2.4',
'pycrypto>=2.6.1',
'httpsig',
],
author='Elvio Toccalino',
author_email='me@etoccalino.com',
packages=['rest_framework_httpsignature'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
]
)
| from setuptools import setup
setup(
name='djangorestframework-httpsignature',
version='0.2.1',
url='https://github.com/etoccalino/django-rest-framework-httpsignature',
license='LICENSE.txt',
description='HTTP Signature support for Django REST framework',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.6.2,<1.8',
'djangorestframework>=2.3.14,<2.4',
'Django>=1.6.2',
'djangorestframework>=2.3.12',
'pycrypto>=2.6.1',
'httpsig',
],
author='Elvio Toccalino',
author_email='me@etoccalino.com',
packages=['rest_framework_httpsignature'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
]
)
Remove duplicate dependency specifications from install_requiresfrom setuptools import setup
setup(
name='djangorestframework-httpsignature',
version='0.2.1',
url='https://github.com/etoccalino/django-rest-framework-httpsignature',
license='LICENSE.txt',
description='HTTP Signature support for Django REST framework',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.6.2,<1.8',
'djangorestframework>=2.3.14,<2.4',
'pycrypto>=2.6.1',
'httpsig',
],
author='Elvio Toccalino',
author_email='me@etoccalino.com',
packages=['rest_framework_httpsignature'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
]
)
| <commit_before>from setuptools import setup
setup(
name='djangorestframework-httpsignature',
version='0.2.1',
url='https://github.com/etoccalino/django-rest-framework-httpsignature',
license='LICENSE.txt',
description='HTTP Signature support for Django REST framework',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.6.2,<1.8',
'djangorestframework>=2.3.14,<2.4',
'Django>=1.6.2',
'djangorestframework>=2.3.12',
'pycrypto>=2.6.1',
'httpsig',
],
author='Elvio Toccalino',
author_email='me@etoccalino.com',
packages=['rest_framework_httpsignature'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
]
)
<commit_msg>Remove duplicate dependency specifications from install_requires<commit_after>from setuptools import setup
setup(
name='djangorestframework-httpsignature',
version='0.2.1',
url='https://github.com/etoccalino/django-rest-framework-httpsignature',
license='LICENSE.txt',
description='HTTP Signature support for Django REST framework',
long_description=open('README.rst').read(),
install_requires=[
'Django>=1.6.2,<1.8',
'djangorestframework>=2.3.14,<2.4',
'pycrypto>=2.6.1',
'httpsig',
],
author='Elvio Toccalino',
author_email='me@etoccalino.com',
packages=['rest_framework_httpsignature'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
]
)
|
13e1fc4ca4ed80f24b6b1532d162197af8df55f2 | setup.py | setup.py | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
| import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
| Mark compatibility with Python 3.6 | Mark compatibility with Python 3.6
| Python | mit | zsiciarz/pyaavso | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
Mark compatibility with Python 3.6 | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
| <commit_before>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
<commit_msg>Mark compatibility with Python 3.6<commit_after> | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
| import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
Mark compatibility with Python 3.6import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
| <commit_before>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
<commit_msg>Mark compatibility with Python 3.6<commit_after>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
|
8da94ee88cf4bf768e16e21ad8b3626970692e27 | setup.py | setup.py | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.11',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.12',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| Update the PyPI version to 7.0.12. | Update the PyPI version to 7.0.12.
| Python | mit | Doist/todoist-python | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.11',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 7.0.12. | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.12',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| <commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.11',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 7.0.12.<commit_after> | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.12',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.11',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 7.0.12.# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.12',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| <commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.11',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 7.0.12.<commit_after># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.12',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.