| column | dtype | values |
|---|---|---|
| commit | string | lengths 40-40 |
| old_file | string | lengths 4-118 |
| new_file | string | lengths 4-118 |
| old_contents | string | lengths 0-2.94k |
| new_contents | string | lengths 1-4.43k |
| subject | string | lengths 15-444 |
| message | string | lengths 16-3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | lengths 5-43.2k |
| prompt | string | lengths 17-4.58k |
| response | string | lengths 1-4.43k |
| prompt_tagged | string | lengths 58-4.62k |
| response_tagged | string | lengths 1-4.43k |
| text | string | lengths 132-7.29k |
| text_tagged | string | lengths 173-7.33k |
c8921cf12418762c17d0b858ea2e134f292b2838
|
fireplace/cards/wog/neutral_epic.py
|
fireplace/cards/wog/neutral_epic.py
|
from ..utils import *
##
# Minions
class OG_271:
    "Scaled Nightmare"
    events = OWN_TURN_BEGIN.on(Buff(SELF, "OG_271e"))
class OG_271e:
    atk = lambda self, i: i * 2
|
from ..utils import *
##
# Minions
class OG_271:
    "Scaled Nightmare"
    events = OWN_TURN_BEGIN.on(Buff(SELF, "OG_271e"))
class OG_271e:
    atk = lambda self, i: i * 2
class OG_272:
    "Twilight Summoner"
    deathrattle = Summon(CONTROLLER, "OG_272t")
class OG_337:
    "Cyclopian Horror"
    play = Buff(SELF, "OG_337e") * Count(ENEMY_MINIONS)
OG_337e = buff(health=1)
|
Implement Twilight Summoner and Cyclopian Horror
|
Implement Twilight Summoner and Cyclopian Horror
|
Python
|
agpl-3.0
|
beheh/fireplace,NightKev/fireplace,jleclanche/fireplace
|
923d57d91b99fc3ac052d46de0314e7559b008a5
|
lessons/lesson-2.04/models_example.py
|
lessons/lesson-2.04/models_example.py
|
import arrow
from betterapis import db
class Track(db.Model, SimpleSerializing):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Clumn(db.String)
    details = db.Column(db.String)
    def __init__(self, **kwargs):
        self.name = kwargs.get('name')
        self.details = kwargs.get('details')
    def update(self, **kwargs):
        self.__init__(**kwargs)
    def __repr__(self):
        return '<Review {} {}>'.format(self.name, self.details)
|
import arrow
from betterapis import db
class Track(db.Model, SimpleSerializing):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String)
    details = db.Column(db.String)
    def __init__(self, **kwargs):
        self.name = kwargs.get('name')
        self.details = kwargs.get('details')
    def update(self, **kwargs):
        self.__init__(**kwargs)
    def __repr__(self):
        return '<Review {} {}>'.format(self.name, self.details)
|
Fix typo in example model
|
Fix typo in example model
|
Python
|
mit
|
tylerdave/OpenAPI-Tutorial,tylerdave/OpenAPI-Tutorial
|
af3515c8354dd525c2889eda75bfbc5cb7e2ecbf
|
massa/errors.py
|
massa/errors.py
|
# -*- coding: utf-8 -*-
from flask import jsonify
def register_error_handlers(app):
    app.register_error_handler(EntityNotFoundError, entity_not_found_handler)
    app.register_error_handler(InvalidInputError, invalid_input_handler)
def entity_not_found_handler(e):
    return jsonify({'message': e.message}), 404
def invalid_input_handler(e):
    return jsonify({'message': e.message, 'details': e.details}), 400
class DomainError(Exception):
    def __init__(self, message=None, details=None):
        if message: self.message = message
        if details: self.details = details
class EntityNotFoundError(DomainError):
    """Raised when an entity does not exist."""
    message = 'Entity does not exist.'
class InvalidInputError(DomainError):
    """Raised when input data is invalid."""
    message = 'Input data is invalid.'
|
# -*- coding: utf-8 -*-
from flask import jsonify
def register_error_handlers(app):
    app.register_error_handler(EntityNotFoundError, entity_not_found_handler)
    app.register_error_handler(InvalidInputError, invalid_input_handler)
def entity_not_found_handler(e):
    return jsonify(e.as_dict()), 404
def invalid_input_handler(e):
    return jsonify(e.as_dict()), 400
class DomainError(Exception):
    def __init__(self, message=None, details=None):
        if message: self.message = message
        if details: self.details = details
    def as_dict(self):
        data = {}
        if self.message: data['message'] = self.message
        if self.details: data['details'] = self.details
        return data
class EntityNotFoundError(DomainError):
    """Raised when an entity does not exist."""
    message = 'Entity does not exist.'
class InvalidInputError(DomainError):
    """Raised when input data is invalid."""
    message = 'Input data is invalid.'
|
Add method to retrieve the DomainError as a dict.
|
Add method to retrieve the DomainError as a dict.
|
Python
|
mit
|
jaapverloop/massa
|
8c0dd17bb633f56cb1ad8450759622ad75a524bc
|
config-example.py
|
config-example.py
|
# Enables detailed tracebacks and an interactive Python console on errors.
# Never use in production!
#DEBUG = True
# Address for development server to listen on
#HOST = "0.0.0.0"
# Port for development server to listen on
#PORT = 5000
# Makes the server more performant at sending static files when the
# server is being proxied by a server that supports X-Sendfile.
#USE_X_SENDFILE = True
# File to store the JSON server list data in.
FILENAME = "list.json"
# Amount of time, is seconds, after which servers are removed from the list
# if they haven't updated their listings. Note: By default Minetest servers
# only announce once every 5 minutes, so this should be more than 300.
PURGE_TIME = 350
# List of banned IP addresses
BANLIST = []
# Creates server entries if a server sends an 'update' and there is no entry yet.
# This should only be used to populate the server list after list.json was deleted.
# This WILL cause problems such as mapgen, mods and privilege information missing from the list
ALLOW_UPDATE_WITHOUT_OLD = False
|
# Enables detailed tracebacks and an interactive Python console on errors.
# Never use in production!
#DEBUG = True
# Address for development server to listen on
#HOST = "127.0.0.1"
# Port for development server to listen on
#PORT = 5000
# Makes the server more performant at sending static files when the
# server is being proxied by a server that supports X-Sendfile.
#USE_X_SENDFILE = True
# File to store the JSON server list data in.
FILENAME = "list.json"
# Amount of time, is seconds, after which servers are removed from the list
# if they haven't updated their listings. Note: By default Minetest servers
# only announce once every 5 minutes, so this should be more than 300.
PURGE_TIME = 350
# List of banned IP addresses
BANLIST = []
# Creates server entries if a server sends an 'update' and there is no entry yet.
# This should only be used to populate the server list after list.json was deleted.
# This WILL cause problems such as mapgen, mods and privilege information missing from the list
ALLOW_UPDATE_WITHOUT_OLD = False
|
Change default development server listen address back to 127.0.0.1
|
Change default development server listen address back to 127.0.0.1
|
Python
|
lgpl-2.1
|
minetest/master-server,minetest/master-server,minetest/master-server
|
04e64fea6e11a188a53d0b8d69ef97686868be1c
|
tests/py_ext_tests/test_png.py
|
tests/py_ext_tests/test_png.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
import faint
import os
import py_ext_tests
class TestPng(unittest.TestCase):
    def test_write_png(self):
        out_dir = py_ext_tests.make_test_dir(self)
        b1 = faint.Bitmap((5,7))
        b1.set_pixel((0,0),(255,0,255))
        fn = os.path.join(out_dir, "b1.png")
        faint.write_png(b1, fn, 0)
        b2, tEXt = faint.read_png(fn)
        self.assertEqual(b2.get_size(), (5,7))
        self.assertEqual(tEXt, {})
    def test_bad_args(self):
        with self.assertRaises(TypeError):
            faint.write_png("not a bitmap", py_ext_tests.make_test_dir(self), 0)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
import faint
from faint import png
import os
import py_ext_tests
class TestPng(unittest.TestCase):
    def test_write_png(self):
        out_dir = py_ext_tests.make_test_dir(self)
        b1 = faint.Bitmap((5,7))
        b1.set_pixel((0,0),(255,0,255))
        fn = os.path.join(out_dir, "b1.png")
        faint.write_png(b1, fn, png.RGB)
        b2, tEXt = faint.read_png(fn)
        self.assertEqual(b2.get_size(), (5,7))
        self.assertEqual(tEXt, {})
    def test_bad_args(self):
        with self.assertRaises(TypeError):
            faint.write_png("not a bitmap", py_ext_tests.make_test_dir(self), 0)
|
Use the png module in test.
|
Use the png module in test.
|
Python
|
apache-2.0
|
lukas-ke/faint-graphics-editor,lukas-ke/faint-graphics-editor,lukas-ke/faint-graphics-editor,lukas-ke/faint-graphics-editor
|
e9170de0c8d427e2545469c2d3add43bfae1cc54
|
tests/test_construct_policy.py
|
tests/test_construct_policy.py
|
"""Test IAM Policies for correctness."""
import json
from foremast.iam.construct_policy import construct_policy
ANSWER1 = {
'Version': '2012-10-17',
'Statement': [
{
'Effect': 'Allow',
'Action': [
's3:GetObject',
's3:ListObject'
],
'Resource': [
'arn:aws:s3:::archaius-stage/forrest/unicornforrest',
'arn:aws:s3:::archaius-stage/forrest/unicornforrest/*'
]
}
]
}
def test_main():
"""Check general assemblage."""
settings = {'services': {'s3': True}}
policy_json = construct_policy(app='unicornforrest',
env='stage',
group='forrest',
pipeline_settings=settings)
assert json.loads(policy_json) == ANSWER1
settings.update({'services': {'dynamodb': ['coreforrest', 'edgeforrest',
'attendantdevops']}})
policy_json = construct_policy(pipeline_settings=settings)
policy = json.loads(policy_json)
|
"""Test IAM Policies for correctness."""
import json
from foremast.iam.construct_policy import construct_policy
ANSWER1 = {
'Version': '2012-10-17',
'Statement': [
{
'Effect': 'Allow',
'Action': [
's3:GetObject',
's3:ListObject'
],
'Resource': [
'arn:aws:s3:::archaius-stage/forrest/unicornforrest',
'arn:aws:s3:::archaius-stage/forrest/unicornforrest/*'
]
}
]
}
def test_main():
"""Check general assemblage."""
settings = {'services': {'s3': True}}
policy_json = construct_policy(app='unicornforrest',
env='stage',
group='forrest',
pipeline_settings=settings)
assert json.loads(policy_json) == ANSWER1
# TODO: Test other services besides S3
settings.update({'services': {'dynamodb': ['coreforrest', 'edgeforrest',
'attendantdevops']}})
policy_json = construct_policy(pipeline_settings=settings)
policy = json.loads(policy_json)
|
Add TODO for more IAM Policy testing
|
chore: Add TODO for more IAM Policy testing
See also: PSOBAT-1482
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
e95bcb1a2688a9b5a0c09728cdd0082b643de943
|
pcbot/config.py
|
pcbot/config.py
|
import json
from os.path import exists
from os import mkdir
class Config:
    config_path = "config/"
    def __init__(self, filename, data=None, load=True):
        self.filepath = "{}{}.json".format(self.config_path, filename)
        if not exists(self.config_path):
            mkdir(self.config_path)
        loaded_data = None
        if load:
            loaded_data = self.load()
        if data and not loaded_data:
            self.data = data
        elif loaded_data:
            self.data = loaded_data
        else:
            self.data = None
        if not self.data == loaded_data:
            self.save()
    def save(self):
        with open(self.filepath, "w") as f:
            json.dump(self.data, f)
    def load(self):
        if exists(self.filepath):
            with open(self.filepath, "r") as f:
                return json.load(f)
        return None
|
import json
from os.path import exists
from os import mkdir
class Config:
    config_path = "config/"
    def __init__(self, filename, data=None, load=True):
        self.filepath = "{}{}.json".format(self.config_path, filename)
        if not exists(self.config_path):
            mkdir(self.config_path)
        loaded_data = None
        if load:
            loaded_data = self.load()
        if data is not None and not loaded_data:
            self.data = data
        elif loaded_data:
            self.data = loaded_data
        else:
            self.data = None
        if not self.data == loaded_data:
            self.save()
    def save(self):
        with open(self.filepath, "w") as f:
            json.dump(self.data, f)
    def load(self):
        if exists(self.filepath):
            with open(self.filepath, "r") as f:
                return json.load(f)
        return None
|
Check if data is not None instead of if data is true
|
Check if data is not None instead of if data is true
|
Python
|
mit
|
pckv/pcbot,PcBoy111/PC-BOT-V2,PcBoy111/PCBOT
|
0e513331fd649ac71c2d9690c1cb72bf5954973c
|
__init__.py
|
__init__.py
|
# The version of Review Board.
#
# This is in the format of:
#
#   (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 0, 0, 'final', 0, True)
def get_version_string():
    version = '%s.%s' % (VERSION[0], VERSION[1])
    if VERSION[2]:
        version += ".%s" % VERSION[2]
    if VERSION[3] != 'final':
        if VERSION[3] == 'rc':
            version += ' RC%s' % VERSION[4]
        else:
            version += ' %s %s' % (release_tag, VERSION[4])
    if not is_release():
        version += " (dev)"
    return version
def get_package_version():
    version = '%s.%s' % (VERSION[0], VERSION[1])
    if VERSION[2]:
        version += ".%s" % VERSION[2]
    if VERSION[3] != 'final':
        version += '%s%s' % (VERSION[3], VERSION[4])
    return version
def is_release():
    return VERSION[5]
|
# The version of Review Board.
#
# This is in the format of:
#
#   (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
    version = '%s.%s' % (VERSION[0], VERSION[1])
    if VERSION[2]:
        version += ".%s" % VERSION[2]
    if VERSION[3] != 'final':
        if VERSION[3] == 'rc':
            version += ' RC%s' % VERSION[4]
        else:
            version += ' %s %s' % (release_tag, VERSION[4])
    if not is_release():
        version += " (dev)"
    return version
def get_package_version():
    version = '%s.%s' % (VERSION[0], VERSION[1])
    if VERSION[2]:
        version += ".%s" % VERSION[2]
    if VERSION[3] != 'final':
        version += '%s%s' % (VERSION[3], VERSION[4])
    return version
def is_release():
    return VERSION[5]
|
Bump to Review Board 1.1alpha1.dev.
|
Bump to Review Board 1.1alpha1.dev.
|
Python
|
mit
|
1tush/reviewboard,1tush/reviewboard,Khan/reviewboard,bkochendorfer/reviewboard,beol/reviewboard,davidt/reviewboard,Khan/reviewboard,custode/reviewboard,sgallagher/reviewboard,bkochendorfer/reviewboard,beol/reviewboard,chazy/reviewboard,KnowNo/reviewboard,chipx86/reviewboard,beol/reviewboard,atagar/ReviewBoard,1tush/reviewboard,sgallagher/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,reviewboard/reviewboard,atagar/ReviewBoard,1tush/reviewboard,chazy/reviewboard,brennie/reviewboard,custode/reviewboard,reviewboard/reviewboard,bkochendorfer/reviewboard,davidt/reviewboard,beol/reviewboard,sgallagher/reviewboard,davidt/reviewboard,Khan/reviewboard,atagar/ReviewBoard,chazy/reviewboard,brennie/reviewboard,KnowNo/reviewboard,brennie/reviewboard,sgallagher/reviewboard,atagar/ReviewBoard,1tush/reviewboard,1tush/reviewboard,Khan/reviewboard,1tush/reviewboard,chazy/reviewboard,Khan/reviewboard,atagar/ReviewBoard,custode/reviewboard,atagar/ReviewBoard,chipx86/reviewboard,bkochendorfer/reviewboard,atagar/ReviewBoard,Khan/reviewboard,Khan/reviewboard,chazy/reviewboard,brennie/reviewboard,Khan/reviewboard,chazy/reviewboard,chazy/reviewboard,chazy/reviewboard,atagar/ReviewBoard,atagar/ReviewBoard,KnowNo/reviewboard,chazy/reviewboard,KnowNo/reviewboard,davidt/reviewboard,chipx86/reviewboard,custode/reviewboard,reviewboard/reviewboard,Khan/reviewboard,1tush/reviewboard,1tush/reviewboard
|
# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 0, 0, 'final', 0, True)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
Bump to Review Board 1.1alpha1.dev.
|
# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
|
<commit_before># The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 0, 0, 'final', 0, True)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
<commit_msg>Bump to Review Board 1.1alpha1.dev.<commit_after>
|
# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
|
# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 0, 0, 'final', 0, True)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
Bump to Review Board 1.1alpha1.dev.# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
|
<commit_before># The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 0, 0, 'final', 0, True)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
<commit_msg>Bump to Review Board 1.1alpha1.dev.<commit_after># The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
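For reference, a minimal standalone sketch of how the (Major, Minor, Micro, tag, release number, released) tuple documented in this record turns into a package version string; the function body mirrors get_package_version above, and the two sample tuples are the before/after values shown in this record.

# Sketch: render the VERSION tuples from this record the way get_package_version does.
def package_version(version):
    s = "%s.%s" % (version[0], version[1])
    if version[2]:
        s += ".%s" % version[2]
    if version[3] != 'final':
        s += "%s%s" % (version[3], version[4])
    return s

print(package_version((1, 0, 0, 'final', 0, True)))   # -> 1.0
print(package_version((1, 1, 0, 'alpha', 1, False)))  # -> 1.1alpha1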
|
3f62c7b413f3ef6b1072437bcd1f08b1a9c6b6ea
|
armstrong/core/arm_layout/utils.py
|
armstrong/core/arm_layout/utils.py
|
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
template_finder = GenericBackend('ARMSTRONG_LAYOUT_TEMPLATE_FINDER',
defaults='armstrong.core.arm_layout.utils.get_layout_template_name')\
.get_backend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(template_finder(object, name),
dictionary=dictionary, context_instance=context_instance))
|
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(get_layout_template_name(object, name),
dictionary=dictionary, context_instance=context_instance))
|
Revert backend template finder code
|
Revert backend template finder code
|
Python
|
apache-2.0
|
armstrong/armstrong.core.arm_layout,armstrong/armstrong.core.arm_layout
|
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
template_finder = GenericBackend('ARMSTRONG_LAYOUT_TEMPLATE_FINDER',
defaults='armstrong.core.arm_layout.utils.get_layout_template_name')\
.get_backend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(template_finder(object, name),
dictionary=dictionary, context_instance=context_instance))
Revert backend template finder code
|
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(get_layout_template_name(object, name),
dictionary=dictionary, context_instance=context_instance))
|
<commit_before>from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
template_finder = GenericBackend('ARMSTRONG_LAYOUT_TEMPLATE_FINDER',
defaults='armstrong.core.arm_layout.utils.get_layout_template_name')\
.get_backend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(template_finder(object, name),
dictionary=dictionary, context_instance=context_instance))
<commit_msg>Revert backend template finder code<commit_after>
|
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(get_layout_template_name(object, name),
dictionary=dictionary, context_instance=context_instance))
|
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
template_finder = GenericBackend('ARMSTRONG_LAYOUT_TEMPLATE_FINDER',
defaults='armstrong.core.arm_layout.utils.get_layout_template_name')\
.get_backend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(template_finder(object, name),
dictionary=dictionary, context_instance=context_instance))
Revert backend template finder codefrom django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(get_layout_template_name(object, name),
dictionary=dictionary, context_instance=context_instance))
|
<commit_before>from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
template_finder = GenericBackend('ARMSTRONG_LAYOUT_TEMPLATE_FINDER',
defaults='armstrong.core.arm_layout.utils.get_layout_template_name')\
.get_backend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(template_finder(object, name),
dictionary=dictionary, context_instance=context_instance))
<commit_msg>Revert backend template finder code<commit_after>from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(get_layout_template_name(object, name),
dictionary=dictionary, context_instance=context_instance))
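A small self-contained sketch of the template-name fallback that get_layout_template_name builds by walking the MRO; the _Meta, Article, and VideoArticle classes are hypothetical stand-ins that only mimic the two _meta attributes a real Django model would provide.

# Hypothetical stand-ins for Django models, exposing just app_label and object_name.
class _Meta(object):
    def __init__(self, app_label, object_name):
        self.app_label = app_label
        self.object_name = object_name

class Article(object):
    _meta = _Meta("news", "Article")

class VideoArticle(Article):
    _meta = _Meta("news", "VideoArticle")

def get_layout_template_name(model, name):
    ret = []
    for a in model.__class__.mro():
        if not hasattr(a, "_meta"):
            continue
        ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
            a._meta.object_name.lower(), name))
    return ret

print(get_layout_template_name(VideoArticle(), "full_page"))
# -> ['layout/news/videoarticle/full_page.html', 'layout/news/article/full_page.html']
# i.e. the most specific subclass template is tried first, then its parents.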
|
4503e6671828497189736c86d408f6c0a8b47058
|
lambda_tweet.py
|
lambda_tweet.py
|
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
s3_info = event['Records'][0]['S3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
|
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
print()
s3_info = event['Records'][0]['s3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
|
Update key name for S3
|
Update key name for S3
|
Python
|
mit
|
onema/lambda-tweet
|
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
s3_info = event['Records'][0]['S3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
Update key name for S3
|
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
print()
s3_info = event['Records'][0]['s3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
|
<commit_before>import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
s3_info = event['Records'][0]['S3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
<commit_msg>Update key name for S3<commit_after>
|
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
print()
s3_info = event['Records'][0]['s3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
|
import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
s3_info = event['Records'][0]['S3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
Update key name for S3import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
print()
s3_info = event['Records'][0]['s3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
|
<commit_before>import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
s3_info = event['Records'][0]['S3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
<commit_msg>Update key name for S3<commit_after>import boto3
import tweepy
import json
import base64
from tweet_s3_images import TweetS3Images
with open('./config.json', 'r') as file:
config = json.loads(file.read())
# Decrypt API keys
client = boto3.client('kms')
response = client.decrypt(CiphertextBlob=base64.b64decode(config['secrets']))
secrets = json.loads(response['Plaintext'])
CONSUMER_KEY = secrets['consumer-key']
CONSUMER_SECRET = secrets['consumer-secret']
ACCESS_TOKEN = secrets['access-token']
ACCESS_TOKEN_SECRET = secrets['access-token-secret']
def lambda_handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
print()
s3_info = event['Records'][0]['s3']
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
client = boto3.client('s3')
tweet_images = TweetS3Images(api, client)
tweet_images.send_image(s3_info['bucket']['name'], s3_info['object']['key'], cleanup=True)
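For context, a rough sketch of the slice of the S3 put-event that the handler above reads; the bucket and key values are made up, and the lowercase 's3' key is exactly what this record's fix switches to.

# Trimmed S3 event shape (illustrative values only).
event = {
    "Records": [
        {
            "s3": {  # lowercase key, as in the fixed handler
                "bucket": {"name": "example-bucket"},
                "object": {"key": "uploads/photo.png"},
            }
        }
    ]
}
s3_info = event["Records"][0]["s3"]
print(s3_info["bucket"]["name"], s3_info["object"]["key"])
# -> example-bucket uploads/photo.png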
|
8f099581930d0f55454751d8fbba05a3685c6144
|
mbuild/tests/test_xyz.py
|
mbuild/tests/test_xyz.py
|
import numpy as np
import pytest
import mbuild as mb
from mbuild.tests.base_test import BaseTest
class TestXYZ(BaseTest):
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
|
import numpy as np
import pytest
import mbuild as mb
from mbuild.tests.base_test import BaseTest
class TestXYZ(BaseTest):
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane.save(filename='ethane.pdb')
ethane_in = mb.load('ethane.xyz', top='ethane.pdb')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
|
Add sanity checks to tests
|
Add sanity checks to tests
This should be cleaned up with an XYZ read that does not require reading in an
extra file (mdtraj requires a file with topological information, in this case
PDB file).
|
Python
|
mit
|
iModels/mbuild,iModels/mbuild
|
import numpy as np
import pytest
import mbuild as mb
from mbuild.tests.base_test import BaseTest
class TestXYZ(BaseTest):
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
Add sanity checks to tests
This should be cleaned up with an XYZ read that does not require reading in an
extra file (mdtraj requires a file with topological information, in this case
PDB file).
|
import numpy as np
import pytest
import mbuild as mb
from mbuild.tests.base_test import BaseTest
class TestXYZ(BaseTest):
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane.save(filename='ethane.pdb')
ethane_in = mb.load('ethane.xyz', top='ethane.pdb')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
|
<commit_before>import numpy as np
import pytest
import mbuild as mb
from mbuild.tests.base_test import BaseTest
class TestXYZ(BaseTest):
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
<commit_msg>Add sanity checks to tests
This should be cleaned up with an XYZ read that does not require reading in an
extra file (mdtraj requires a file with topological information, in this case
PDB file).<commit_after>
|
import numpy as np
import pytest
import mbuild as mb
from mbuild.tests.base_test import BaseTest
class TestXYZ(BaseTest):
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane.save(filename='ethane.pdb')
ethane_in = mb.load('ethane.xyz', top='ethane.pdb')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
|
import numpy as np
import pytest
import mbuild as mb
from mbuild.tests.base_test import BaseTest
class TestXYZ(BaseTest):
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
Add sanity checks to tests
This should be cleaned up with an XYZ read that does not require reading in an
extra file (mdtraj requires a file with topological information, in this case
PDB file).import numpy as np
import pytest
import mbuild as mb
from mbuild.tests.base_test import BaseTest
class TestXYZ(BaseTest):
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane.save(filename='ethane.pdb')
ethane_in = mb.load('ethane.xyz', top='ethane.pdb')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
|
<commit_before>import numpy as np
import pytest
import mbuild as mb
from mbuild.tests.base_test import BaseTest
class TestXYZ(BaseTest):
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
<commit_msg>Add sanity checks to tests
This should be cleaned up with an XYZ read that does not require reading in an
extra file (mdtraj requires a file with topological information, in this case
PDB file).<commit_after>import numpy as np
import pytest
import mbuild as mb
from mbuild.tests.base_test import BaseTest
class TestXYZ(BaseTest):
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane.save(filename='ethane.pdb')
ethane_in = mb.load('ethane.xyz', top='ethane.pdb')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
|
7d20874c43637f1236442333f60a88ec653f53f2
|
resources/launchers/alfanousDesktop.py
|
resources/launchers/alfanousDesktop.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import alfanousDesktop.Gui
alfanousDesktop.Gui.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
# The paths should be generated by setup script
sys.argv.extend(
'-i', '/usr/share/alfanous-indexes/',
'-l', '/usr/locale/',
'-c', '/usr/share/alfanous-config/')
from alfanousDesktop.Gui import *
main()
|
Add resource paths to python launcher script (proxy)
|
Add resource paths to python launcher script (proxy)
|
Python
|
agpl-3.0
|
keelhaule/alfanous,muslih/alfanous,saifmahamood/alfanous,saifmahamood/alfanous,muslih/alfanous,muslih/alfanous,keelhaule/alfanous,saifmahamood/alfanous,saifmahamood/alfanous,keelhaule/alfanous,saifmahamood/alfanous,abougouffa/alfanous,keelhaule/alfanous,muslih/alfanous,muslih/alfanous,abougouffa/alfanous,keelhaule/alfanous,abougouffa/alfanous,muslih/alfanous,saifmahamood/alfanous,keelhaule/alfanous,muslih/alfanous,abougouffa/alfanous,saifmahamood/alfanous,abougouffa/alfanous,saifmahamood/alfanous,abougouffa/alfanous,abougouffa/alfanous,keelhaule/alfanous
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import alfanousDesktop.Gui
alfanousDesktop.Gui.main()
Add resource paths to python launcher script (proxy)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
# The paths should be generated by setup script
sys.argv.extend(
'-i', '/usr/share/alfanous-indexes/',
'-l', '/usr/locale/',
'-c', '/usr/share/alfanous-config/')
from alfanousDesktop.Gui import *
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import alfanousDesktop.Gui
alfanousDesktop.Gui.main()
<commit_msg>Add resource paths to python launcher script (proxy)<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
# The paths should be generated by setup script
sys.argv.extend(
'-i', '/usr/share/alfanous-indexes/',
'-l', '/usr/locale/',
'-c', '/usr/share/alfanous-config/')
from alfanousDesktop.Gui import *
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import alfanousDesktop.Gui
alfanousDesktop.Gui.main()
Add resource paths to python launcher script (proxy)#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
# The paths should be generated by setup script
sys.argv.extend(
'-i', '/usr/share/alfanous-indexes/',
'-l', '/usr/locale/',
'-c', '/usr/share/alfanous-config/')
from alfanousDesktop.Gui import *
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import alfanousDesktop.Gui
alfanousDesktop.Gui.main()
<commit_msg>Add resource paths to python launcher script (proxy)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
# The paths should be generated by setup script
sys.argv.extend(
'-i', '/usr/share/alfanous-indexes/',
'-l', '/usr/locale/',
'-c', '/usr/share/alfanous-config/')
from alfanousDesktop.Gui import *
main()
|
7537387aa80109877d6659cc54ec0ee7aa6496bd
|
robot/Cumulus/resources/locators_50.py
|
robot/Cumulus/resources/locators_50.py
|
from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
# current version (Sravani's )
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
# stashed (Noah's version)
# npsp_lex_locators["delete_icon"]= "//span[contains(text(),'{}')]/../following::div//span[text() = '{}']/following-sibling::a/child::span[@class = 'deleteIcon']"
# npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
# npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
|
from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
|
Revert "changes in locator_50 file (current and old versions)"
|
Revert "changes in locator_50 file (current and old versions)"
This reverts commit 819dfa4ed2033c1f82973edb09215b96d3c4b188.
|
Python
|
bsd-3-clause
|
SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus
|
from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
# current version (Sravani's )
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
# stashed (Noah's version)
# npsp_lex_locators["delete_icon"]= "//span[contains(text(),'{}')]/../following::div//span[text() = '{}']/following-sibling::a/child::span[@class = 'deleteIcon']"
# npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
# npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
Revert "changes in locator_50 file (current and old versions)"
This reverts commit 819dfa4ed2033c1f82973edb09215b96d3c4b188.
|
from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
|
<commit_before>from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
# current version (Sravani's )
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
# stashed (Noah's version)
# npsp_lex_locators["delete_icon"]= "//span[contains(text(),'{}')]/../following::div//span[text() = '{}']/following-sibling::a/child::span[@class = 'deleteIcon']"
# npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
# npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
<commit_msg>Revert "changes in locator_50 file (current and old versions)"
This reverts commit 819dfa4ed2033c1f82973edb09215b96d3c4b188.<commit_after>
|
from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
|
from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
# current version (Sravani's )
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
# stashed (Noah's version)
# npsp_lex_locators["delete_icon"]= "//span[contains(text(),'{}')]/../following::div//span[text() = '{}']/following-sibling::a/child::span[@class = 'deleteIcon']"
# npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
# npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
Revert "changes in locator_50 file (current and old versions)"
This reverts commit 819dfa4ed2033c1f82973edb09215b96d3c4b188.from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
|
<commit_before>from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
# current version (Sravani's )
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
# stashed (Noah's version)
# npsp_lex_locators["delete_icon"]= "//span[contains(text(),'{}')]/../following::div//span[text() = '{}']/following-sibling::a/child::span[@class = 'deleteIcon']"
# npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
# npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
<commit_msg>Revert "changes in locator_50 file (current and old versions)"
This reverts commit 819dfa4ed2033c1f82973edb09215b96d3c4b188.<commit_after>from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
|
c72b727d373ac620379fe0a2a0c1b85bb868962e
|
test_arrange_schedule.py
|
test_arrange_schedule.py
|
from arrange_schedule import *
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = read_system_setting()
test_crawler_cwb_img(system_setting)
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
Add test case for read_system_setting()
|
Add test case for read_system_setting()
|
Python
|
apache-2.0
|
stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,chenyang14/electronic-blackboard,chenyang14/electronic-blackboard,SWLBot/electronic-blackboard,SWLBot/electronic-blackboard,stvreumi/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,chenyang14/electronic-blackboard
|
from arrange_schedule import *
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = read_system_setting()
test_crawler_cwb_img(system_setting)
Add test case for read_system_setting()
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
<commit_before>from arrange_schedule import *
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = read_system_setting()
test_crawler_cwb_img(system_setting)
<commit_msg>Add test case for read_system_setting()<commit_after>
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
from arrange_schedule import *
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = read_system_setting()
test_crawler_cwb_img(system_setting)
Add test case for read_system_setting()from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
<commit_before>from arrange_schedule import *
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = read_system_setting()
test_crawler_cwb_img(system_setting)
<commit_msg>Add test case for read_system_setting()<commit_after>from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
01a4a92f69219e081171aa1ad9c0215efec8f69d
|
flicks/settings/__init__.py
|
flicks/settings/__init__.py
|
from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)'
% exc.args[0]])
raise exc
import sys
if sys.argv[1] == 'test':
try:
from .test import *
except ImportError:
pass
|
from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)'
% exc.args[0]])
raise exc
"""
import sys
if sys.argv[1] == 'test':
try:
from .test import *
except ImportError:
pass
"""
|
Remove test settings to see if it fixes jenkins.
|
Remove test settings to see if it fixes jenkins.
|
Python
|
bsd-3-clause
|
mozilla/firefox-flicks,mozilla/firefox-flicks,mozilla/firefox-flicks,mozilla/firefox-flicks
|
from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)'
% exc.args[0]])
raise exc
import sys
if sys.argv[1] == 'test':
try:
from .test import *
except ImportError:
pass
Remove test settings to see if it fixes jenkins.
|
from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)'
% exc.args[0]])
raise exc
"""
import sys
if sys.argv[1] == 'test':
try:
from .test import *
except ImportError:
pass
"""
|
<commit_before>from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)'
% exc.args[0]])
raise exc
import sys
if sys.argv[1] == 'test':
try:
from .test import *
except ImportError:
pass
<commit_msg>Remove test settings to see if it fixes jenkins.<commit_after>
|
from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)'
% exc.args[0]])
raise exc
"""
import sys
if sys.argv[1] == 'test':
try:
from .test import *
except ImportError:
pass
"""
|
from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)'
% exc.args[0]])
raise exc
import sys
if sys.argv[1] == 'test':
try:
from .test import *
except ImportError:
pass
Remove test settings to see if it fixes jenkins.from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)'
% exc.args[0]])
raise exc
"""
import sys
if sys.argv[1] == 'test':
try:
from .test import *
except ImportError:
pass
"""
|
<commit_before>from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)'
% exc.args[0]])
raise exc
import sys
if sys.argv[1] == 'test':
try:
from .test import *
except ImportError:
pass
<commit_msg>Remove test settings to see if it fixes jenkins.<commit_after>from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)'
% exc.args[0]])
raise exc
"""
import sys
if sys.argv[1] == 'test':
try:
from .test import *
except ImportError:
pass
"""
|
854ad9b06ab5da3c4ac0ae308d4d8d01a0739d42
|
openliveq/click_model.py
|
openliveq/click_model.py
|
from math import exp
class ClickModel(object):
'''
Simple Position-biased Model:
P(C_r=1) = P(A_r=1|E_r=1)P(E_r=1),
where
C_r is click on the r-th document,
A_r is being attracted by the r-th document, and
E_r is examination of the r-th document.
In this simple model, the examination probability is defined as
P(E_r=1) = exp(- r / sigma).
Therefore, P(A_r=1|E_r=1) = P(C_r=1) / P(E_r=1).
'''
@classmethod
def estimate(cls, ctrs, sigma=10.0, topk=10):
result = {}
for ctr in ctrs:
eprob = cls._eprob(ctr.rank, sigma)
aprob = min([1.0, ctr.ctr / eprob])
result[(ctr.query_id, ctr.question_id)] = aprob
return result
@classmethod
def _eprob(cls, rank, sigma):
return exp(- float(rank) / sigma)
|
from math import exp
class ClickModel(object):
'''
Simple Position-biased Model:
P(C_r=1) = P(A_r=1|E_r=1)P(E_r=1),
where
C_r is click on the r-th document,
A_r is being attracted by the r-th document, and
E_r is examination of the r-th document.
In this simple model, the examination probability is defined as
P(E_r=1) = exp(- r / sigma).
Therefore, P(A_r=1|E_r=1) = P(C_r=1) / P(E_r=1).
'''
@classmethod
def estimate(cls, ctrs, sigma=10.0, topk=10):
result = {}
for ctr in ctrs:
if ctr.rank <= topk:
eprob = cls._eprob(ctr.rank, sigma)
aprob = min([1.0, ctr.ctr / eprob])
result[(ctr.query_id, ctr.question_id)] = aprob
return result
@classmethod
def _eprob(cls, rank, sigma):
return exp(- float(rank) / sigma)
|
Use topk in the click model
|
Use topk in the click model
|
Python
|
mit
|
mpkato/openliveq
|
from math import exp
class ClickModel(object):
'''
Simple Position-biased Model:
P(C_r=1) = P(A_r=1|E_r=1)P(E_r=1),
where
C_r is click on the r-th document,
A_r is being attracted by the r-th document, and
E_r is examination of the r-th document.
In this simple model, the examination probability is defined as
P(E_r=1) = exp(- r / sigma).
Therefore, P(A_r=1|E_r=1) = P(C_r=1) / P(E_r=1).
'''
@classmethod
def estimate(cls, ctrs, sigma=10.0, topk=10):
result = {}
for ctr in ctrs:
eprob = cls._eprob(ctr.rank, sigma)
aprob = min([1.0, ctr.ctr / eprob])
result[(ctr.query_id, ctr.question_id)] = aprob
return result
@classmethod
def _eprob(cls, rank, sigma):
return exp(- float(rank) / sigma)
Use topk in the click model
|
from math import exp
class ClickModel(object):
'''
Simple Position-biased Model:
P(C_r=1) = P(A_r=1|E_r=1)P(E_r=1),
where
C_r is click on the r-th document,
A_r is being attracted by the r-th document, and
E_r is examination of the r-th document.
In this simple model, the examination probability is defined as
P(E_r=1) = exp(- r / sigma).
Therefore, P(A_r=1|E_r=1) = P(C_r=1) / P(E_r=1).
'''
@classmethod
def estimate(cls, ctrs, sigma=10.0, topk=10):
result = {}
for ctr in ctrs:
if ctr.rank <= topk:
eprob = cls._eprob(ctr.rank, sigma)
aprob = min([1.0, ctr.ctr / eprob])
result[(ctr.query_id, ctr.question_id)] = aprob
return result
@classmethod
def _eprob(cls, rank, sigma):
return exp(- float(rank) / sigma)
|
<commit_before>from math import exp
class ClickModel(object):
'''
Simple Position-biased Model:
P(C_r=1) = P(A_r=1|E_r=1)P(E_r=1),
where
C_r is click on the r-th document,
A_r is being attracted by the r-th document, and
E_r is examination of the r-th document.
In this simple model, the examination probability is defined as
P(E_r=1) = exp(- r / sigma).
Therefore, P(A_r=1|E_r=1) = P(C_r=1) / P(E_r=1).
'''
@classmethod
def estimate(cls, ctrs, sigma=10.0, topk=10):
result = {}
for ctr in ctrs:
eprob = cls._eprob(ctr.rank, sigma)
aprob = min([1.0, ctr.ctr / eprob])
result[(ctr.query_id, ctr.question_id)] = aprob
return result
@classmethod
def _eprob(cls, rank, sigma):
return exp(- float(rank) / sigma)
<commit_msg>Use topk in the click model<commit_after>
|
from math import exp
class ClickModel(object):
'''
Simple Position-biased Model:
P(C_r=1) = P(A_r=1|E_r=1)P(E_r=1),
where
C_r is click on the r-th document,
A_r is being attracted by the r-th document, and
E_r is examination of the r-th document.
In this simple model, the examination probability is defined as
P(E_r=1) = exp(- r / sigma).
Therefore, P(A_r=1|E_r=1) = P(C_r=1) / P(E_r=1).
'''
@classmethod
def estimate(cls, ctrs, sigma=10.0, topk=10):
result = {}
for ctr in ctrs:
if ctr.rank <= topk:
eprob = cls._eprob(ctr.rank, sigma)
aprob = min([1.0, ctr.ctr / eprob])
result[(ctr.query_id, ctr.question_id)] = aprob
return result
@classmethod
def _eprob(cls, rank, sigma):
return exp(- float(rank) / sigma)
|
from math import exp
class ClickModel(object):
'''
Simple Position-biased Model:
P(C_r=1) = P(A_r=1|E_r=1)P(E_r=1),
where
C_r is click on the r-th document,
A_r is being attracted by the r-th document, and
E_r is examination of the r-th document.
In this simple model, the examination probability is defined as
P(E_r=1) = exp(- r / sigma).
Therefore, P(A_r=1|E_r=1) = P(C_r=1) / P(E_r=1).
'''
@classmethod
def estimate(cls, ctrs, sigma=10.0, topk=10):
result = {}
for ctr in ctrs:
eprob = cls._eprob(ctr.rank, sigma)
aprob = min([1.0, ctr.ctr / eprob])
result[(ctr.query_id, ctr.question_id)] = aprob
return result
@classmethod
def _eprob(cls, rank, sigma):
return exp(- float(rank) / sigma)
Use topk in the click modelfrom math import exp
class ClickModel(object):
'''
Simple Position-biased Model:
P(C_r=1) = P(A_r=1|E_r=1)P(E_r=1),
where
C_r is click on the r-th document,
A_r is being attracted by the r-th document, and
E_r is examination of the r-th document.
In this simple model, the examination probability is defined as
P(E_r=1) = exp(- r / sigma).
Therefore, P(A_r=1|E_r=1) = P(C_r=1) / P(E_r=1).
'''
@classmethod
def estimate(cls, ctrs, sigma=10.0, topk=10):
result = {}
for ctr in ctrs:
if ctr.rank <= topk:
eprob = cls._eprob(ctr.rank, sigma)
aprob = min([1.0, ctr.ctr / eprob])
result[(ctr.query_id, ctr.question_id)] = aprob
return result
@classmethod
def _eprob(cls, rank, sigma):
return exp(- float(rank) / sigma)
|
<commit_before>from math import exp
class ClickModel(object):
'''
Simple Position-biased Model:
P(C_r=1) = P(A_r=1|E_r=1)P(E_r=1),
where
C_r is click on the r-th document,
A_r is being attracted by the r-th document, and
E_r is examination of the r-th document.
In this simple model, the examination probability is defined as
P(E_r=1) = exp(- r / sigma).
Therefore, P(A_r=1|E_r=1) = P(C_r=1) / P(E_r=1).
'''
@classmethod
def estimate(cls, ctrs, sigma=10.0, topk=10):
result = {}
for ctr in ctrs:
eprob = cls._eprob(ctr.rank, sigma)
aprob = min([1.0, ctr.ctr / eprob])
result[(ctr.query_id, ctr.question_id)] = aprob
return result
@classmethod
def _eprob(cls, rank, sigma):
return exp(- float(rank) / sigma)
<commit_msg>Use topk in the click model<commit_after>from math import exp
class ClickModel(object):
'''
Simple Position-biased Model:
P(C_r=1) = P(A_r=1|E_r=1)P(E_r=1),
where
C_r is click on the r-th document,
A_r is being attracted by the r-th document, and
E_r is examination of the r-th document.
In this simple model, the examination probability is defined as
P(E_r=1) = exp(- r / sigma).
Therefore, P(A_r=1|E_r=1) = P(C_r=1) / P(E_r=1).
'''
@classmethod
def estimate(cls, ctrs, sigma=10.0, topk=10):
result = {}
for ctr in ctrs:
if ctr.rank <= topk:
eprob = cls._eprob(ctr.rank, sigma)
aprob = min([1.0, ctr.ctr / eprob])
result[(ctr.query_id, ctr.question_id)] = aprob
return result
@classmethod
def _eprob(cls, rank, sigma):
return exp(- float(rank) / sigma)
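A tiny worked example of the formulas in the docstring above, P(E_r=1) = exp(-r/sigma) and P(A_r=1|E_r=1) = P(C_r=1)/P(E_r=1); the Ctr namedtuple is a hypothetical stand-in for whatever CTR records openliveq actually passes to ClickModel.estimate, and sigma/topk keep the defaults shown in this record.

from collections import namedtuple
from math import exp

# Hypothetical CTR record carrying the four fields estimate() reads.
Ctr = namedtuple("Ctr", ["query_id", "question_id", "rank", "ctr"])

sigma, topk = 10.0, 10
ctrs = [Ctr("q1", "d1", 1, 0.30), Ctr("q1", "d2", 2, 0.10), Ctr("q1", "d3", 15, 0.01)]

attractiveness = {}
for c in ctrs:
    if c.rank <= topk:                       # ranks past topk are skipped (the change in this record)
        eprob = exp(-float(c.rank) / sigma)  # examination probability P(E_r=1)
        attractiveness[(c.query_id, c.question_id)] = min(1.0, c.ctr / eprob)

print(attractiveness)
# rank 1: 0.30 / exp(-0.1) ~= 0.332;  rank 2: 0.10 / exp(-0.2) ~= 0.122;  rank 15 is dropped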
|
343baa4b8a0ed9d4db0727c514d9ff97b937c7ee
|
adLDAP.py
|
adLDAP.py
|
import ldap
def checkCredentials(username, password):
if password == "":
return 'Empty Password'
controller = 'devdc'
domainA = 'dev'
domainB = 'devlcdi'
domain = domainA + '.' + domainB
ldapServer = 'ldap://' + controller + '.' + domain
ldapUsername = username + '@' + domain
ldapPassword = password
base_dn = 'DC=' + domainA + ',DC=' + domainB
ldap_filter = 'userPrincipalName=' + ldapUsername
attrs = ['memberOf']
# Note: empty passwords WILL validate with ldap
try:
ldap_client = ldap.initialize(ldapServer)
ldap_client.set_option(ldap.OPT_REFERRALS, 0)
ldap_client.simple_bind_s(ldapUsername, ldapPassword)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind()
return ('Wrong Credentials', False)
except ldap.SERVER_DOWN:
return ('Server Down', False)
hasEditAccess = False
search_dn = "ou=users," + base_dn
scope = ldap.SCOPE_SUBTREE
filterStr = '(objectclass=person)'
attrs = ['sn']
res = ldap_client.search_s(search_dn, scope, filterStr, attrs)
print(res)
ldap_client.unbind()
return (True, hasEditAccess)
|
import ldap
validEditAccessGroups = ['Office Assistants', 'Domain Admins']
def checkCredentials(username, password):
if password == "":
return 'Empty Password'
controller = 'devdc'
domainA = 'dev'
domainB = 'devlcdi'
domain = domainA + '.' + domainB
ldapServer = 'ldap://' + controller + '.' + domain
ldapUsername = username + '@' + domain
ldapPassword = password
base_dn = 'DC=' + domainA + ',DC=' + domainB
ldap_filter = 'userPrincipalName=' + ldapUsername
# Note: empty passwords WILL validate with ldap
try:
ldap_client = ldap.initialize(ldapServer)
ldap_client.set_option(ldap.OPT_REFERRALS, 0)
ldap_client.simple_bind_s(ldapUsername, ldapPassword)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind()
return ('Wrong Credentials', False)
except ldap.SERVER_DOWN:
return ('Server Down', False)
hasEditAccess = False
dn = 'cn=Users,' + base_dn
filter = 'cn=' + str(username)
attrs = ['memberOf']
id = ldap_client.search(dn, ldap.SCOPE_SUBTREE, filter, attrs)
groups = ldap_client.result(id)[1][0][1]['memberOf']
for group in groups:
address - group.split(',')
groupName = address[0].split('=')[1]
if groupName in validEditAccessGroups:
hasEditAccess = True
break
ldap_client.unbind()
return (True, hasEditAccess)
|
Add group fetching from AD
|
Add group fetching from AD
|
Python
|
mit
|
lcdi/Inventory,lcdi/Inventory,lcdi/Inventory,lcdi/Inventory
|
import ldap
def checkCredentials(username, password):
if password == "":
return 'Empty Password'
controller = 'devdc'
domainA = 'dev'
domainB = 'devlcdi'
domain = domainA + '.' + domainB
ldapServer = 'ldap://' + controller + '.' + domain
ldapUsername = username + '@' + domain
ldapPassword = password
base_dn = 'DC=' + domainA + ',DC=' + domainB
ldap_filter = 'userPrincipalName=' + ldapUsername
attrs = ['memberOf']
# Note: empty passwords WILL validate with ldap
try:
ldap_client = ldap.initialize(ldapServer)
ldap_client.set_option(ldap.OPT_REFERRALS, 0)
ldap_client.simple_bind_s(ldapUsername, ldapPassword)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind()
return ('Wrong Credentials', False)
except ldap.SERVER_DOWN:
return ('Server Down', False)
hasEditAccess = False
search_dn = "ou=users," + base_dn
scope = ldap.SCOPE_SUBTREE
filterStr = '(objectclass=person)'
attrs = ['sn']
res = ldap_client.search_s(search_dn, scope, filterStr, attrs)
print(res)
ldap_client.unbind()
return (True, hasEditAccess)
Add group fetching from AD
|
import ldap
validEditAccessGroups = ['Office Assistants', 'Domain Admins']
def checkCredentials(username, password):
if password == "":
return 'Empty Password'
controller = 'devdc'
domainA = 'dev'
domainB = 'devlcdi'
domain = domainA + '.' + domainB
ldapServer = 'ldap://' + controller + '.' + domain
ldapUsername = username + '@' + domain
ldapPassword = password
base_dn = 'DC=' + domainA + ',DC=' + domainB
ldap_filter = 'userPrincipalName=' + ldapUsername
# Note: empty passwords WILL validate with ldap
try:
ldap_client = ldap.initialize(ldapServer)
ldap_client.set_option(ldap.OPT_REFERRALS, 0)
ldap_client.simple_bind_s(ldapUsername, ldapPassword)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind()
return ('Wrong Credentials', False)
except ldap.SERVER_DOWN:
return ('Server Down', False)
hasEditAccess = False
dn = 'cn=Users,' + base_dn
filter = 'cn=' + str(username)
attrs = ['memberOf']
id = ldap_client.search(dn, ldap.SCOPE_SUBTREE, filter, attrs)
groups = ldap_client.result(id)[1][0][1]['memberOf']
for group in groups:
address = group.split(',')
groupName = address[0].split('=')[1]
if groupName in validEditAccessGroups:
hasEditAccess = True
break
ldap_client.unbind()
return (True, hasEditAccess)
|
<commit_before>import ldap
def checkCredentials(username, password):
if password == "":
return 'Empty Password'
controller = 'devdc'
domainA = 'dev'
domainB = 'devlcdi'
domain = domainA + '.' + domainB
ldapServer = 'ldap://' + controller + '.' + domain
ldapUsername = username + '@' + domain
ldapPassword = password
base_dn = 'DC=' + domainA + ',DC=' + domainB
ldap_filter = 'userPrincipalName=' + ldapUsername
attrs = ['memberOf']
# Note: empty passwords WILL validate with ldap
try:
ldap_client = ldap.initialize(ldapServer)
ldap_client.set_option(ldap.OPT_REFERRALS, 0)
ldap_client.simple_bind_s(ldapUsername, ldapPassword)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind()
return ('Wrong Credentials', False)
except ldap.SERVER_DOWN:
return ('Server Down', False)
hasEditAccess = False
search_dn = "ou=users," + base_dn
scope = ldap.SCOPE_SUBTREE
filterStr = '(objectclass=person)'
attrs = ['sn']
res = ldap_client.search_s(search_dn, scope, filterStr, attrs)
print(res)
ldap_client.unbind()
return (True, hasEditAccess)
<commit_msg>Add group fetching from AD<commit_after>
|
import ldap
validEditAccessGroups = ['Office Assistants', 'Domain Admins']
def checkCredentials(username, password):
if password == "":
return 'Empty Password'
controller = 'devdc'
domainA = 'dev'
domainB = 'devlcdi'
domain = domainA + '.' + domainB
ldapServer = 'ldap://' + controller + '.' + domain
ldapUsername = username + '@' + domain
ldapPassword = password
base_dn = 'DC=' + domainA + ',DC=' + domainB
ldap_filter = 'userPrincipalName=' + ldapUsername
# Note: empty passwords WILL validate with ldap
try:
ldap_client = ldap.initialize(ldapServer)
ldap_client.set_option(ldap.OPT_REFERRALS, 0)
ldap_client.simple_bind_s(ldapUsername, ldapPassword)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind()
return ('Wrong Credentials', False)
except ldap.SERVER_DOWN:
return ('Server Down', False)
hasEditAccess = False
dn = 'cn=Users,' + base_dn
filter = 'cn=' + str(username)
attrs = ['memberOf']
id = ldap_client.search(dn, ldap.SCOPE_SUBTREE, filter, attrs)
groups = ldap_client.result(id)[1][0][1]['memberOf']
for group in groups:
address = group.split(',')
groupName = address[0].split('=')[1]
if groupName in validEditAccessGroups:
hasEditAccess = True
break
ldap_client.unbind()
return (True, hasEditAccess)
|
import ldap
def checkCredentials(username, password):
if password == "":
return 'Empty Password'
controller = 'devdc'
domainA = 'dev'
domainB = 'devlcdi'
domain = domainA + '.' + domainB
ldapServer = 'ldap://' + controller + '.' + domain
ldapUsername = username + '@' + domain
ldapPassword = password
base_dn = 'DC=' + domainA + ',DC=' + domainB
ldap_filter = 'userPrincipalName=' + ldapUsername
attrs = ['memberOf']
# Note: empty passwords WILL validate with ldap
try:
ldap_client = ldap.initialize(ldapServer)
ldap_client.set_option(ldap.OPT_REFERRALS, 0)
ldap_client.simple_bind_s(ldapUsername, ldapPassword)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind()
return ('Wrong Credentials', False)
except ldap.SERVER_DOWN:
return ('Server Down', False)
hasEditAccess = False
search_dn = "ou=users," + base_dn
scope = ldap.SCOPE_SUBTREE
filterStr = '(objectclass=person)'
attrs = ['sn']
res = ldap_client.search_s(search_dn, scope, filterStr, attrs)
print(res)
ldap_client.unbind()
return (True, hasEditAccess)
Add group fetching from ADimport ldap
validEditAccessGroups = ['Office Assistants', 'Domain Admins']
def checkCredentials(username, password):
if password == "":
return 'Empty Password'
controller = 'devdc'
domainA = 'dev'
domainB = 'devlcdi'
domain = domainA + '.' + domainB
ldapServer = 'ldap://' + controller + '.' + domain
ldapUsername = username + '@' + domain
ldapPassword = password
base_dn = 'DC=' + domainA + ',DC=' + domainB
ldap_filter = 'userPrincipalName=' + ldapUsername
# Note: empty passwords WILL validate with ldap
try:
ldap_client = ldap.initialize(ldapServer)
ldap_client.set_option(ldap.OPT_REFERRALS, 0)
ldap_client.simple_bind_s(ldapUsername, ldapPassword)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind()
return ('Wrong Credentials', False)
except ldap.SERVER_DOWN:
return ('Server Down', False)
hasEditAccess = False
dn = 'cn=Users,' + base_dn
filter = 'cn=' + str(username)
attrs = ['memberOf']
id = ldap_client.search(dn, ldap.SCOPE_SUBTREE, filter, attrs)
groups = ldap_client.result(id)[1][0][1]['memberOf']
for group in groups:
address = group.split(',')
groupName = address[0].split('=')[1]
if groupName in validEditAccessGroups:
hasEditAccess = True
break
ldap_client.unbind()
return (True, hasEditAccess)
|
<commit_before>import ldap
def checkCredentials(username, password):
if password == "":
return 'Empty Password'
controller = 'devdc'
domainA = 'dev'
domainB = 'devlcdi'
domain = domainA + '.' + domainB
ldapServer = 'ldap://' + controller + '.' + domain
ldapUsername = username + '@' + domain
ldapPassword = password
base_dn = 'DC=' + domainA + ',DC=' + domainB
ldap_filter = 'userPrincipalName=' + ldapUsername
attrs = ['memberOf']
# Note: empty passwords WILL validate with ldap
try:
ldap_client = ldap.initialize(ldapServer)
ldap_client.set_option(ldap.OPT_REFERRALS, 0)
ldap_client.simple_bind_s(ldapUsername, ldapPassword)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind()
return ('Wrong Credentials', False)
except ldap.SERVER_DOWN:
return ('Server Down', False)
hasEditAccess = False
search_dn = "ou=users," + base_dn
scope = ldap.SCOPE_SUBTREE
filterStr = '(objectclass=person)'
attrs = ['sn']
res = ldap_client.search_s(search_dn, scope, filterStr, attrs)
print(res)
ldap_client.unbind()
return (True, hasEditAccess)
<commit_msg>Add group fetching from AD<commit_after>import ldap
validEditAccessGroups = ['Office Assistants', 'Domain Admins']
def checkCredentials(username, password):
if password == "":
return 'Empty Password'
controller = 'devdc'
domainA = 'dev'
domainB = 'devlcdi'
domain = domainA + '.' + domainB
ldapServer = 'ldap://' + controller + '.' + domain
ldapUsername = username + '@' + domain
ldapPassword = password
base_dn = 'DC=' + domainA + ',DC=' + domainB
ldap_filter = 'userPrincipalName=' + ldapUsername
# Note: empty passwords WILL validate with ldap
try:
ldap_client = ldap.initialize(ldapServer)
ldap_client.set_option(ldap.OPT_REFERRALS, 0)
ldap_client.simple_bind_s(ldapUsername, ldapPassword)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind()
return ('Wrong Credentials', False)
except ldap.SERVER_DOWN:
return ('Server Down', False)
hasEditAccess = False
dn = 'cn=Users,' + base_dn
filter = 'cn=' + str(username)
attrs = ['memberOf']
id = ldap_client.search(dn, ldap.SCOPE_SUBTREE, filter, attrs)
groups = ldap_client.result(id)[1][0][1]['memberOf']
for group in groups:
address = group.split(',')
groupName = address[0].split('=')[1]
if groupName in validEditAccessGroups:
hasEditAccess = True
break
ldap_client.unbind()
return (True, hasEditAccess)
|
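A standalone sketch (not part of the adLDAP.py record above; the sample DNs and group whitelist are made up) of the core idea the commit adds: reducing the raw memberOf distinguished names returned by Active Directory to plain group names that can be checked against a list of groups with edit access.
def group_names(member_of_values):
    # 'CN=Domain Admins,CN=Users,DC=dev,DC=devlcdi' -> 'Domain Admins'
    names = []
    for dn in member_of_values:
        first_rdn = dn.split(',')[0]          # e.g. 'CN=Domain Admins'
        names.append(first_rdn.split('=', 1)[1])
    return names
if __name__ == '__main__':
    sample = ['CN=Office Assistants,CN=Users,DC=dev,DC=devlcdi',
              'CN=Domain Users,CN=Users,DC=dev,DC=devlcdi']
    valid_edit_groups = ['Office Assistants', 'Domain Admins']
    print(any(name in valid_edit_groups for name in group_names(sample)))  # True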
92108cdac6e9324ba9584359b6502e87ce7dcccb
|
owebunit/tests/simple.py
|
owebunit/tests/simple.py
|
import BaseHTTPServer
import threading
import time
import owebunit
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
self.wfile.write("HTTP/1.0 200 OK\n")
self.wfile.write("Content-Type: text/plain\n")
self.wfile.write("\n")
self.wfile.write("Response text")
def run_server():
server_address = ('', 8041)
httpd = BaseHTTPServer.HTTPServer(server_address, Handler)
while True:
httpd.handle_request()
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
|
import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
|
Switch to using bottle in test suite
|
Switch to using bottle in test suite
|
Python
|
bsd-2-clause
|
p/webracer
|
import BaseHTTPServer
import threading
import time
import owebunit
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
self.wfile.write("HTTP/1.0 200 OK\n")
self.wfile.write("Content-Type: text/plain\n")
self.wfile.write("\n")
self.wfile.write("Response text")
def run_server():
server_address = ('', 8041)
httpd = BaseHTTPServer.HTTPServer(server_address, Handler)
while True:
httpd.handle_request()
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
Switch to using bottle in test suite
|
import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
|
<commit_before>import BaseHTTPServer
import threading
import time
import owebunit
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
self.wfile.write("HTTP/1.0 200 OK\n")
self.wfile.write("Content-Type: text/plain\n")
self.wfile.write("\n")
self.wfile.write("Response text")
def run_server():
server_address = ('', 8041)
httpd = BaseHTTPServer.HTTPServer(server_address, Handler)
while True:
httpd.handle_request()
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
<commit_msg>Switch to using bottle in test suite<commit_after>
|
import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
|
import BaseHTTPServer
import threading
import time
import owebunit
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
self.wfile.write("HTTP/1.0 200 OK\n")
self.wfile.write("Content-Type: text/plain\n")
self.wfile.write("\n")
self.wfile.write("Response text")
def run_server():
server_address = ('', 8041)
httpd = BaseHTTPServer.HTTPServer(server_address, Handler)
while True:
httpd.handle_request()
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
Switch to using bottle in test suiteimport BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
|
<commit_before>import BaseHTTPServer
import threading
import time
import owebunit
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
self.wfile.write("HTTP/1.0 200 OK\n")
self.wfile.write("Content-Type: text/plain\n")
self.wfile.write("\n")
self.wfile.write("Response text")
def run_server():
server_address = ('', 8041)
httpd = BaseHTTPServer.HTTPServer(server_address, Handler)
while True:
httpd.handle_request()
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
<commit_msg>Switch to using bottle in test suite<commit_after>import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
|
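A self-contained sketch (assumes only the bottle package; port 8042 is arbitrary) of the pattern this commit switches the test suite to: route handlers registered on a Bottle application, served from a daemon thread so tests can issue real HTTP requests without keeping the interpreter alive.
import threading
import time
import urllib2  # urllib.request in Python 3
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
    return 'ok'
def serve():
    # quiet=True keeps the wsgiref request log out of the test output
    app.run(host='localhost', port=8042, quiet=True)
server_thread = threading.Thread(target=serve)
server_thread.daemon = True   # do not block process exit on the server loop
server_thread.start()
time.sleep(0.1)               # crude wait for the listening socket, as in the record above
print(urllib2.urlopen('http://localhost:8042/ok').read())  # ok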
2747b9a5fb480176df2a8910156c9e56dd9fcd1a
|
src/commit_id.py
|
src/commit_id.py
|
import subprocess as sp
import sys
def grab_output(*command):
return sp.Popen(command, stdout=sp.PIPE).communicate()[0].strip()
commit_id_size = 12
try:
commit_id = grab_output('gat', 'rev-parse', '--short=%d' % commit_id_size, 'HEAD')
commit_date = grab_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except:
commit_id = 'invalid-hash'
commit_date = 'invalid-date'
hfile = open(sys.argv[1], 'w')
hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
hfile.close()
|
import subprocess as sp
import sys
def grab_output(*command):
return sp.Popen(command, stdout=sp.PIPE).communicate()[0].strip()
commit_id_size = 12
try:
commit_id = grab_output('git', 'rev-parse', '--short=%d' % commit_id_size, 'HEAD')
commit_date = grab_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except:
commit_id = 'invalid-hash'
commit_date = 'invalid-date'
hfile = open(sys.argv[1], 'w')
hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
hfile.close()
|
Fix typo in Python commit id script.
|
Fix typo in Python commit id script.
BUG=angle:529
Change-Id: Ife174e3fb4cb32342f519691d1d5d5c015cf2727
Reviewed-on: https://chromium-review.googlesource.com/187541
Reviewed-by: Nicolas Capens <51edc787c30f24c4904e8ffbb5523c3a9f7a451d@chromium.org>
Reviewed-by: Shannon Woods <0f3694938fd5703bd548127ee497e4d604509bef@chromium.org>
Tested-by: Jamie Madill <7e492b4f1c8458024932de3ba475cbf015424c30@chromium.org>
|
Python
|
bsd-3-clause
|
geekboxzone/lollipop_external_chromium_org_third_party_angle,android-ia/platform_external_chromium_org_third_party_angle,geekboxzone/lollipop_external_chromium_org_third_party_angle,geekboxzone/lollipop_external_chromium_org_third_party_angle,xin3liang/platform_external_chromium_org_third_party_angle,geekboxzone/lollipop_external_chromium_org_third_party_angle,xin3liang/platform_external_chromium_org_third_party_angle,xin3liang/platform_external_chromium_org_third_party_angle,jgcaaprom/android_external_chromium_org_third_party_angle,MIPS/external-chromium_org-third_party-angle,jgcaaprom/android_external_chromium_org_third_party_angle,android-ia/platform_external_chromium_org_third_party_angle,MIPS/external-chromium_org-third_party-angle,android-ia/platform_external_chromium_org_third_party_angle,MIPS/external-chromium_org-third_party-angle,android-ia/platform_external_chromium_org_third_party_angle,jgcaaprom/android_external_chromium_org_third_party_angle,xin3liang/platform_external_chromium_org_third_party_angle,MIPS/external-chromium_org-third_party-angle,jgcaaprom/android_external_chromium_org_third_party_angle
|
import subprocess as sp
import sys
def grab_output(*command):
return sp.Popen(command, stdout=sp.PIPE).communicate()[0].strip()
commit_id_size = 12
try:
commit_id = grab_output('gat', 'rev-parse', '--short=%d' % commit_id_size, 'HEAD')
commit_date = grab_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except:
commit_id = 'invalid-hash'
commit_date = 'invalid-date'
hfile = open(sys.argv[1], 'w')
hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
hfile.close()
Fix typo in Python commit id script.
BUG=angle:529
Change-Id: Ife174e3fb4cb32342f519691d1d5d5c015cf2727
Reviewed-on: https://chromium-review.googlesource.com/187541
Reviewed-by: Nicolas Capens <51edc787c30f24c4904e8ffbb5523c3a9f7a451d@chromium.org>
Reviewed-by: Shannon Woods <0f3694938fd5703bd548127ee497e4d604509bef@chromium.org>
Tested-by: Jamie Madill <7e492b4f1c8458024932de3ba475cbf015424c30@chromium.org>
|
import subprocess as sp
import sys
def grab_output(*command):
return sp.Popen(command, stdout=sp.PIPE).communicate()[0].strip()
commit_id_size = 12
try:
commit_id = grab_output('git', 'rev-parse', '--short=%d' % commit_id_size, 'HEAD')
commit_date = grab_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except:
commit_id = 'invalid-hash'
commit_date = 'invalid-date'
hfile = open(sys.argv[1], 'w')
hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
hfile.close()
|
<commit_before>import subprocess as sp
import sys
def grab_output(*command):
return sp.Popen(command, stdout=sp.PIPE).communicate()[0].strip()
commit_id_size = 12
try:
commit_id = grab_output('gat', 'rev-parse', '--short=%d' % commit_id_size, 'HEAD')
commit_date = grab_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except:
commit_id = 'invalid-hash'
commit_date = 'invalid-date'
hfile = open(sys.argv[1], 'w')
hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
hfile.close()
<commit_msg>Fix typo in Python commit id script.
BUG=angle:529
Change-Id: Ife174e3fb4cb32342f519691d1d5d5c015cf2727
Reviewed-on: https://chromium-review.googlesource.com/187541
Reviewed-by: Nicolas Capens <51edc787c30f24c4904e8ffbb5523c3a9f7a451d@chromium.org>
Reviewed-by: Shannon Woods <0f3694938fd5703bd548127ee497e4d604509bef@chromium.org>
Tested-by: Jamie Madill <7e492b4f1c8458024932de3ba475cbf015424c30@chromium.org><commit_after>
|
import subprocess as sp
import sys
def grab_output(*command):
return sp.Popen(command, stdout=sp.PIPE).communicate()[0].strip()
commit_id_size = 12
try:
commit_id = grab_output('git', 'rev-parse', '--short=%d' % commit_id_size, 'HEAD')
commit_date = grab_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except:
commit_id = 'invalid-hash'
commit_date = 'invalid-date'
hfile = open(sys.argv[1], 'w')
hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
hfile.close()
|
import subprocess as sp
import sys
def grab_output(*command):
return sp.Popen(command, stdout=sp.PIPE).communicate()[0].strip()
commit_id_size = 12
try:
commit_id = grab_output('gat', 'rev-parse', '--short=%d' % commit_id_size, 'HEAD')
commit_date = grab_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except:
commit_id = 'invalid-hash'
commit_date = 'invalid-date'
hfile = open(sys.argv[1], 'w')
hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
hfile.close()
Fix typo in Python commit id script.
BUG=angle:529
Change-Id: Ife174e3fb4cb32342f519691d1d5d5c015cf2727
Reviewed-on: https://chromium-review.googlesource.com/187541
Reviewed-by: Nicolas Capens <51edc787c30f24c4904e8ffbb5523c3a9f7a451d@chromium.org>
Reviewed-by: Shannon Woods <0f3694938fd5703bd548127ee497e4d604509bef@chromium.org>
Tested-by: Jamie Madill <7e492b4f1c8458024932de3ba475cbf015424c30@chromium.org>import subprocess as sp
import sys
def grab_output(*command):
return sp.Popen(command, stdout=sp.PIPE).communicate()[0].strip()
commit_id_size = 12
try:
commit_id = grab_output('git', 'rev-parse', '--short=%d' % commit_id_size, 'HEAD')
commit_date = grab_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except:
commit_id = 'invalid-hash'
commit_date = 'invalid-date'
hfile = open(sys.argv[1], 'w')
hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
hfile.close()
|
<commit_before>import subprocess as sp
import sys
def grab_output(*command):
return sp.Popen(command, stdout=sp.PIPE).communicate()[0].strip()
commit_id_size = 12
try:
commit_id = grab_output('gat', 'rev-parse', '--short=%d' % commit_id_size, 'HEAD')
commit_date = grab_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except:
commit_id = 'invalid-hash'
commit_date = 'invalid-date'
hfile = open(sys.argv[1], 'w')
hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
hfile.close()
<commit_msg>Fix typo in Python commit id script.
BUG=angle:529
Change-Id: Ife174e3fb4cb32342f519691d1d5d5c015cf2727
Reviewed-on: https://chromium-review.googlesource.com/187541
Reviewed-by: Nicolas Capens <51edc787c30f24c4904e8ffbb5523c3a9f7a451d@chromium.org>
Reviewed-by: Shannon Woods <0f3694938fd5703bd548127ee497e4d604509bef@chromium.org>
Tested-by: Jamie Madill <7e492b4f1c8458024932de3ba475cbf015424c30@chromium.org><commit_after>import subprocess as sp
import sys
def grab_output(*command):
return sp.Popen(command, stdout=sp.PIPE).communicate()[0].strip()
commit_id_size = 12
try:
commit_id = grab_output('git', 'rev-parse', '--short=%d' % commit_id_size, 'HEAD')
commit_date = grab_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except:
commit_id = 'invalid-hash'
commit_date = 'invalid-date'
hfile = open(sys.argv[1], 'w')
hfile.write('#define ANGLE_COMMIT_HASH "%s"\n' % commit_id)
hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
hfile.write('#define ANGLE_COMMIT_DATE "%s"\n' % commit_date)
hfile.close()
|
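An illustrative variant (not the ANGLE script itself) of the same recipe: ask git for a short hash and the commit date via subprocess, and fall back to placeholder values when git is unavailable or the tree is not a repository.
import subprocess
def git_output(*command):
    return subprocess.check_output(command).decode().strip()
try:
    commit_id = git_output('git', 'rev-parse', '--short=12', 'HEAD')
    commit_date = git_output('git', 'show', '-s', '--format=%ci', 'HEAD')
except (OSError, subprocess.CalledProcessError):
    commit_id, commit_date = 'invalid-hash', 'invalid-date'
print('%s %s' % (commit_id, commit_date))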
f90cc1b24910b1c6214bdda3c2de831b5b507e01
|
factual/common/responses.py
|
factual/common/responses.py
|
import logging
import exceptions
class Response(object):
def __init__(self, body, meta=None):
self.meta = meta
self.body = body
# todo: handle non-"ok" status
self.status = body.get("status", None)
self.version = body.get("version", None)
self.response = body.get("response", {})
if self.status == "error":
raise exceptions.FactualError(body.get("error"))
def __repr__(self):
if len(self.response) > 3:
response_repr = "%d records in response" % len(self.response)
else:
response_repr = self.response.__repr__()
return "FactualResponse(%s, v%s, %s)" % (self.status, self.version, response_repr)
class ReadResponse(Response):
pass
class SchemaResponse(Response):
pass
|
import logging
import exceptions
class Response(object):
def __init__(self, body, meta=None):
self.meta = meta
self.body = body
# todo: handle non-"ok" status
self.status = body.get("status", None)
self.version = body.get("version", None)
self.response = body.get("response", {})
if self.status == "error":
error = "\"%s\" error: %s" % (body.get("error_type"), body.get("message"))
raise exceptions.FactualError(error)
def __repr__(self):
if len(self.response) > 3:
response_repr = "%d records in response" % len(self.response)
else:
response_repr = self.response.__repr__()
return "FactualResponse(%s, v%s, %s)" % (self.status, self.version, response_repr)
class ReadResponse(Response):
pass
class SchemaResponse(Response):
pass
|
Fix parsing of errors returned by API.
|
Fix parsing of errors returned by API.
|
Python
|
bsd-2-clause
|
casebeer/factual
|
import logging
import exceptions
class Response(object):
def __init__(self, body, meta=None):
self.meta = meta
self.body = body
# todo: handle non-"ok" status
self.status = body.get("status", None)
self.version = body.get("version", None)
self.response = body.get("response", {})
if self.status == "error":
raise exceptions.FactualError(body.get("error"))
def __repr__(self):
if len(self.response) > 3:
response_repr = "%d records in response" % len(self.response)
else:
response_repr = self.response.__repr__()
return "FactualResponse(%s, v%s, %s)" % (self.status, self.version, response_repr)
class ReadResponse(Response):
pass
class SchemaResponse(Response):
pass
Fix parsing of errors returned by API.
|
import logging
import exceptions
class Response(object):
def __init__(self, body, meta=None):
self.meta = meta
self.body = body
# todo: handle non-"ok" status
self.status = body.get("status", None)
self.version = body.get("version", None)
self.response = body.get("response", {})
if self.status == "error":
error = "\"%s\" error: %s" % (body.get("error_type"), body.get("message"))
raise exceptions.FactualError(error)
def __repr__(self):
if len(self.response) > 3:
response_repr = "%d records in response" % len(self.response)
else:
response_repr = self.response.__repr__()
return "FactualResponse(%s, v%s, %s)" % (self.status, self.version, response_repr)
class ReadResponse(Response):
pass
class SchemaResponse(Response):
pass
|
<commit_before>import logging
import exceptions
class Response(object):
def __init__(self, body, meta=None):
self.meta = meta
self.body = body
# todo: handle non-"ok" status
self.status = body.get("status", None)
self.version = body.get("version", None)
self.response = body.get("response", {})
if self.status == "error":
raise exceptions.FactualError(body.get("error"))
def __repr__(self):
if len(self.response) > 3:
response_repr = "%d records in response" % len(self.response)
else:
response_repr = self.response.__repr__()
return "FactualResponse(%s, v%s, %s)" % (self.status, self.version, response_repr)
class ReadResponse(Response):
pass
class SchemaResponse(Response):
pass
<commit_msg>Fix parsing of errors returned by API.<commit_after>
|
import logging
import exceptions
class Response(object):
def __init__(self, body, meta=None):
self.meta = meta
self.body = body
# todo: handle non-"ok" status
self.status = body.get("status", None)
self.version = body.get("version", None)
self.response = body.get("response", {})
if self.status == "error":
error = "\"%s\" error: %s" % (body.get("error_type"), body.get("message"))
raise exceptions.FactualError(error)
def __repr__(self):
if len(self.response) > 3:
response_repr = "%d records in response" % len(self.response)
else:
response_repr = self.response.__repr__()
return "FactualResponse(%s, v%s, %s)" % (self.status, self.version, response_repr)
class ReadResponse(Response):
pass
class SchemaResponse(Response):
pass
|
import logging
import exceptions
class Response(object):
def __init__(self, body, meta=None):
self.meta = meta
self.body = body
# todo: handle non-"ok" status
self.status = body.get("status", None)
self.version = body.get("version", None)
self.response = body.get("response", {})
if self.status == "error":
raise exceptions.FactualError(body.get("error"))
def __repr__(self):
if len(self.response) > 3:
response_repr = "%d records in response" % len(self.response)
else:
response_repr = self.response.__repr__()
return "FactualResponse(%s, v%s, %s)" % (self.status, self.version, response_repr)
class ReadResponse(Response):
pass
class SchemaResponse(Response):
pass
Fix parsing of errors returned by API.import logging
import exceptions
class Response(object):
def __init__(self, body, meta=None):
self.meta = meta
self.body = body
# todo: handle non-"ok" status
self.status = body.get("status", None)
self.version = body.get("version", None)
self.response = body.get("response", {})
if self.status == "error":
error = "\"%s\" error: %s" % (body.get("error_type"), body.get("message"))
raise exceptions.FactualError(error)
def __repr__(self):
if len(self.response) > 3:
response_repr = "%d records in response" % len(self.response)
else:
response_repr = self.response.__repr__()
return "FactualResponse(%s, v%s, %s)" % (self.status, self.version, response_repr)
class ReadResponse(Response):
pass
class SchemaResponse(Response):
pass
|
<commit_before>import logging
import exceptions
class Response(object):
def __init__(self, body, meta=None):
self.meta = meta
self.body = body
# todo: handle non-"ok" status
self.status = body.get("status", None)
self.version = body.get("version", None)
self.response = body.get("response", {})
if self.status == "error":
raise exceptions.FactualError(body.get("error"))
def __repr__(self):
if len(self.response) > 3:
response_repr = "%d records in response" % len(self.response)
else:
response_repr = self.response.__repr__()
return "FactualResponse(%s, v%s, %s)" % (self.status, self.version, response_repr)
class ReadResponse(Response):
pass
class SchemaResponse(Response):
pass
<commit_msg>Fix parsing of errors returned by API.<commit_after>import logging
import exceptions
class Response(object):
def __init__(self, body, meta=None):
self.meta = meta
self.body = body
# todo: handle non-"ok" status
self.status = body.get("status", None)
self.version = body.get("version", None)
self.response = body.get("response", {})
if self.status == "error":
error = "\"%s\" error: %s" % (body.get("error_type"), body.get("message"))
raise exceptions.FactualError(error)
def __repr__(self):
if len(self.response) > 3:
response_repr = "%d records in response" % len(self.response)
else:
response_repr = self.response.__repr__()
return "FactualResponse(%s, v%s, %s)" % (self.status, self.version, response_repr)
class ReadResponse(Response):
pass
class SchemaResponse(Response):
pass
|
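A minimal sketch (the sample payload is invented) of the error handling this commit introduces: when the API body carries status == "error", compose the exception text from the error_type and message fields rather than the non-existent error key.
class FactualError(Exception):
    pass
def check_status(body):
    if body.get("status") == "error":
        raise FactualError('"%s" error: %s' % (body.get("error_type"),
                                               body.get("message")))
try:
    check_status({"status": "error",
                  "error_type": "Auth",
                  "message": "invalid OAuth credentials"})
except FactualError as e:
    print(e)  # "Auth" error: invalid OAuth credentials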
6daa0f1aa06092598892cb37a7f3c5f3541fa0c2
|
cms/manage.py
|
cms/manage.py
|
#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
|
#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. "
"It appears you've customized things.\nYou'll have to run django-admin.py, "
"passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
|
Fix string layout for readability
|
Fix string layout for readability
|
Python
|
agpl-3.0
|
prarthitm/edxplatform,nanolearningllc/edx-platform-cypress,benpatterson/edx-platform,mjirayu/sit_academy,yokose-ks/edx-platform,Edraak/edx-platform,10clouds/edx-platform,raccoongang/edx-platform,kursitet/edx-platform,jolyonb/edx-platform,cecep-edu/edx-platform,Edraak/circleci-edx-platform,nttks/edx-platform,ampax/edx-platform-backup,jbassen/edx-platform,CourseTalk/edx-platform,shurihell/testasia,nikolas/edx-platform,nttks/edx-platform,praveen-pal/edx-platform,dsajkl/123,iivic/BoiseStateX,shubhdev/edx-platform,xuxiao19910803/edx-platform,gymnasium/edx-platform,edx-solutions/edx-platform,philanthropy-u/edx-platform,don-github/edx-platform,mjirayu/sit_academy,utecuy/edx-platform,IndonesiaX/edx-platform,jazztpt/edx-platform,Livit/Livit.Learn.EdX,ESOedX/edx-platform,shabab12/edx-platform,CredoReference/edx-platform,mjg2203/edx-platform-seas,cpennington/edx-platform,TsinghuaX/edx-platform,knehez/edx-platform,appsembler/edx-platform,kxliugang/edx-platform,LearnEra/LearnEraPlaftform,beacloudgenius/edx-platform,solashirai/edx-platform,shurihell/testasia,deepsrijit1105/edx-platform,solashirai/edx-platform,Shrhawk/edx-platform,jazkarta/edx-platform,Endika/edx-platform,RPI-OPENEDX/edx-platform,LICEF/edx-platform,WatanabeYasumasa/edx-platform,alexthered/kienhoc-platform,CourseTalk/edx-platform,halvertoluke/edx-platform,jswope00/griffinx,hamzehd/edx-platform,franosincic/edx-platform,devs1991/test_edx_docmode,mushtaqak/edx-platform,nttks/jenkins-test,beni55/edx-platform,hastexo/edx-platform,JioEducation/edx-platform,analyseuc3m/ANALYSE-v1,raccoongang/edx-platform,bitifirefly/edx-platform,xuxiao19910803/edx,IITBinterns13/edx-platform-dev,shabab12/edx-platform,dkarakats/edx-platform,halvertoluke/edx-platform,peterm-itr/edx-platform,cselis86/edx-platform,eduNEXT/edunext-platform,amir-qayyum-khan/edx-platform,ZLLab-Mooc/edx-platform,unicri/edx-platform,gsehub/edx-platform,mtlchun/edx,dsajkl/reqiop,atsolakid/edx-platform,pdehaye/theming-edx-platform,don-github/edx-platform,kursitet/edx-platform,jbzdak/edx-platform,shubhdev/edxOnBaadal,10clouds/edx-platform,mjg2203/edx-platform-seas,nanolearningllc/edx-platform-cypress,jelugbo/tundex,deepsrijit1105/edx-platform,mushtaqak/edx-platform,mitocw/edx-platform,nanolearning/edx-platform,leansoft/edx-platform,martynovp/edx-platform,bitifirefly/edx-platform,hkawasaki/kawasaki-aio8-0,fly19890211/edx-platform,simbs/edx-platform,edx/edx-platform,caesar2164/edx-platform,nanolearningllc/edx-platform-cypress,EduPepperPDTesting/pepper2013-testing,chauhanhardik/populo_2,Lektorium-LLC/edx-platform,eestay/edx-platform,motion2015/a3,torchingloom/edx-platform,simbs/edx-platform,B-MOOC/edx-platform,philanthropy-u/edx-platform,don-github/edx-platform,mitocw/edx-platform,eduNEXT/edx-platform,jswope00/griffinx,morenopc/edx-platform,jbzdak/edx-platform,valtech-mooc/edx-platform,knehez/edx-platform,chand3040/cloud_that,ZLLab-Mooc/edx-platform,rhndg/openedx,chudaol/edx-platform,vasyarv/edx-platform,fintech-circle/edx-platform,dsajkl/123,ampax/edx-platform,lduarte1991/edx-platform,beacloudgenius/edx-platform,jazkarta/edx-platform,JCBarahona/edX,xingyepei/edx-platform,arifsetiawan/edx-platform,nagyistoce/edx-platform,praveen-pal/edx-platform,gsehub/edx-platform,inares/edx-platform,longmen21/edx-platform,kxliugang/edx-platform,rue89-tech/edx-platform,utecuy/edx-platform,TeachAtTUM/edx-platform,waheedahmed/edx-platform,pku9104038/edx-platform,Softmotions/edx-platform,jruiperezv/ANALYSE,jbassen/edx-platform,vasyarv/edx-platform,yokose-ks/edx-platform,ahmadiga/min_edx,vismartltd/edx-platform,hka
wasaki/kawasaki-aio8-0,Unow/edx-platform,naresh21/synergetics-edx-platform,cselis86/edx-platform,waheedahmed/edx-platform,kmoocdev2/edx-platform,devs1991/test_edx_docmode,AkA84/edx-platform,PepperPD/edx-pepper-platform,doismellburning/edx-platform,syjeon/new_edx,MakeHer/edx-platform,msegado/edx-platform,jamesblunt/edx-platform,nttks/jenkins-test,carsongee/edx-platform,hamzehd/edx-platform,hkawasaki/kawasaki-aio8-1,ahmadio/edx-platform,ak2703/edx-platform,hkawasaki/kawasaki-aio8-0,lduarte1991/edx-platform,defance/edx-platform,Edraak/edraak-platform,ovnicraft/edx-platform,kmoocdev2/edx-platform,leansoft/edx-platform,Semi-global/edx-platform,deepsrijit1105/edx-platform,Softmotions/edx-platform,SivilTaram/edx-platform,jazkarta/edx-platform-for-isc,nanolearningllc/edx-platform-cypress,proversity-org/edx-platform,ahmadiga/min_edx,devs1991/test_edx_docmode,xuxiao19910803/edx-platform,Stanford-Online/edx-platform,mjirayu/sit_academy,leansoft/edx-platform,LearnEra/LearnEraPlaftform,Shrhawk/edx-platform,xinjiguaike/edx-platform,franosincic/edx-platform,jazkarta/edx-platform,jazkarta/edx-platform-for-isc,edx/edx-platform,J861449197/edx-platform,olexiim/edx-platform,dcosentino/edx-platform,torchingloom/edx-platform,vismartltd/edx-platform,marcore/edx-platform,nagyistoce/edx-platform,devs1991/test_edx_docmode,antoviaque/edx-platform,zhenzhai/edx-platform,appliedx/edx-platform,beacloudgenius/edx-platform,ubc/edx-platform,inares/edx-platform,shubhdev/openedx,longmen21/edx-platform,chrisndodge/edx-platform,Shrhawk/edx-platform,Edraak/circleci-edx-platform,jonathan-beard/edx-platform,atsolakid/edx-platform,TsinghuaX/edx-platform,msegado/edx-platform,cecep-edu/edx-platform,chauhanhardik/populo,waheedahmed/edx-platform,etzhou/edx-platform,philanthropy-u/edx-platform,nikolas/edx-platform,caesar2164/edx-platform,procangroup/edx-platform,chrisndodge/edx-platform,benpatterson/edx-platform,AkA84/edx-platform,pepeportela/edx-platform,doismellburning/edx-platform,JCBarahona/edX,morpheby/levelup-by,iivic/BoiseStateX,sudheerchintala/LearnEraPlatForm,EDUlib/edx-platform,cognitiveclass/edx-platform,eduNEXT/edx-platform,CredoReference/edx-platform,abdoosh00/edx-rtl-final,hastexo/edx-platform,etzhou/edx-platform,chauhanhardik/populo_2,dkarakats/edx-platform,ahmedaljazzar/edx-platform,waheedahmed/edx-platform,procangroup/edx-platform,synergeticsedx/deployment-wipro,alu042/edx-platform,unicri/edx-platform,cyanna/edx-platform,ampax/edx-platform,hmcmooc/muddx-platform,MSOpenTech/edx-platform,vikas1885/test1,jelugbo/tundex,olexiim/edx-platform,hkawasaki/kawasaki-aio8-2,mtlchun/edx,zubair-arbi/edx-platform,ovnicraft/edx-platform,polimediaupv/edx-platform,pomegranited/edx-platform,ovnicraft/edx-platform,iivic/BoiseStateX,shubhdev/edxOnBaadal,abdoosh00/edx-rtl-final,rismalrv/edx-platform,gymnasium/edx-platform,hastexo/edx-platform,bitifirefly/edx-platform,angelapper/edx-platform,IONISx/edx-platform,marcore/edx-platform,chudaol/edx-platform,jamesblunt/edx-platform,pabloborrego93/edx-platform,jswope00/GAI,wwj718/ANALYSE,teltek/edx-platform,cecep-edu/edx-platform,shubhdev/openedx,ahmadio/edx-platform,apigee/edx-platform,kmoocdev2/edx-platform,BehavioralInsightsTeam/edx-platform,arbrandes/edx-platform,teltek/edx-platform,wwj718/ANALYSE,doismellburning/edx-platform,edx/edx-platform,msegado/edx-platform,edry/edx-platform,motion2015/a3,Lektorium-LLC/edx-platform,zerobatu/edx-platform,beni55/edx-platform,cyanna/edx-platform,LearnEra/LearnEraPlaftform,fintech-circle/edx-platform,pku9104038/edx-platform,pku9104038/edx-platform,vikas1885/te
st1,kmoocdev/edx-platform,kalebhartje/schoolboost,LICEF/edx-platform,4eek/edx-platform,chauhanhardik/populo_2,Kalyzee/edx-platform,teltek/edx-platform,devs1991/test_edx_docmode,nikolas/edx-platform,jbassen/edx-platform,adoosii/edx-platform,a-parhom/edx-platform,alu042/edx-platform,Semi-global/edx-platform,jzoldak/edx-platform,sameetb-cuelogic/edx-platform-test,zofuthan/edx-platform,arifsetiawan/edx-platform,mcgachey/edx-platform,10clouds/edx-platform,appliedx/edx-platform,dsajkl/reqiop,ZLLab-Mooc/edx-platform,waheedahmed/edx-platform,mahendra-r/edx-platform,tanmaykm/edx-platform,abdoosh00/edraak,appliedx/edx-platform,shubhdev/edx-platform,deepsrijit1105/edx-platform,AkA84/edx-platform,polimediaupv/edx-platform,CredoReference/edx-platform,jjmiranda/edx-platform,rue89-tech/edx-platform,Semi-global/edx-platform,adoosii/edx-platform,dcosentino/edx-platform,amir-qayyum-khan/edx-platform,shubhdev/edx-platform,analyseuc3m/ANALYSE-v1,jonathan-beard/edx-platform,tiagochiavericosta/edx-platform,pomegranited/edx-platform,Edraak/edx-platform,cselis86/edx-platform,msegado/edx-platform,tiagochiavericosta/edx-platform,cecep-edu/edx-platform,gsehub/edx-platform,shubhdev/openedx,rue89-tech/edx-platform,syjeon/new_edx,Ayub-Khan/edx-platform,dkarakats/edx-platform,ahmadio/edx-platform,nanolearning/edx-platform,Softmotions/edx-platform,hkawasaki/kawasaki-aio8-2,eemirtekin/edx-platform,TeachAtTUM/edx-platform,Shrhawk/edx-platform,chauhanhardik/populo,fintech-circle/edx-platform,cognitiveclass/edx-platform,rationalAgent/edx-platform-custom,OmarIthawi/edx-platform,DNFcode/edx-platform,shashank971/edx-platform,sameetb-cuelogic/edx-platform-test,ESOedX/edx-platform,ampax/edx-platform,Stanford-Online/edx-platform,rismalrv/edx-platform,nanolearningllc/edx-platform-cypress-2,naresh21/synergetics-edx-platform,shashank971/edx-platform,inares/edx-platform,jazztpt/edx-platform,louyihua/edx-platform,jolyonb/edx-platform,arifsetiawan/edx-platform,abdoosh00/edraak,tanmaykm/edx-platform,proversity-org/edx-platform,ubc/edx-platform,simbs/edx-platform,beacloudgenius/edx-platform,rationalAgent/edx-platform-custom,vasyarv/edx-platform,beni55/edx-platform,IONISx/edx-platform,dkarakats/edx-platform,ahmedaljazzar/edx-platform,unicri/edx-platform,MakeHer/edx-platform,DefyVentures/edx-platform,kmoocdev2/edx-platform,wwj718/ANALYSE,xuxiao19910803/edx-platform,synergeticsedx/deployment-wipro,nanolearningllc/edx-platform-cypress,pomegranited/edx-platform,eduNEXT/edx-platform,louyihua/edx-platform,polimediaupv/edx-platform,bitifirefly/edx-platform,peterm-itr/edx-platform,shubhdev/edx-platform,andyzsf/edx,marcore/edx-platform,alu042/edx-platform,ubc/edx-platform,doganov/edx-platform,rhndg/openedx,pabloborrego93/edx-platform,romain-li/edx-platform,Unow/edx-platform,unicri/edx-platform,vismartltd/edx-platform,etzhou/edx-platform,doganov/edx-platform,OmarIthawi/edx-platform,pomegranited/edx-platform,Ayub-Khan/edx-platform,zadgroup/edx-platform,kalebhartje/schoolboost,Semi-global/edx-platform,etzhou/edx-platform,eduNEXT/edunext-platform,beacloudgenius/edx-platform,BehavioralInsightsTeam/edx-platform,beni55/edx-platform,shashank971/edx-platform,motion2015/edx-platform,morenopc/edx-platform,vismartltd/edx-platform,nanolearningllc/edx-platform-cypress-2,JCBarahona/edX,LearnEra/LearnEraPlaftform,Endika/edx-platform,kursitet/edx-platform,appsembler/edx-platform,nikolas/edx-platform,etzhou/edx-platform,andyzsf/edx,chauhanhardik/populo,zerobatu/edx-platform,nanolearning/edx-platform,teltek/edx-platform,PepperPD/edx-pepper-platform,vikas1885/test1,J86
1449197/edx-platform,peterm-itr/edx-platform,shubhdev/edxOnBaadal,openfun/edx-platform,angelapper/edx-platform,valtech-mooc/edx-platform,stvstnfrd/edx-platform,doganov/edx-platform,Semi-global/edx-platform,jbassen/edx-platform,chand3040/cloud_that,dsajkl/reqiop,EduPepperPD/pepper2013,rationalAgent/edx-platform-custom,martynovp/edx-platform,shabab12/edx-platform,nttks/edx-platform,bdero/edx-platform,dsajkl/123,B-MOOC/edx-platform,don-github/edx-platform,ovnicraft/edx-platform,shurihell/testasia,B-MOOC/edx-platform,SravanthiSinha/edx-platform,fly19890211/edx-platform,chand3040/cloud_that,jamiefolsom/edx-platform,martynovp/edx-platform,zhenzhai/edx-platform,IndonesiaX/edx-platform,ESOedX/edx-platform,TeachAtTUM/edx-platform,pepeportela/edx-platform,kursitet/edx-platform,halvertoluke/edx-platform,chauhanhardik/populo_2,ak2703/edx-platform,cognitiveclass/edx-platform,edry/edx-platform,devs1991/test_edx_docmode,synergeticsedx/deployment-wipro,jonathan-beard/edx-platform,Unow/edx-platform,nttks/jenkins-test,DNFcode/edx-platform,EduPepperPDTesting/pepper2013-testing,bigdatauniversity/edx-platform,a-parhom/edx-platform,zubair-arbi/edx-platform,morenopc/edx-platform,Stanford-Online/edx-platform,EduPepperPDTesting/pepper2013-testing,y12uc231/edx-platform,DefyVentures/edx-platform,shashank971/edx-platform,Edraak/edx-platform,ahmadio/edx-platform,romain-li/edx-platform,arbrandes/edx-platform,raccoongang/edx-platform,Softmotions/edx-platform,ahmadiga/min_edx,caesar2164/edx-platform,kxliugang/edx-platform,jzoldak/edx-platform,kamalx/edx-platform,BehavioralInsightsTeam/edx-platform,romain-li/edx-platform,zofuthan/edx-platform,ubc/edx-platform,Ayub-Khan/edx-platform,hastexo/edx-platform,adoosii/edx-platform,knehez/edx-platform,kmoocdev/edx-platform,kamalx/edx-platform,zubair-arbi/edx-platform,MSOpenTech/edx-platform,chauhanhardik/populo_2,nagyistoce/edx-platform,edry/edx-platform,auferack08/edx-platform,prarthitm/edxplatform,IndonesiaX/edx-platform,edx-solutions/edx-platform,itsjeyd/edx-platform,EDUlib/edx-platform,vasyarv/edx-platform,LICEF/edx-platform,unicri/edx-platform,chudaol/edx-platform,mushtaqak/edx-platform,dcosentino/edx-platform,sudheerchintala/LearnEraPlatForm,MakeHer/edx-platform,Lektorium-LLC/edx-platform,DefyVentures/edx-platform,mbareta/edx-platform-ft,kxliugang/edx-platform,MakeHer/edx-platform,syjeon/new_edx,y12uc231/edx-platform,shubhdev/edxOnBaadal,syjeon/new_edx,tiagochiavericosta/edx-platform,UOMx/edx-platform,don-github/edx-platform,torchingloom/edx-platform,morpheby/levelup-by,vismartltd/edx-platform,eduNEXT/edunext-platform,franosincic/edx-platform,Edraak/edraak-platform,mtlchun/edx,antoviaque/edx-platform,hmcmooc/muddx-platform,SravanthiSinha/edx-platform,wwj718/ANALYSE,10clouds/edx-platform,cpennington/edx-platform,halvertoluke/edx-platform,benpatterson/edx-platform,antonve/s4-project-mooc,4eek/edx-platform,zerobatu/edx-platform,SravanthiSinha/edx-platform,jelugbo/tundex,peterm-itr/edx-platform,leansoft/edx-platform,arbrandes/edx-platform,atsolakid/edx-platform,nanolearning/edx-platform,solashirai/edx-platform,motion2015/edx-platform,J861449197/edx-platform,Unow/edx-platform,zubair-arbi/edx-platform,pomegranited/edx-platform,zadgroup/edx-platform,nagyistoce/edx-platform,UXE/local-edx,doismellburning/edx-platform,cyanna/edx-platform,chudaol/edx-platform,jjmiranda/edx-platform,jamesblunt/edx-platform,pabloborrego93/edx-platform,DefyVentures/edx-platform,valtech-mooc/edx-platform,antonve/s4-project-mooc,jazztpt/edx-platform,mahendra-r/edx-platform,carsongee/edx-platform,cognitivecla
ss/edx-platform,Edraak/edx-platform,procangroup/edx-platform,pdehaye/theming-edx-platform,antoviaque/edx-platform,nikolas/edx-platform,CourseTalk/edx-platform,eduNEXT/edunext-platform,RPI-OPENEDX/edx-platform,playm2mboy/edx-platform,pabloborrego93/edx-platform,morpheby/levelup-by,franosincic/edx-platform,pdehaye/theming-edx-platform,zofuthan/edx-platform,playm2mboy/edx-platform,PepperPD/edx-pepper-platform,naresh21/synergetics-edx-platform,xingyepei/edx-platform,arbrandes/edx-platform,jazkarta/edx-platform,B-MOOC/edx-platform,BehavioralInsightsTeam/edx-platform,openfun/edx-platform,stvstnfrd/edx-platform,jbzdak/edx-platform,miptliot/edx-platform,zhenzhai/edx-platform,kmoocdev/edx-platform,playm2mboy/edx-platform,pelikanchik/edx-platform,yokose-ks/edx-platform,jbzdak/edx-platform,wwj718/edx-platform,wwj718/edx-platform,pepeportela/edx-platform,rue89-tech/edx-platform,IndonesiaX/edx-platform,nanolearning/edx-platform,EDUlib/edx-platform,xuxiao19910803/edx,xingyepei/edx-platform,vikas1885/test1,sameetb-cuelogic/edx-platform-test,jonathan-beard/edx-platform,DNFcode/edx-platform,mbareta/edx-platform-ft,mbareta/edx-platform-ft,stvstnfrd/edx-platform,JioEducation/edx-platform,martynovp/edx-platform,atsolakid/edx-platform,jolyonb/edx-platform,inares/edx-platform,utecuy/edx-platform,martynovp/edx-platform,andyzsf/edx,tiagochiavericosta/edx-platform,dkarakats/edx-platform,appliedx/edx-platform,benpatterson/edx-platform,motion2015/edx-platform,apigee/edx-platform,Livit/Livit.Learn.EdX,SivilTaram/edx-platform,PepperPD/edx-pepper-platform,xingyepei/edx-platform,nagyistoce/edx-platform,itsjeyd/edx-platform,SravanthiSinha/edx-platform,longmen21/edx-platform,eemirtekin/edx-platform,xinjiguaike/edx-platform,kamalx/edx-platform,rue89-tech/edx-platform,shubhdev/openedx,jswope00/griffinx,lduarte1991/edx-platform,rationalAgent/edx-platform-custom,leansoft/edx-platform,Lektorium-LLC/edx-platform,doganov/edx-platform,jazkarta/edx-platform-for-isc,ferabra/edx-platform,edx-solutions/edx-platform,prarthitm/edxplatform,bdero/edx-platform,OmarIthawi/edx-platform,synergeticsedx/deployment-wipro,eemirtekin/edx-platform,EduPepperPD/pepper2013,polimediaupv/edx-platform,MSOpenTech/edx-platform,kalebhartje/schoolboost,philanthropy-u/edx-platform,knehez/edx-platform,xuxiao19910803/edx-platform,openfun/edx-platform,zofuthan/edx-platform,wwj718/ANALYSE,CredoReference/edx-platform,hkawasaki/kawasaki-aio8-1,jelugbo/tundex,jamesblunt/edx-platform,chrisndodge/edx-platform,alexthered/kienhoc-platform,a-parhom/edx-platform,pelikanchik/edx-platform,SivilTaram/edx-platform,yokose-ks/edx-platform,Kalyzee/edx-platform,AkA84/edx-platform,msegado/edx-platform,UXE/local-edx,Endika/edx-platform,defance/edx-platform,ampax/edx-platform-backup,dsajkl/123,zadgroup/edx-platform,ampax/edx-platform-backup,jswope00/griffinx,ahmadio/edx-platform,alexthered/kienhoc-platform,shubhdev/openedx,eestay/edx-platform,shubhdev/edx-platform,rismalrv/edx-platform,chauhanhardik/populo,jswope00/GAI,ubc/edx-platform,IITBinterns13/edx-platform-dev,ferabra/edx-platform,shubhdev/edxOnBaadal,doganov/edx-platform,SravanthiSinha/edx-platform,devs1991/test_edx_docmode,TeachAtTUM/edx-platform,eduNEXT/edx-platform,kxliugang/edx-platform,fly19890211/edx-platform,jruiperezv/ANALYSE,carsongee/edx-platform,louyihua/edx-platform,bdero/edx-platform,nttks/jenkins-test,inares/edx-platform,Shrhawk/edx-platform,nanolearningllc/edx-platform-cypress-2,miptliot/edx-platform,mahendra-r/edx-platform,adoosii/edx-platform,miptliot/edx-platform,RPI-OPENEDX/edx-platform,ampax/edx-platform,j
swope00/GAI,shurihell/testasia,mitocw/edx-platform,mahendra-r/edx-platform,auferack08/edx-platform,jazkarta/edx-platform,eestay/edx-platform,rationalAgent/edx-platform-custom,Softmotions/edx-platform,amir-qayyum-khan/edx-platform,xuxiao19910803/edx,antonve/s4-project-mooc,kmoocdev2/edx-platform,prarthitm/edxplatform,ak2703/edx-platform,DefyVentures/edx-platform,longmen21/edx-platform,mbareta/edx-platform-ft,knehez/edx-platform,chand3040/cloud_that,MSOpenTech/edx-platform,motion2015/a3,Endika/edx-platform,dsajkl/123,jazztpt/edx-platform,UOMx/edx-platform,lduarte1991/edx-platform,abdoosh00/edx-rtl-final,analyseuc3m/ANALYSE-v1,proversity-org/edx-platform,mitocw/edx-platform,edry/edx-platform,ferabra/edx-platform,simbs/edx-platform,jazkarta/edx-platform-for-isc,zadgroup/edx-platform,carsongee/edx-platform,ahmedaljazzar/edx-platform,jbzdak/edx-platform,iivic/BoiseStateX,Edraak/circleci-edx-platform,yokose-ks/edx-platform,mcgachey/edx-platform,mahendra-r/edx-platform,edry/edx-platform,mcgachey/edx-platform,wwj718/edx-platform,nanolearningllc/edx-platform-cypress-2,LICEF/edx-platform,torchingloom/edx-platform,xinjiguaike/edx-platform,B-MOOC/edx-platform,pku9104038/edx-platform,kmoocdev/edx-platform,polimediaupv/edx-platform,gymnasium/edx-platform,EduPepperPDTesting/pepper2013-testing,cselis86/edx-platform,xuxiao19910803/edx,ESOedX/edx-platform,shurihell/testasia,SivilTaram/edx-platform,jazkarta/edx-platform-for-isc,valtech-mooc/edx-platform,J861449197/edx-platform,UXE/local-edx,cselis86/edx-platform,J861449197/edx-platform,shashank971/edx-platform,sudheerchintala/LearnEraPlatForm,angelapper/edx-platform,IndonesiaX/edx-platform,solashirai/edx-platform,arifsetiawan/edx-platform,Ayub-Khan/edx-platform,mcgachey/edx-platform,iivic/BoiseStateX,proversity-org/edx-platform,Edraak/circleci-edx-platform,rhndg/openedx,franosincic/edx-platform,cyanna/edx-platform,ampax/edx-platform-backup,itsjeyd/edx-platform,zubair-arbi/edx-platform,ovnicraft/edx-platform,4eek/edx-platform,chrisndodge/edx-platform,beni55/edx-platform,sudheerchintala/LearnEraPlatForm,tanmaykm/edx-platform,wwj718/edx-platform,eemirtekin/edx-platform,longmen21/edx-platform,RPI-OPENEDX/edx-platform,jswope00/griffinx,jamiefolsom/edx-platform,devs1991/test_edx_docmode,chudaol/edx-platform,zofuthan/edx-platform,hamzehd/edx-platform,stvstnfrd/edx-platform,nttks/jenkins-test,xinjiguaike/edx-platform,pelikanchik/edx-platform,dcosentino/edx-platform,alu042/edx-platform,Stanford-Online/edx-platform,marcore/edx-platform,jzoldak/edx-platform,y12uc231/edx-platform,gsehub/edx-platform,hamzehd/edx-platform,mjg2203/edx-platform-seas,hkawasaki/kawasaki-aio8-1,bigdatauniversity/edx-platform,rismalrv/edx-platform,jamesblunt/edx-platform,WatanabeYasumasa/edx-platform,xuxiao19910803/edx,WatanabeYasumasa/edx-platform,morenopc/edx-platform,dcosentino/edx-platform,jzoldak/edx-platform,EduPepperPDTesting/pepper2013-testing,praveen-pal/edx-platform,mjirayu/sit_academy,abdoosh00/edraak,Livit/Livit.Learn.EdX,tanmaykm/edx-platform,nanolearningllc/edx-platform-cypress-2,JCBarahona/edX,Kalyzee/edx-platform,xinjiguaike/edx-platform,antonve/s4-project-mooc,zhenzhai/edx-platform,gymnasium/edx-platform,MSOpenTech/edx-platform,ferabra/edx-platform,mjirayu/sit_academy,louyihua/edx-platform,ahmadiga/min_edx,dsajkl/reqiop,y12uc231/edx-platform,raccoongang/edx-platform,motion2015/a3,motion2015/a3,adoosii/edx-platform,analyseuc3m/ANALYSE-v1,EduPepperPD/pepper2013,hmcmooc/muddx-platform,mcgachey/edx-platform,a-parhom/edx-platform,alexthered/kienhoc-platform,jjmiranda/edx-platform,cece
p-edu/edx-platform,simbs/edx-platform,jruiperezv/ANALYSE,4eek/edx-platform,MakeHer/edx-platform,torchingloom/edx-platform,DNFcode/edx-platform,xingyepei/edx-platform,AkA84/edx-platform,edx-solutions/edx-platform,procangroup/edx-platform,jelugbo/tundex,Edraak/circleci-edx-platform,EduPepperPD/pepper2013,zerobatu/edx-platform,olexiim/edx-platform,hamzehd/edx-platform,ahmadiga/min_edx,romain-li/edx-platform,ahmedaljazzar/edx-platform,playm2mboy/edx-platform,jswope00/GAI,ak2703/edx-platform,vasyarv/edx-platform,andyzsf/edx,abdoosh00/edraak,praveen-pal/edx-platform,fly19890211/edx-platform,rismalrv/edx-platform,auferack08/edx-platform,openfun/edx-platform,playm2mboy/edx-platform,hkawasaki/kawasaki-aio8-2,edx/edx-platform,morpheby/levelup-by,LICEF/edx-platform,doismellburning/edx-platform,cpennington/edx-platform,openfun/edx-platform,amir-qayyum-khan/edx-platform,sameetb-cuelogic/edx-platform-test,Ayub-Khan/edx-platform,jolyonb/edx-platform,OmarIthawi/edx-platform,mushtaqak/edx-platform,Kalyzee/edx-platform,bigdatauniversity/edx-platform,UOMx/edx-platform,valtech-mooc/edx-platform,zerobatu/edx-platform,SivilTaram/edx-platform,tiagochiavericosta/edx-platform,utecuy/edx-platform,rhndg/openedx,cognitiveclass/edx-platform,arifsetiawan/edx-platform,mtlchun/edx,wwj718/edx-platform,morenopc/edx-platform,jamiefolsom/edx-platform,olexiim/edx-platform,utecuy/edx-platform,miptliot/edx-platform,chand3040/cloud_that,IITBinterns13/edx-platform-dev,antonve/s4-project-mooc,nttks/edx-platform,hkawasaki/kawasaki-aio8-1,4eek/edx-platform,antoviaque/edx-platform,PepperPD/edx-pepper-platform,mjg2203/edx-platform-seas,EDUlib/edx-platform,Edraak/edx-platform,jamiefolsom/edx-platform,bdero/edx-platform,zadgroup/edx-platform,Edraak/edraak-platform,alexthered/kienhoc-platform,cpennington/edx-platform,mtlchun/edx,IITBinterns13/edx-platform-dev,jamiefolsom/edx-platform,auferack08/edx-platform,motion2015/edx-platform,jjmiranda/edx-platform,ZLLab-Mooc/edx-platform,EduPepperPDTesting/pepper2013-testing,jonathan-beard/edx-platform,jbassen/edx-platform,eemirtekin/edx-platform,motion2015/edx-platform,vikas1885/test1,ZLLab-Mooc/edx-platform,appliedx/edx-platform,jruiperezv/ANALYSE,bigdatauniversity/edx-platform,kamalx/edx-platform,angelapper/edx-platform,kmoocdev/edx-platform,hkawasaki/kawasaki-aio8-2,bitifirefly/edx-platform,fintech-circle/edx-platform,sameetb-cuelogic/edx-platform-test,IONISx/edx-platform,RPI-OPENEDX/edx-platform,kalebhartje/schoolboost,JioEducation/edx-platform,rhndg/openedx,xuxiao19910803/edx-platform,eestay/edx-platform,naresh21/synergetics-edx-platform,Livit/Livit.Learn.EdX,fly19890211/edx-platform,halvertoluke/edx-platform,ak2703/edx-platform,appsembler/edx-platform,kamalx/edx-platform,cyanna/edx-platform,solashirai/edx-platform,UXE/local-edx,Edraak/edraak-platform,DNFcode/edx-platform,EduPepperPD/pepper2013,kalebhartje/schoolboost,JCBarahona/edX,pepeportela/edx-platform,hmcmooc/muddx-platform,atsolakid/edx-platform,bigdatauniversity/edx-platform,mushtaqak/edx-platform,eestay/edx-platform,y12uc231/edx-platform,TsinghuaX/edx-platform,kursitet/edx-platform,JioEducation/edx-platform,CourseTalk/edx-platform,jazztpt/edx-platform,pdehaye/theming-edx-platform,chauhanhardik/populo,defance/edx-platform,jruiperezv/ANALYSE,benpatterson/edx-platform,Kalyzee/edx-platform,abdoosh00/edx-rtl-final,pelikanchik/edx-platform,hkawasaki/kawasaki-aio8-0,romain-li/edx-platform,TsinghuaX/edx-platform,apigee/edx-platform,WatanabeYasumasa/edx-platform,IONISx/edx-platform,appsembler/edx-platform,caesar2164/edx-platform,nttks/edx-plat
form,olexiim/edx-platform,itsjeyd/edx-platform,ferabra/edx-platform,ampax/edx-platform-backup,zhenzhai/edx-platform,IONISx/edx-platform,apigee/edx-platform,defance/edx-platform,shabab12/edx-platform,UOMx/edx-platform
|
#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
Fix string layout for readability
|
#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. "
"It appears you've customized things.\nYou'll have to run django-admin.py, "
"passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
|
<commit_before>#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
<commit_msg>Fix string layout for readability<commit_after>
|
#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. "
"It appears you've customized things.\nYou'll have to run django-admin.py, "
"passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
|
#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
Fix string layout for readability#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. "
"It appears you've customized things.\nYou'll have to run django-admin.py, "
"passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
|
<commit_before>#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
<commit_msg>Fix string layout for readability<commit_after>#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. "
"It appears you've customized things.\nYou'll have to run django-admin.py, "
"passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
|
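The reflow in the record above relies on Python joining adjacent string literals at compile time, so the wrapped message stays one string with no '+' operators. A minimal standalone sketch of the same idea (the message text and function name here are illustrative, not from edx-platform):

import sys

def warn_missing_settings(filename):
    # Adjacent literals inside the parentheses are concatenated into a single
    # string before the call, exactly as in the reformatted manage.py message.
    sys.stderr.write("Error: Can't find the file 'settings.py' next to %r. "
                     "It appears you've customized things.\n"
                     "Run django-admin.py with your settings module.\n" % filename)

warn_missing_settings(__file__)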
7217e2dcbec3e13d730e47e001d00c5fb8534468
|
moa/__init__.py
|
moa/__init__.py
|
'''A framework for designing and running experiments in Python using Kivy.
'''
__version__ = '0.1-dev'
from kivy import kivy_home_dir
from os import environ
from os.path import join
from moa.logger import Logger
#: moa configuration filename
moa_config_fn = ''
if not environ.get('MOA_DOC_INCLUDE'):
moa_config_fn = join(kivy_home_dir, 'moa_config.ini')
Logger.info('Moa v%s' % (__version__))
|
'''A framework for designing and running experiments in Python using Kivy.
'''
__version__ = '0.1-dev'
from kivy import kivy_home_dir
from os import environ
from os.path import join
if 'MOA_CLOCK' in environ:
from moa.clock import set_clock
set_clock(clock='moa')
from moa.logger import Logger
#: moa configuration filename
moa_config_fn = ''
if not environ.get('MOA_DOC_INCLUDE'):
moa_config_fn = join(kivy_home_dir, 'moa_config.ini')
Logger.info('Moa v%s' % (__version__))
|
Add config option to start moa clock.
|
Add config option to start moa clock.
|
Python
|
mit
|
matham/moa
|
'''A framework for designing and running experiments in Python using Kivy.
'''
__version__ = '0.1-dev'
from kivy import kivy_home_dir
from os import environ
from os.path import join
from moa.logger import Logger
#: moa configuration filename
moa_config_fn = ''
if not environ.get('MOA_DOC_INCLUDE'):
moa_config_fn = join(kivy_home_dir, 'moa_config.ini')
Logger.info('Moa v%s' % (__version__))
Add config option to start moa clock.
|
'''A framework for designing and running experiments in Python using Kivy.
'''
__version__ = '0.1-dev'
from kivy import kivy_home_dir
from os import environ
from os.path import join
if 'MOA_CLOCK' in environ:
from moa.clock import set_clock
set_clock(clock='moa')
from moa.logger import Logger
#: moa configuration filename
moa_config_fn = ''
if not environ.get('MOA_DOC_INCLUDE'):
moa_config_fn = join(kivy_home_dir, 'moa_config.ini')
Logger.info('Moa v%s' % (__version__))
|
<commit_before>'''A framework for designing and running experiments in Python using Kivy.
'''
__version__ = '0.1-dev'
from kivy import kivy_home_dir
from os import environ
from os.path import join
from moa.logger import Logger
#: moa configuration filename
moa_config_fn = ''
if not environ.get('MOA_DOC_INCLUDE'):
moa_config_fn = join(kivy_home_dir, 'moa_config.ini')
Logger.info('Moa v%s' % (__version__))
<commit_msg>Add config option to start moa clock.<commit_after>
|
'''A framework for designing and running experiments in Python using Kivy.
'''
__version__ = '0.1-dev'
from kivy import kivy_home_dir
from os import environ
from os.path import join
if 'MOA_CLOCK' in environ:
from moa.clock import set_clock
set_clock(clock='moa')
from moa.logger import Logger
#: moa configuration filename
moa_config_fn = ''
if not environ.get('MOA_DOC_INCLUDE'):
moa_config_fn = join(kivy_home_dir, 'moa_config.ini')
Logger.info('Moa v%s' % (__version__))
|
'''A framework for designing and running experiments in Python using Kivy.
'''
__version__ = '0.1-dev'
from kivy import kivy_home_dir
from os import environ
from os.path import join
from moa.logger import Logger
#: moa configuration filename
moa_config_fn = ''
if not environ.get('MOA_DOC_INCLUDE'):
moa_config_fn = join(kivy_home_dir, 'moa_config.ini')
Logger.info('Moa v%s' % (__version__))
Add config option to start moa clock.'''A framework for designing and running experiments in Python using Kivy.
'''
__version__ = '0.1-dev'
from kivy import kivy_home_dir
from os import environ
from os.path import join
if 'MOA_CLOCK' in environ:
from moa.clock import set_clock
set_clock(clock='moa')
from moa.logger import Logger
#: moa configuration filename
moa_config_fn = ''
if not environ.get('MOA_DOC_INCLUDE'):
moa_config_fn = join(kivy_home_dir, 'moa_config.ini')
Logger.info('Moa v%s' % (__version__))
|
<commit_before>'''A framework for designing and running experiments in Python using Kivy.
'''
__version__ = '0.1-dev'
from kivy import kivy_home_dir
from os import environ
from os.path import join
from moa.logger import Logger
#: moa configuration filename
moa_config_fn = ''
if not environ.get('MOA_DOC_INCLUDE'):
moa_config_fn = join(kivy_home_dir, 'moa_config.ini')
Logger.info('Moa v%s' % (__version__))
<commit_msg>Add config option to start moa clock.<commit_after>'''A framework for designing and running experiments in Python using Kivy.
'''
__version__ = '0.1-dev'
from kivy import kivy_home_dir
from os import environ
from os.path import join
if 'MOA_CLOCK' in environ:
from moa.clock import set_clock
set_clock(clock='moa')
from moa.logger import Logger
#: moa configuration filename
moa_config_fn = ''
if not environ.get('MOA_DOC_INCLUDE'):
moa_config_fn = join(kivy_home_dir, 'moa_config.ini')
Logger.info('Moa v%s' % (__version__))
|
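The commit above gates clock selection on an environment variable before the rest of the package is imported. A standalone sketch of that gating pattern, assuming nothing about moa's clock API beyond what the record shows; users would opt in with something like `MOA_CLOCK=1 python main.py`:

import os

# Branching on the environment keeps the default code path untouched while
# letting a user switch behaviour without editing the module.
if 'MOA_CLOCK' in os.environ:
    clock_name = 'moa'
else:
    clock_name = 'kivy default'
print('using clock:', clock_name)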
8556fc0b6fb024ab6cc68364270462681209108a
|
examples/match.py
|
examples/match.py
|
import cassiopeia as cass
from cassiopeia.core import Summoner
def print_summoner(name: str, id: int):
me = Summoner(name="Kalturi", id=21359666)
#matches = cass.get_matches(me)
matches = me.matches
match = matches[0]
print(match.id)
for p in match.participants:
print(p.id, p.champion.name)
if __name__ == "__main__":
print_summoner(name="Kalturi", id=21359666)
|
import cassiopeia as cass
from cassiopeia.core import Summoner
def print_newest_match(name: str, region: str):
summoner = Summoner(name=name, region=region)
# matches = cass.get_matches(summoner)
matches = summoner.matches
match = matches[0]
print('Match ID:', match.id)
for p in match.participants:
print(p.name, 'playing', p.champion.name)
if __name__ == "__main__":
print_newest_match(name="Kalturi", region="NA")
|
Add print description, use function arguments
|
Add print description, use function arguments
- changed name and ID to being name and region
- Add a string to the print calls denoting what's being printed out
|
Python
|
mit
|
robrua/cassiopeia,10se1ucgo/cassiopeia,meraki-analytics/cassiopeia
|
import cassiopeia as cass
from cassiopeia.core import Summoner
def print_summoner(name: str, id: int):
me = Summoner(name="Kalturi", id=21359666)
#matches = cass.get_matches(me)
matches = me.matches
match = matches[0]
print(match.id)
for p in match.participants:
print(p.id, p.champion.name)
if __name__ == "__main__":
print_summoner(name="Kalturi", id=21359666)
Add print description, use function arguments
- changed name and ID to being name and region
- Add a string to the print calls denoting what's being printed out
|
import cassiopeia as cass
from cassiopeia.core import Summoner
def print_newest_match(name: str, region: str):
summoner = Summoner(name=name, region=region)
# matches = cass.get_matches(summoner)
matches = summoner.matches
match = matches[0]
print('Match ID:', match.id)
for p in match.participants:
print(p.name, 'playing', p.champion.name)
if __name__ == "__main__":
print_newest_match(name="Kalturi", region="NA")
|
<commit_before>import cassiopeia as cass
from cassiopeia.core import Summoner
def print_summoner(name: str, id: int):
me = Summoner(name="Kalturi", id=21359666)
#matches = cass.get_matches(me)
matches = me.matches
match = matches[0]
print(match.id)
for p in match.participants:
print(p.id, p.champion.name)
if __name__ == "__main__":
print_summoner(name="Kalturi", id=21359666)
<commit_msg>Add print description, use function arguments
- changed name and ID to being name and region
- Add a string to the print calls denoting what's being printed out<commit_after>
|
import cassiopeia as cass
from cassiopeia.core import Summoner
def print_newest_match(name: str, region: str):
summoner = Summoner(name=name, region=region)
# matches = cass.get_matches(summoner)
matches = summoner.matches
match = matches[0]
print('Match ID:', match.id)
for p in match.participants:
print(p.name, 'playing', p.champion.name)
if __name__ == "__main__":
print_newest_match(name="Kalturi", region="NA")
|
import cassiopeia as cass
from cassiopeia.core import Summoner
def print_summoner(name: str, id: int):
me = Summoner(name="Kalturi", id=21359666)
#matches = cass.get_matches(me)
matches = me.matches
match = matches[0]
print(match.id)
for p in match.participants:
print(p.id, p.champion.name)
if __name__ == "__main__":
print_summoner(name="Kalturi", id=21359666)
Add print description, use function arguments
- changed name and ID to being name and region
- Add a string to the print calls denoting what's being printed outimport cassiopeia as cass
from cassiopeia.core import Summoner
def print_newest_match(name: str, region: str):
summoner = Summoner(name=name, region=region)
# matches = cass.get_matches(summoner)
matches = summoner.matches
match = matches[0]
print('Match ID:', match.id)
for p in match.participants:
print(p.name, 'playing', p.champion.name)
if __name__ == "__main__":
print_newest_match(name="Kalturi", region="NA")
|
<commit_before>import cassiopeia as cass
from cassiopeia.core import Summoner
def print_summoner(name: str, id: int):
me = Summoner(name="Kalturi", id=21359666)
#matches = cass.get_matches(me)
matches = me.matches
match = matches[0]
print(match.id)
for p in match.participants:
print(p.id, p.champion.name)
if __name__ == "__main__":
print_summoner(name="Kalturi", id=21359666)
<commit_msg>Add print description, use function arguments
- changed name and ID to being name and region
- Add a string to the print calls denoting what's being printed out<commit_after>import cassiopeia as cass
from cassiopeia.core import Summoner
def print_newest_match(name: str, region: str):
summoner = Summoner(name=name, region=region)
# matches = cass.get_matches(summoner)
matches = summoner.matches
match = matches[0]
print('Match ID:', match.id)
for p in match.participants:
print(p.name, 'playing', p.champion.name)
if __name__ == "__main__":
print_newest_match(name="Kalturi", region="NA")
|
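The change above labels each printed value and routes the function arguments through to the lookup instead of re-hardcoding them. The same labelling can be written with f-strings; a tiny illustrative sketch with made-up values, not cassiopeia API calls:

match_id = 12345
player, champion = "Kalturi", "Lux"
print(f"Match ID: {match_id}")
print(f"{player} playing {champion}")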
5e8d64bcbb53da0984ac0b41a470417a05c530d7
|
microcosm_postgres/factories.py
|
microcosm_postgres/factories.py
|
"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_session(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
|
"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_sessionmaker(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
|
Rename factory to match what it creates
|
Rename factory to match what it creates
|
Python
|
apache-2.0
|
globality-corp/microcosm-postgres,globality-corp/microcosm-postgres
|
"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_session(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
Rename factory to match what it creates
|
"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_sessionmaker(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
|
<commit_before>"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_session(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
<commit_msg>Rename factory to match what it creates<commit_after>
|
"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_sessionmaker(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
|
"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_session(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
Rename factory to match what it creates"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_sessionmaker(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
|
<commit_before>"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_session(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
<commit_msg>Rename factory to match what it creates<commit_after>"""
Factory that configures SQLAlchemy for PostgreSQL.
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from microcosm.api import binding, defaults
@binding("postgres")
@defaults(
host="localhost",
port=5432,
password="secret",
)
def configure_sqlalchemy_engine(graph):
"""
Create the SQLAlchemy engine.
"""
# use different database name for testing
if graph.metadata.testing:
database_name = "{}_test_db".format(graph.metadata.name)
else:
database_name = "{}_db".format(graph.metadata.name)
# use the metadata name as the username
username = graph.metadata.name
password = graph.config.postgres.password or ""
uri = "postgresql://{}:{}@{}:{}/{}".format(
username,
password,
graph.config.postgres.host,
graph.config.postgres.port,
database_name,
)
return create_engine(uri)
def configure_sqlalchemy_sessionmaker(graph):
"""
Create the SQLAlchemy session class.
"""
return sessionmaker(bind=graph.postgres)
|
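For context on what the renamed factory returns: sessionmaker() produces a session class that callers instantiate per unit of work. A self-contained sketch using an in-memory SQLite engine so it runs without a PostgreSQL server; the real factory binds to graph.postgres instead:

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///:memory:")
Session = sessionmaker(bind=engine)   # comparable to the factory's return value

session = Session()
print(session.execute(text("SELECT 1")).scalar())   # -> 1
session.close()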
87d9365cd3f19a52957e2e26cefa9fa048c2acb1
|
TWLight/resources/filters.py
|
TWLight/resources/filters.py
|
import django_filters
from .models import Language, Partner
from .helpers import get_tag_choices
class PartnerFilter(django_filters.FilterSet):
tags = django_filters.ChoiceFilter(
label="Tags", choices=get_tag_choices(), method="tags_filter"
)
languages = django_filters.ModelChoiceFilter(queryset=Language.objects.all())
def __init__(self, *args, **kwargs):
# grab "language_code" from kwargs and then remove it so we can call super()
language_code = None
if "language_code" in kwargs:
language_code = kwargs.get("language_code")
kwargs.pop("language_code")
super(PartnerFilter, self).__init__(*args, **kwargs)
self.filters["tags"].extra.update({"choices": get_tag_choices(language_code)})
# Add CSS classes to style widgets
self.filters["tags"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
self.filters["languages"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
class Meta:
model = Partner
fields = ["languages"]
def tags_filter(self, queryset, name, value):
return queryset.filter(new_tags__tags__contains=value)
|
from django.utils.translation import gettext as _
from .models import Language, Partner
from .helpers import get_tag_choices
import django_filters
class PartnerFilter(django_filters.FilterSet):
tags = django_filters.ChoiceFilter(
# Translators: On the MyLibrary page (https://wikipedialibrary.wmflabs.org/users/my_library), this text is shown to indicate how many subject areas a collection covers.
label=_("Tags"),
choices=get_tag_choices(),
method="tags_filter",
)
languages = django_filters.ModelChoiceFilter(
# Translators: On the MyLibrary page (https://wikipedialibrary.wmflabs.org/users/my_library), this text is shown to indicate how many languages a collection supports.
label=_("Languages"),
queryset=Language.objects.all(),
)
def __init__(self, *args, **kwargs):
# grab "language_code" from kwargs and then remove it so we can call super()
language_code = None
if "language_code" in kwargs:
language_code = kwargs.get("language_code")
kwargs.pop("language_code")
super(PartnerFilter, self).__init__(*args, **kwargs)
self.filters["tags"].extra.update({"choices": get_tag_choices(language_code)})
# Add CSS classes to style widgets
self.filters["tags"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
self.filters["languages"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
class Meta:
model = Partner
fields = ["languages"]
def tags_filter(self, queryset, name, value):
return queryset.filter(new_tags__tags__contains=value)
|
Mark filter headers in My Library for translation
|
Mark filter headers in My Library for translation
|
Python
|
mit
|
WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight
|
import django_filters
from .models import Language, Partner
from .helpers import get_tag_choices
class PartnerFilter(django_filters.FilterSet):
tags = django_filters.ChoiceFilter(
label="Tags", choices=get_tag_choices(), method="tags_filter"
)
languages = django_filters.ModelChoiceFilter(queryset=Language.objects.all())
def __init__(self, *args, **kwargs):
# grab "language_code" from kwargs and then remove it so we can call super()
language_code = None
if "language_code" in kwargs:
language_code = kwargs.get("language_code")
kwargs.pop("language_code")
super(PartnerFilter, self).__init__(*args, **kwargs)
self.filters["tags"].extra.update({"choices": get_tag_choices(language_code)})
# Add CSS classes to style widgets
self.filters["tags"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
self.filters["languages"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
class Meta:
model = Partner
fields = ["languages"]
def tags_filter(self, queryset, name, value):
return queryset.filter(new_tags__tags__contains=value)
Mark filter headers in My Library for translation
|
from django.utils.translation import gettext as _
from .models import Language, Partner
from .helpers import get_tag_choices
import django_filters
class PartnerFilter(django_filters.FilterSet):
tags = django_filters.ChoiceFilter(
# Translators: On the MyLibrary page (https://wikipedialibrary.wmflabs.org/users/my_library), this text is shown to indicate how many subject areas a collection covers.
label=_("Tags"),
choices=get_tag_choices(),
method="tags_filter",
)
languages = django_filters.ModelChoiceFilter(
# Translators: On the MyLibrary page (https://wikipedialibrary.wmflabs.org/users/my_library), this text is shown to indicate how many languages a collection supports.
label=_("Languages"),
queryset=Language.objects.all(),
)
def __init__(self, *args, **kwargs):
# grab "language_code" from kwargs and then remove it so we can call super()
language_code = None
if "language_code" in kwargs:
language_code = kwargs.get("language_code")
kwargs.pop("language_code")
super(PartnerFilter, self).__init__(*args, **kwargs)
self.filters["tags"].extra.update({"choices": get_tag_choices(language_code)})
# Add CSS classes to style widgets
self.filters["tags"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
self.filters["languages"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
class Meta:
model = Partner
fields = ["languages"]
def tags_filter(self, queryset, name, value):
return queryset.filter(new_tags__tags__contains=value)
|
<commit_before>import django_filters
from .models import Language, Partner
from .helpers import get_tag_choices
class PartnerFilter(django_filters.FilterSet):
tags = django_filters.ChoiceFilter(
label="Tags", choices=get_tag_choices(), method="tags_filter"
)
languages = django_filters.ModelChoiceFilter(queryset=Language.objects.all())
def __init__(self, *args, **kwargs):
# grab "language_code" from kwargs and then remove it so we can call super()
language_code = None
if "language_code" in kwargs:
language_code = kwargs.get("language_code")
kwargs.pop("language_code")
super(PartnerFilter, self).__init__(*args, **kwargs)
self.filters["tags"].extra.update({"choices": get_tag_choices(language_code)})
# Add CSS classes to style widgets
self.filters["tags"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
self.filters["languages"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
class Meta:
model = Partner
fields = ["languages"]
def tags_filter(self, queryset, name, value):
return queryset.filter(new_tags__tags__contains=value)
<commit_msg>Mark filter headers in My Library for translation<commit_after>
|
from django.utils.translation import gettext as _
from .models import Language, Partner
from .helpers import get_tag_choices
import django_filters
class PartnerFilter(django_filters.FilterSet):
tags = django_filters.ChoiceFilter(
# Translators: On the MyLibrary page (https://wikipedialibrary.wmflabs.org/users/my_library), this text is shown to indicate how many subject areas a collection covers.
label=_("Tags"),
choices=get_tag_choices(),
method="tags_filter",
)
languages = django_filters.ModelChoiceFilter(
# Translators: On the MyLibrary page (https://wikipedialibrary.wmflabs.org/users/my_library), this text is shown to indicate how many languages a collection supports.
label=_("Languages"),
queryset=Language.objects.all(),
)
def __init__(self, *args, **kwargs):
# grab "language_code" from kwargs and then remove it so we can call super()
language_code = None
if "language_code" in kwargs:
language_code = kwargs.get("language_code")
kwargs.pop("language_code")
super(PartnerFilter, self).__init__(*args, **kwargs)
self.filters["tags"].extra.update({"choices": get_tag_choices(language_code)})
# Add CSS classes to style widgets
self.filters["tags"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
self.filters["languages"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
class Meta:
model = Partner
fields = ["languages"]
def tags_filter(self, queryset, name, value):
return queryset.filter(new_tags__tags__contains=value)
|
import django_filters
from .models import Language, Partner
from .helpers import get_tag_choices
class PartnerFilter(django_filters.FilterSet):
tags = django_filters.ChoiceFilter(
label="Tags", choices=get_tag_choices(), method="tags_filter"
)
languages = django_filters.ModelChoiceFilter(queryset=Language.objects.all())
def __init__(self, *args, **kwargs):
# grab "language_code" from kwargs and then remove it so we can call super()
language_code = None
if "language_code" in kwargs:
language_code = kwargs.get("language_code")
kwargs.pop("language_code")
super(PartnerFilter, self).__init__(*args, **kwargs)
self.filters["tags"].extra.update({"choices": get_tag_choices(language_code)})
# Add CSS classes to style widgets
self.filters["tags"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
self.filters["languages"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
class Meta:
model = Partner
fields = ["languages"]
def tags_filter(self, queryset, name, value):
return queryset.filter(new_tags__tags__contains=value)
Mark filter headers in My Library for translationfrom django.utils.translation import gettext as _
from .models import Language, Partner
from .helpers import get_tag_choices
import django_filters
class PartnerFilter(django_filters.FilterSet):
tags = django_filters.ChoiceFilter(
# Translators: On the MyLibrary page (https://wikipedialibrary.wmflabs.org/users/my_library), this text is shown to indicate how many subject areas a collection covers.
label=_("Tags"),
choices=get_tag_choices(),
method="tags_filter",
)
languages = django_filters.ModelChoiceFilter(
# Translators: On the MyLibrary page (https://wikipedialibrary.wmflabs.org/users/my_library), this text is shown to indicate how many languages a collection supports.
label=_("Languages"),
queryset=Language.objects.all(),
)
def __init__(self, *args, **kwargs):
# grab "language_code" from kwargs and then remove it so we can call super()
language_code = None
if "language_code" in kwargs:
language_code = kwargs.get("language_code")
kwargs.pop("language_code")
super(PartnerFilter, self).__init__(*args, **kwargs)
self.filters["tags"].extra.update({"choices": get_tag_choices(language_code)})
# Add CSS classes to style widgets
self.filters["tags"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
self.filters["languages"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
class Meta:
model = Partner
fields = ["languages"]
def tags_filter(self, queryset, name, value):
return queryset.filter(new_tags__tags__contains=value)
|
<commit_before>import django_filters
from .models import Language, Partner
from .helpers import get_tag_choices
class PartnerFilter(django_filters.FilterSet):
tags = django_filters.ChoiceFilter(
label="Tags", choices=get_tag_choices(), method="tags_filter"
)
languages = django_filters.ModelChoiceFilter(queryset=Language.objects.all())
def __init__(self, *args, **kwargs):
# grab "language_code" from kwargs and then remove it so we can call super()
language_code = None
if "language_code" in kwargs:
language_code = kwargs.get("language_code")
kwargs.pop("language_code")
super(PartnerFilter, self).__init__(*args, **kwargs)
self.filters["tags"].extra.update({"choices": get_tag_choices(language_code)})
# Add CSS classes to style widgets
self.filters["tags"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
self.filters["languages"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
class Meta:
model = Partner
fields = ["languages"]
def tags_filter(self, queryset, name, value):
return queryset.filter(new_tags__tags__contains=value)
<commit_msg>Mark filter headers in My Library for translation<commit_after>from django.utils.translation import gettext as _
from .models import Language, Partner
from .helpers import get_tag_choices
import django_filters
class PartnerFilter(django_filters.FilterSet):
tags = django_filters.ChoiceFilter(
# Translators: On the MyLibrary page (https://wikipedialibrary.wmflabs.org/users/my_library), this text is shown to indicate how many subject areas a collection covers.
label=_("Tags"),
choices=get_tag_choices(),
method="tags_filter",
)
languages = django_filters.ModelChoiceFilter(
# Translators: On the MyLibrary page (https://wikipedialibrary.wmflabs.org/users/my_library), this text is shown to indicate how many languages a collection supports.
label=_("Languages"),
queryset=Language.objects.all(),
)
def __init__(self, *args, **kwargs):
# grab "language_code" from kwargs and then remove it so we can call super()
language_code = None
if "language_code" in kwargs:
language_code = kwargs.get("language_code")
kwargs.pop("language_code")
super(PartnerFilter, self).__init__(*args, **kwargs)
self.filters["tags"].extra.update({"choices": get_tag_choices(language_code)})
# Add CSS classes to style widgets
self.filters["tags"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
self.filters["languages"].field.widget.attrs.update(
{"class": "form-control form-control-sm"}
)
class Meta:
model = Partner
fields = ["languages"]
def tags_filter(self, queryset, name, value):
return queryset.filter(new_tags__tags__contains=value)
|
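One note on the pattern above: the labels are evaluated when the module is imported, so Django projects typically wrap module-level strings with gettext_lazy to pick up the language active at request time. A minimal sketch, with field and filter names that are illustrative rather than taken from TWLight:

import django_filters
from django.utils.translation import gettext_lazy as _

class TitleFilter(django_filters.FilterSet):
    title = django_filters.CharFilter(
        # Translators: label shown above the free-text title filter.
        label=_("Title"),
        lookup_expr="icontains",
    )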
e563e8f8f1af691c4c9aa2f6177fbf2c8e2a4855
|
della/user_manager/draw_service.py
|
della/user_manager/draw_service.py
|
import json
from django.conf import settings
def _get_default_file_content():
return {'status': False}
def _write_status_file():
file_path = settings.STATUS_FILE
with open(file_path, 'w') as f:
json.dump({'status': True}, f)
return True
def _get_status_file():
file_path = settings.STATUS_FILE
try:
with open(file_path) as f:
return json.load(f)
except FileNotFoundError:
with open(file_path, 'w') as f:
response = _get_default_file_content()
json.dump(response, f)
return response
def get_draw_status():
return _get_status_file()['status']
def flip_draw_status():
if not _get_status_file()['status']:
return _write_status_file()
return True
|
import json
import random
from collections import deque
from django.conf import settings
from django.contrib.auth.models import User
def _get_default_file_content():
return {'status': False}
def _write_status_file():
file_path = settings.STATUS_FILE
with open(file_path, 'w') as f:
json.dump({'status': True}, f)
return True
def _get_status_file():
file_path = settings.STATUS_FILE
try:
with open(file_path) as f:
return json.load(f)
except FileNotFoundError:
with open(file_path, 'w') as f:
response = _get_default_file_content()
json.dump(response, f)
return response
def get_draw_status():
return _get_status_file()['status']
def flip_draw_status():
if not _get_status_file()['status']:
return _write_status_file()
return True
def draw_names():
pass
def make_pairs(user_ids):
while True:
pairs = _get_pairs(user_ids=user_ids)
if _is_valid_pair(pairs=pairs):
break
return pairs
def _get_pairs(user_ids):
user_ids_copy = user_ids.copy()
random.shuffle(user_ids_copy)
pairs = deque(user_ids_copy)
pairs.rotate()
return list(zip(user_ids, user_ids_copy))
def _is_valid_pair(pairs):
"""
Checks if the pair and list of pairs is valid. A pair is invalid if both
santa and santee are same i.e. (1, 1)
"""
for pair in pairs:
if pair[0] == pair[1]:
return False
return True
|
Add helper functions for making pairs
|
Add helper functions for making pairs
|
Python
|
mit
|
avinassh/della,avinassh/della,avinassh/della
|
import json
from django.conf import settings
def _get_default_file_content():
return {'status': False}
def _write_status_file():
file_path = settings.STATUS_FILE
with open(file_path, 'w') as f:
json.dump({'status': True}, f)
return True
def _get_status_file():
file_path = settings.STATUS_FILE
try:
with open(file_path) as f:
return json.load(f)
except FileNotFoundError:
with open(file_path, 'w') as f:
response = _get_default_file_content()
json.dump(response, f)
return response
def get_draw_status():
return _get_status_file()['status']
def flip_draw_status():
if not _get_status_file()['status']:
return _write_status_file()
return True
Add helper functions for making pairs
|
import json
import random
from collections import deque
from django.conf import settings
from django.contrib.auth.models import User
def _get_default_file_content():
return {'status': False}
def _write_status_file():
file_path = settings.STATUS_FILE
with open(file_path, 'w') as f:
json.dump({'status': True}, f)
return True
def _get_status_file():
file_path = settings.STATUS_FILE
try:
with open(file_path) as f:
return json.load(f)
except FileNotFoundError:
with open(file_path, 'w') as f:
response = _get_default_file_content()
json.dump(response, f)
return response
def get_draw_status():
return _get_status_file()['status']
def flip_draw_status():
if not _get_status_file()['status']:
return _write_status_file()
return True
def draw_names():
pass
def make_pairs(user_ids):
while True:
pairs = _get_pairs(user_ids=user_ids)
if _is_valid_pair(pairs=pairs):
break
return pairs
def _get_pairs(user_ids):
user_ids_copy = user_ids.copy()
random.shuffle(user_ids_copy)
pairs = deque(user_ids_copy)
pairs.rotate()
return list(zip(user_ids, user_ids_copy))
def _is_valid_pair(pairs):
"""
Checks if the pair and list of pairs is valid. A pair is invalid if both
santa and santee are same i.e. (1, 1)
"""
for pair in pairs:
if pair[0] == pair[1]:
return False
return True
|
<commit_before>import json
from django.conf import settings
def _get_default_file_content():
return {'status': False}
def _write_status_file():
file_path = settings.STATUS_FILE
with open(file_path, 'w') as f:
json.dump({'status': True}, f)
return True
def _get_status_file():
file_path = settings.STATUS_FILE
try:
with open(file_path) as f:
return json.load(f)
except FileNotFoundError:
with open(file_path, 'w') as f:
response = _get_default_file_content()
json.dump(response, f)
return response
def get_draw_status():
return _get_status_file()['status']
def flip_draw_status():
if not _get_status_file()['status']:
return _write_status_file()
return True
<commit_msg>Add helper functions for making pairs<commit_after>
|
import json
import random
from collections import deque
from django.conf import settings
from django.contrib.auth.models import User
def _get_default_file_content():
return {'status': False}
def _write_status_file():
file_path = settings.STATUS_FILE
with open(file_path, 'w') as f:
json.dump({'status': True}, f)
return True
def _get_status_file():
file_path = settings.STATUS_FILE
try:
with open(file_path) as f:
return json.load(f)
except FileNotFoundError:
with open(file_path, 'w') as f:
response = _get_default_file_content()
json.dump(response, f)
return response
def get_draw_status():
return _get_status_file()['status']
def flip_draw_status():
if not _get_status_file()['status']:
return _write_status_file()
return True
def draw_names():
pass
def make_pairs(user_ids):
while True:
pairs = _get_pairs(user_ids=user_ids)
if _is_valid_pair(pairs=pairs):
break
return pairs
def _get_pairs(user_ids):
user_ids_copy = user_ids.copy()
random.shuffle(user_ids_copy)
pairs = deque(user_ids_copy)
pairs.rotate()
return list(zip(user_ids, user_ids_copy))
def _is_valid_pair(pairs):
"""
Checks if the pair and list of pairs is valid. A pair is invalid if both
santa and santee are same i.e. (1, 1)
"""
for pair in pairs:
if pair[0] == pair[1]:
return False
return True
|
import json
from django.conf import settings
def _get_default_file_content():
return {'status': False}
def _write_status_file():
file_path = settings.STATUS_FILE
with open(file_path, 'w') as f:
json.dump({'status': True}, f)
return True
def _get_status_file():
file_path = settings.STATUS_FILE
try:
with open(file_path) as f:
return json.load(f)
except FileNotFoundError:
with open(file_path, 'w') as f:
response = _get_default_file_content()
json.dump(response, f)
return response
def get_draw_status():
return _get_status_file()['status']
def flip_draw_status():
if not _get_status_file()['status']:
return _write_status_file()
return True
Add helper functions for making pairsimport json
import random
from collections import deque
from django.conf import settings
from django.contrib.auth.models import User
def _get_default_file_content():
return {'status': False}
def _write_status_file():
file_path = settings.STATUS_FILE
with open(file_path, 'w') as f:
json.dump({'status': True}, f)
return True
def _get_status_file():
file_path = settings.STATUS_FILE
try:
with open(file_path) as f:
return json.load(f)
except FileNotFoundError:
with open(file_path, 'w') as f:
response = _get_default_file_content()
json.dump(response, f)
return response
def get_draw_status():
return _get_status_file()['status']
def flip_draw_status():
if not _get_status_file()['status']:
return _write_status_file()
return True
def draw_names():
pass
def make_pairs(user_ids):
while True:
pairs = _get_pairs(user_ids=user_ids)
if _is_valid_pair(pairs=pairs):
break
return pairs
def _get_pairs(user_ids):
user_ids_copy = user_ids.copy()
random.shuffle(user_ids_copy)
pairs = deque(user_ids_copy)
pairs.rotate()
return list(zip(user_ids, user_ids_copy))
def _is_valid_pair(pairs):
"""
Checks if the pair and list of pairs is valid. A pair is invalid if both
santa and santee are same i.e. (1, 1)
"""
for pair in pairs:
if pair[0] == pair[1]:
return False
return True
|
<commit_before>import json
from django.conf import settings
def _get_default_file_content():
return {'status': False}
def _write_status_file():
file_path = settings.STATUS_FILE
with open(file_path, 'w') as f:
json.dump({'status': True}, f)
return True
def _get_status_file():
file_path = settings.STATUS_FILE
try:
with open(file_path) as f:
return json.load(f)
except FileNotFoundError:
with open(file_path, 'w') as f:
response = _get_default_file_content()
json.dump(response, f)
return response
def get_draw_status():
return _get_status_file()['status']
def flip_draw_status():
if not _get_status_file()['status']:
return _write_status_file()
return True
<commit_msg>Add helper functions for making pairs<commit_after>import json
import random
from collections import deque
from django.conf import settings
from django.contrib.auth.models import User
def _get_default_file_content():
return {'status': False}
def _write_status_file():
file_path = settings.STATUS_FILE
with open(file_path, 'w') as f:
json.dump({'status': True}, f)
return True
def _get_status_file():
file_path = settings.STATUS_FILE
try:
with open(file_path) as f:
return json.load(f)
except FileNotFoundError:
with open(file_path, 'w') as f:
response = _get_default_file_content()
json.dump(response, f)
return response
def get_draw_status():
return _get_status_file()['status']
def flip_draw_status():
if not _get_status_file()['status']:
return _write_status_file()
return True
def draw_names():
pass
def make_pairs(user_ids):
while True:
pairs = _get_pairs(user_ids=user_ids)
if _is_valid_pair(pairs=pairs):
break
return pairs
def _get_pairs(user_ids):
user_ids_copy = user_ids.copy()
random.shuffle(user_ids_copy)
pairs = deque(user_ids_copy)
pairs.rotate()
return list(zip(user_ids, user_ids_copy))
def _is_valid_pair(pairs):
"""
Checks if the pair and list of pairs is valid. A pair is invalid if both
santa and santee are same i.e. (1, 1)
"""
for pair in pairs:
if pair[0] == pair[1]:
return False
return True
|
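In the helpers added above, the rotated deque is built but the return value zips user_ids against user_ids_copy, so avoiding self-pairs comes from the retry loop in make_pairs rather than from the rotation. An alternative standalone sketch that builds a self-match-free assignment directly by pairing each id with the next one in a shuffled ring (illustrative names, not project code):

import random

def make_ring_pairs(user_ids):
    ids = list(user_ids)
    random.shuffle(ids)
    # Each id gives to the next one in the shuffled ring; the last wraps to the
    # first, so no id is ever paired with itself when there are two or more ids.
    return [(ids[i], ids[(i + 1) % len(ids)]) for i in range(len(ids))]

print(make_ring_pairs([1, 2, 3, 4]))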
21f06746eebe809f5d7017394b4c7c50ba319066
|
street_score/bulkadmin/forms.py
|
street_score/bulkadmin/forms.py
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField()
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField(help_text="""
<p>Select the CSV file to upload. The file should have a header for
each column you want to populate. When you have selected your
file, click the 'Upload' button below.</p>
""")
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
Add a help_text string to the admin form
|
Add a help_text string to the admin form
|
Python
|
mit
|
openplans/streetscore,openplans/streetscore,openplans/streetscore
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField()
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
Add a help_text string to the admin form
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField(help_text="""
<p>Select the CSV file to upload. The file should have a header for
each column you want to populate. When you have selected your
file, click the 'Upload' button below.</p>
""")
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
<commit_before>import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField()
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
<commit_msg>Add a help_text string to the admin form<commit_after>
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField(help_text="""
<p>Select the CSV file to upload. The file should have a header for
each column you want to populate. When you have selected your
file, click the 'Upload' button below.</p>
""")
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField()
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
Add a help_text string to the admin formimport csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField(help_text="""
<p>Select the CSV file to upload. The file should have a header for
each column you want to populate. When you have selected your
file, click the 'Upload' button below.</p>
""")
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
<commit_before>import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField()
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
<commit_msg>Add a help_text string to the admin form<commit_after>import csv
from django import forms
class BulkUploadForm(forms.Form):
data = forms.FileField(help_text="""
<p>Select the CSV file to upload. The file should have a header for
each column you want to populate. When you have selected your
file, click the 'Upload' button below.</p>
""")
def clean(self):
cleaned_data = super(BulkUploadForm, self).clean()
cleaned_data['data'] = BulkUploadForm.load_csv(cleaned_data['data'])
return cleaned_data
@staticmethod
def load_csv(f):
reader = csv.reader(f)
data = []
i = 0
for row in reader:
if i == 0:
header = row
else:
data.append(dict(zip(header, row)))
i += 1
return data
|
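The header/zip loop in load_csv above can also be expressed with csv.DictReader, which keys each row by the header row automatically. A standalone sketch; note that a real Django upload arrives as a binary file object and may need decoding before it is handed to the csv module:

import csv
import io

def load_csv(f):
    # DictReader treats the first row as the header and yields one dict per row.
    return list(csv.DictReader(f))

sample = io.StringIO("name,score\nalice,10\nbob,7\n")
print(load_csv(sample))   # [{'name': 'alice', 'score': '10'}, {'name': 'bob', 'score': '7'}]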
edadd7385ff8c839b524a6c06d0fc370c3db25bb
|
src/constants.py
|
src/constants.py
|
#!/usr/bin/env python
TRAJECTORY_TYPE = 'circular'
if TRAJECTORY_TYPE == 'linear':
SIMULATION_TIME_IN_SECONDS = 40
elif TRAJECTORY_TYPE == 'circular':
SIMULATION_TIME_IN_SECONDS = 120
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
K_V = 0.90
K_W = 0.90
|
#!/usr/bin/env python
TRAJECTORY_TYPE = 'squared'
if TRAJECTORY_TYPE == 'linear':
SIMULATION_TIME_IN_SECONDS = 40.0
elif TRAJECTORY_TYPE == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
elif TRAJECTORY_TYPE == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
K_V = 0.90
K_W = 0.90
|
Add simulation time for a squared trajectory
|
Add simulation time for a squared trajectory
|
Python
|
mit
|
bit0001/trajectory_tracking,bit0001/trajectory_tracking
|
#!/usr/bin/env python
TRAJECTORY_TYPE = 'circular'
if TRAJECTORY_TYPE == 'linear':
SIMULATION_TIME_IN_SECONDS = 40
elif TRAJECTORY_TYPE == 'circular':
SIMULATION_TIME_IN_SECONDS = 120
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
K_V = 0.90
K_W = 0.90
Add simulation time for a squared trajectory
|
#!/usr/bin/env python
TRAJECTORY_TYPE = 'squared'
if TRAJECTORY_TYPE == 'linear':
SIMULATION_TIME_IN_SECONDS = 40.0
elif TRAJECTORY_TYPE == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
elif TRAJECTORY_TYPE == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
K_V = 0.90
K_W = 0.90
|
<commit_before>#!/usr/bin/env python
TRAJECTORY_TYPE = 'circular'
if TRAJECTORY_TYPE == 'linear':
SIMULATION_TIME_IN_SECONDS = 40
elif TRAJECTORY_TYPE == 'circular':
SIMULATION_TIME_IN_SECONDS = 120
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
K_V = 0.90
K_W = 0.90
<commit_msg>Add simulation time for a squared trajectory<commit_after>
|
#!/usr/bin/env python
TRAJECTORY_TYPE = 'squared'
if TRAJECTORY_TYPE == 'linear':
SIMULATION_TIME_IN_SECONDS = 40.0
elif TRAJECTORY_TYPE == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
elif TRAJECTORY_TYPE == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
K_V = 0.90
K_W = 0.90
|
#!/usr/bin/env python
TRAJECTORY_TYPE = 'circular'
if TRAJECTORY_TYPE == 'linear':
SIMULATION_TIME_IN_SECONDS = 40
elif TRAJECTORY_TYPE == 'circular':
SIMULATION_TIME_IN_SECONDS = 120
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
K_V = 0.90
K_W = 0.90
Add simulation time for a squared trajectory#!/usr/bin/env python
TRAJECTORY_TYPE = 'squared'
if TRAJECTORY_TYPE == 'linear':
SIMULATION_TIME_IN_SECONDS = 40.0
elif TRAJECTORY_TYPE == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
elif TRAJECTORY_TYPE == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
K_V = 0.90
K_W = 0.90
|
<commit_before>#!/usr/bin/env python
TRAJECTORY_TYPE = 'circular'
if TRAJECTORY_TYPE == 'linear':
SIMULATION_TIME_IN_SECONDS = 40
elif TRAJECTORY_TYPE == 'circular':
SIMULATION_TIME_IN_SECONDS = 120
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
K_V = 0.90
K_W = 0.90
<commit_msg>Add simulation time for a squared trajectory<commit_after>#!/usr/bin/env python
TRAJECTORY_TYPE = 'squared'
if TRAJECTORY_TYPE == 'linear':
SIMULATION_TIME_IN_SECONDS = 40.0
elif TRAJECTORY_TYPE == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
elif TRAJECTORY_TYPE == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
K_V = 0.90
K_W = 0.90
|
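For context on the record above: constants like DELTA_T, STEPS, K_V and K_W are normally consumed by a fixed-step control loop. The sketch below is illustrative only and assumes hypothetical trajectory/controller objects that are not part of the bit0001/trajectory_tracking record shown here.
# Illustrative only: how the constants above typically drive a fixed-step loop.
# 'trajectory' and 'controller' are hypothetical stand-ins, not code from this repository.
for step in range(STEPS):
    t = step * DELTA_T  # current simulation time in seconds
    # reference = trajectory.position_at(t)           # hypothetical trajectory interface
    # v, w = controller.control(reference, K_V, K_W)  # hypothetical use of the gains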
d386c389b9e350b01fdf25f7cd91857d3fbb1ead
|
opps/contrib/multisite/admin.py
|
opps/contrib/multisite/admin.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
if not settings.OPPS_MULTISITE_ADMIN:
return queryset
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
|
Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission
|
Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission
|
Python
|
mit
|
opps/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
if not settings.OPPS_MULTISITE_ADMIN:
return queryset
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
<commit_msg>Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
if not settings.OPPS_MULTISITE_ADMIN:
return queryset
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
if not settings.OPPS_MULTISITE_ADMIN:
return queryset
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
<commit_msg>Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
def queryset(self, request):
queryset = super(AdminViewPermission, self).queryset(request)
if not settings.OPPS_MULTISITE_ADMIN:
return queryset
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
return queryset.filter(site_iid=sitepermission.site_iid)
except SitePermission.DoesNotExist:
pass
return queryset
def get_form(self, request, obj=None, **kwargs):
form = super(AdminViewPermission, self).get_form(request, obj,
**kwargs)
if not settings.OPPS_MULTISITE_ADMIN:
return form
try:
sitepermission = SitePermission.objects.get(
user=request.user,
date_available__lte=timezone.now(),
published=True)
form.base_fields['site'].initial = sitepermission.site
form.base_fields['site'].choices = ((sitepermission.site.id,
sitepermission.site.domain),)
except SitePermission.DoesNotExist:
pass
return form
admin.site.register(SitePermission)
|
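A minimal usage sketch for the guard added in the record above, assuming OPPS_MULTISITE_ADMIN lives in the Django settings and that SitePermission exposes user, site, published and date_available fields as the admin code suggests; the import path and the two model instances are assumptions, not taken from the opps source.
# settings.py (assumed): enable per-site filtering in the admin
OPPS_MULTISITE_ADMIN = True
# Granting a user access to a single site (field names inferred from the admin code above)
from django.utils import timezone
from opps.contrib.multisite.models import SitePermission  # import path assumed
SitePermission.objects.create(
    user=editor_user,      # hypothetical User instance
    site=newsroom_site,    # hypothetical Site instance
    date_available=timezone.now(),
    published=True,
)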
52e6dabe13abdcd81a097beaacca585800397552
|
examples/upperair/Wyoming_Request.py
|
examples/upperair/Wyoming_Request.py
|
# Copyright (c) 2017 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Wyoming Upper Air Data Request
==============================
This example shows how to use siphon's `simplewebservice` support to create a query to
the Wyoming upper air archive.
"""
from datetime import datetime
from siphon.simplewebservice.wyoming import WyomingUpperAir
####################################################
# Create a datetime object for the sounding and string of the station identifier.
date = datetime(2017, 9, 10, 6)
station = 'MFL'
####################################################
# Make the request (a pandas dataframe is returned).
df = WyomingUpperAir.request_data(date, station)
####################################################
# Inspect data columns in the dataframe.
print(df.columns)
####################################################
# Pull out a specific column of data.
print(df['pressure'])
####################################################
# Units are stored in a dictionary with the variable name as the key in the `units` attribute
# of the dataframe.
print(df.units)
####################################################
print(df.units['pressure'])
|
# Copyright (c) 2017 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Wyoming Upper Air Data Request
==============================
This example shows how to use siphon's `simplewebservice` support to create a query to
the Wyoming upper air archive.
"""
from datetime import datetime
from metpy.units import units
from siphon.simplewebservice.wyoming import WyomingUpperAir
####################################################
# Create a datetime object for the sounding and string of the station identifier.
date = datetime(2017, 9, 10, 6)
station = 'MFL'
####################################################
# Make the request (a pandas dataframe is returned).
df = WyomingUpperAir.request_data(date, station)
####################################################
# Inspect data columns in the dataframe.
print(df.columns)
####################################################
# Pull out a specific column of data.
print(df['pressure'])
####################################################
# Units are stored in a dictionary with the variable name as the key in the `units` attribute
# of the dataframe.
print(df.units)
####################################################
print(df.units['pressure'])
####################################################
# Units can then be attached to the values from the dataframe.
pressure = df['pressure'].values * units(df.units['pressure'])
temperature = df['temperature'].values * units(df.units['temperature'])
dewpoint = df['dewpoint'].values * units(df.units['dewpoint'])
u_wind = df['u_wind'].values * units(df.units['u_wind'])
v_wind = df['v_wind'].values * units(df.units['v_wind'])
|
Add attaching units to example.
|
Add attaching units to example.
|
Python
|
bsd-3-clause
|
Unidata/siphon
|
# Copyright (c) 2017 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Wyoming Upper Air Data Request
==============================
This example shows how to use siphon's `simplewebservice` support to create a query to
the Wyoming upper air archive.
"""
from datetime import datetime
from siphon.simplewebservice.wyoming import WyomingUpperAir
####################################################
# Create a datetime object for the sounding and string of the station identifier.
date = datetime(2017, 9, 10, 6)
station = 'MFL'
####################################################
# Make the request (a pandas dataframe is returned).
df = WyomingUpperAir.request_data(date, station)
####################################################
# Inspect data columns in the dataframe.
print(df.columns)
####################################################
# Pull out a specific column of data.
print(df['pressure'])
####################################################
# Units are stored in a dictionary with the variable name as the key in the `units` attribute
# of the dataframe.
print(df.units)
####################################################
print(df.units['pressure'])
Add attaching units to example.
|
# Copyright (c) 2017 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Wyoming Upper Air Data Request
==============================
This example shows how to use siphon's `simplewebservice` support to create a query to
the Wyoming upper air archive.
"""
from datetime import datetime
from metpy.units import units
from siphon.simplewebservice.wyoming import WyomingUpperAir
####################################################
# Create a datetime object for the sounding and string of the station identifier.
date = datetime(2017, 9, 10, 6)
station = 'MFL'
####################################################
# Make the request (a pandas dataframe is returned).
df = WyomingUpperAir.request_data(date, station)
####################################################
# Inspect data columns in the dataframe.
print(df.columns)
####################################################
# Pull out a specific column of data.
print(df['pressure'])
####################################################
# Units are stored in a dictionary with the variable name as the key in the `units` attribute
# of the dataframe.
print(df.units)
####################################################
print(df.units['pressure'])
####################################################
# Units can then be attached to the values from the dataframe.
pressure = df['pressure'].values * units(df.units['pressure'])
temperature = df['temperature'].values * units(df.units['temperature'])
dewpoint = df['dewpoint'].values * units(df.units['dewpoint'])
u_wind = df['u_wind'].values * units(df.units['u_wind'])
v_wind = df['v_wind'].values * units(df.units['v_wind'])
|
<commit_before># Copyright (c) 2017 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Wyoming Upper Air Data Request
==============================
This example shows how to use siphon's `simplewebservice` support to create a query to
the Wyoming upper air archive.
"""
from datetime import datetime
from siphon.simplewebservice.wyoming import WyomingUpperAir
####################################################
# Create a datetime object for the sounding and string of the station identifier.
date = datetime(2017, 9, 10, 6)
station = 'MFL'
####################################################
# Make the request (a pandas dataframe is returned).
df = WyomingUpperAir.request_data(date, station)
####################################################
# Inspect data columns in the dataframe.
print(df.columns)
####################################################
# Pull out a specific column of data.
print(df['pressure'])
####################################################
# Units are stored in a dictionary with the variable name as the key in the `units` attribute
# of the dataframe.
print(df.units)
####################################################
print(df.units['pressure'])
<commit_msg>Add attaching units to example.<commit_after>
|
# Copyright (c) 2017 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Wyoming Upper Air Data Request
==============================
This example shows how to use siphon's `simplewebservice` support to create a query to
the Wyoming upper air archive.
"""
from datetime import datetime
from metpy.units import units
from siphon.simplewebservice.wyoming import WyomingUpperAir
####################################################
# Create a datetime object for the sounding and string of the station identifier.
date = datetime(2017, 9, 10, 6)
station = 'MFL'
####################################################
# Make the request (a pandas dataframe is returned).
df = WyomingUpperAir.request_data(date, station)
####################################################
# Inspect data columns in the dataframe.
print(df.columns)
####################################################
# Pull out a specific column of data.
print(df['pressure'])
####################################################
# Units are stored in a dictionary with the variable name as the key in the `units` attribute
# of the dataframe.
print(df.units)
####################################################
print(df.units['pressure'])
####################################################
# Units can then be attached to the values from the dataframe.
pressure = df['pressure'].values * units(df.units['pressure'])
temperature = df['temperature'].values * units(df.units['temperature'])
dewpoint = df['dewpoint'].values * units(df.units['dewpoint'])
u_wind = df['u_wind'].values * units(df.units['u_wind'])
v_wind = df['v_wind'].values * units(df.units['v_wind'])
|
# Copyright (c) 2017 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Wyoming Upper Air Data Request
==============================
This example shows how to use siphon's `simplewebservice` support to create a query to
the Wyoming upper air archive.
"""
from datetime import datetime
from siphon.simplewebservice.wyoming import WyomingUpperAir
####################################################
# Create a datetime object for the sounding and string of the station identifier.
date = datetime(2017, 9, 10, 6)
station = 'MFL'
####################################################
# Make the request (a pandas dataframe is returned).
df = WyomingUpperAir.request_data(date, station)
####################################################
# Inspect data columns in the dataframe.
print(df.columns)
####################################################
# Pull out a specific column of data.
print(df['pressure'])
####################################################
# Units are stored in a dictionary with the variable name as the key in the `units` attribute
# of the dataframe.
print(df.units)
####################################################
print(df.units['pressure'])
Add attaching units to example.# Copyright (c) 2017 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Wyoming Upper Air Data Request
==============================
This example shows how to use siphon's `simplewebservice` support to create a query to
the Wyoming upper air archive.
"""
from datetime import datetime
from metpy.units import units
from siphon.simplewebservice.wyoming import WyomingUpperAir
####################################################
# Create a datetime object for the sounding and string of the station identifier.
date = datetime(2017, 9, 10, 6)
station = 'MFL'
####################################################
# Make the request (a pandas dataframe is returned).
df = WyomingUpperAir.request_data(date, station)
####################################################
# Inspect data columns in the dataframe.
print(df.columns)
####################################################
# Pull out a specific column of data.
print(df['pressure'])
####################################################
# Units are stored in a dictionary with the variable name as the key in the `units` attribute
# of the dataframe.
print(df.units)
####################################################
print(df.units['pressure'])
####################################################
# Units can then be attached to the values from the dataframe.
pressure = df['pressure'].values * units(df.units['pressure'])
temperature = df['temperature'].values * units(df.units['temperature'])
dewpoint = df['dewpoint'].values * units(df.units['dewpoint'])
u_wind = df['u_wind'].values * units(df.units['u_wind'])
v_wind = df['v_wind'].values * units(df.units['v_wind'])
|
<commit_before># Copyright (c) 2017 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Wyoming Upper Air Data Request
==============================
This example shows how to use siphon's `simplewebservice` support to create a query to
the Wyoming upper air archive.
"""
from datetime import datetime
from siphon.simplewebservice.wyoming import WyomingUpperAir
####################################################
# Create a datetime object for the sounding and string of the station identifier.
date = datetime(2017, 9, 10, 6)
station = 'MFL'
####################################################
# Make the request (a pandas dataframe is returned).
df = WyomingUpperAir.request_data(date, station)
####################################################
# Inspect data columns in the dataframe.
print(df.columns)
####################################################
# Pull out a specific column of data.
print(df['pressure'])
####################################################
# Units are stored in a dictionary with the variable name as the key in the `units` attribute
# of the dataframe.
print(df.units)
####################################################
print(df.units['pressure'])
<commit_msg>Add attaching units to example.<commit_after># Copyright (c) 2017 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Wyoming Upper Air Data Request
==============================
This example shows how to use siphon's `simplewebservice` support to create a query to
the Wyoming upper air archive.
"""
from datetime import datetime
from metpy.units import units
from siphon.simplewebservice.wyoming import WyomingUpperAir
####################################################
# Create a datetime object for the sounding and string of the station identifier.
date = datetime(2017, 9, 10, 6)
station = 'MFL'
####################################################
# Make the request (a pandas dataframe is returned).
df = WyomingUpperAir.request_data(date, station)
####################################################
# Inspect data columns in the dataframe.
print(df.columns)
####################################################
# Pull out a specific column of data.
print(df['pressure'])
####################################################
# Units are stored in a dictionary with the variable name as the key in the `units` attribute
# of the dataframe.
print(df.units)
####################################################
print(df.units['pressure'])
####################################################
# Units can then be attached to the values from the dataframe.
pressure = df['pressure'].values * units(df.units['pressure'])
temperature = df['temperature'].values * units(df.units['temperature'])
dewpoint = df['dewpoint'].values * units(df.units['dewpoint'])
u_wind = df['u_wind'].values * units(df.units['u_wind'])
v_wind = df['v_wind'].values * units(df.units['v_wind'])
|
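Once units are attached as in the updated example above, the arrays behave as pint quantities, so unit conversions and unit-aware calculations follow naturally. The snippet below is a sketch that assumes MetPy is installed and reuses the variables created in the example; it is not part of the Siphon example itself.
# Unit-aware follow-up (illustrative; reuses pressure/temperature/dewpoint from above)
print(pressure.to('Pa')[:5])           # pint handles the hPa -> Pa conversion
print(temperature.to('kelvin')[:5])
import metpy.calc as mpcalc
# Lifting condensation level from the surface observation
lcl_pressure, lcl_temperature = mpcalc.lcl(pressure[0], temperature[0], dewpoint[0])
print(lcl_pressure, lcl_temperature)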
394ed06411d3ca3ada66aab3bee796682895acc0
|
cla_backend/apps/core/testing.py
|
cla_backend/apps/core/testing.py
|
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
|
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
|
Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)
|
Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)
|
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
|
<commit_before>from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
<commit_msg>Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)<commit_after>
|
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
|
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
|
<commit_before>from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
<commit_msg>Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)<commit_after>from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
|
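The IF NOT EXISTS form above makes the test-database setup idempotent when pgcrypto is already installed (for example via template1). A quick way to confirm which extensions the test database ends up with, sketched against the same cursor used in setup_databases (not part of the cla_backend code):
# Illustrative check, reusing the cursor opened in setup_databases above
cursor.execute("SELECT extname FROM pg_extension ORDER BY extname")
print([row[0] for row in cursor.fetchall()])  # should include 'pgcrypto'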
9e2d3ed154e977d38126f610f11a0df3a0141da1
|
apps/local_apps/account/context_processors.py
|
apps/local_apps/account/context_processors.py
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
account = AnonymousAccount(request)
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = None
return {'account': account}
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
Handle the exception case in the account context_processor.
|
Handle the exception case in the account context_processor.
git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1119 45601e1e-1555-4799-bd40-45c8c71eef50
|
Python
|
mit
|
alex/pinax,amarandon/pinax,amarandon/pinax,amarandon/pinax,amarandon/pinax,alex/pinax,alex/pinax
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
account = AnonymousAccount(request)
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = None
return {'account': account}
Handle the exception case in the account context_processor.
git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1119 45601e1e-1555-4799-bd40-45c8c71eef50
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
<commit_before>
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
account = AnonymousAccount(request)
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = None
return {'account': account}
<commit_msg>Handle the exception case in the account context_processor.
git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1119 45601e1e-1555-4799-bd40-45c8c71eef50<commit_after>
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
account = AnonymousAccount(request)
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = None
return {'account': account}
Handle the exception case in the account context_processor.
git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1119 45601e1e-1555-4799-bd40-45c8c71eef50
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
<commit_before>
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
account = AnonymousAccount(request)
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = None
return {'account': account}
<commit_msg>Handle the exception case in the account context_processor.
git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1119 45601e1e-1555-4799-bd40-45c8c71eef50<commit_after>
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
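For the context processor in the record above to run, it must be registered in the Django settings; the exact setting name depends on the Django version this Pinax snapshot targets, so the sketch below is an assumption rather than the project's actual configuration.
# Old-style settings (early Django / Pinax era) -- assumed, not taken from this repository
TEMPLATE_CONTEXT_PROCESSORS = (
    "account.context_processors.openid",
    "account.context_processors.account",
    # ... plus the framework defaults
)
# Templates can then use {{ account }} unconditionally, since the processor now always
# returns either an Account or an AnonymousAccount.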
dddb15a50d64a7b2660f2af1a407dd7fa9b3742b
|
samples/auth.py
|
samples/auth.py
|
"""[START auth]"""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery
def get_service():
credentials = GoogleCredentials.get_application_default()
return build('bigquery', 'v2', credentials)
"""[END auth]"""
|
"""[START auth]"""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery import build
def get_service():
credentials = GoogleCredentials.get_application_default()
return build('bigquery', 'v2', credentials)
"""[END auth]"""
|
Make sure to import build from googleapiclient.discovery.
|
Make sure to import build from googleapiclient.discovery.
|
Python
|
apache-2.0
|
googlearchive/bigquery-samples-python,googlearchive/bigquery-samples-python
|
"""[START auth]"""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery
def get_service():
credentials = GoogleCredentials.get_application_default()
return build('bigquery', 'v2', credentials)
"""[END auth]"""
Make sure to import build from googleapiclient.discovery.
|
"""[START auth]"""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery import build
def get_service():
credentials = GoogleCredentials.get_application_default()
return build('bigquery', 'v2', credentials)
"""[END auth]"""
|
<commit_before>"""[START auth]"""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery
def get_service():
credentials = GoogleCredentials.get_application_default()
return build('bigquery', 'v2', credentials)
"""[END auth]"""
<commit_msg>Make sure to import build from googleapiclient.discovery.<commit_after>
|
"""[START auth]"""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery import build
def get_service():
credentials = GoogleCredentials.get_application_default()
return build('bigquery', 'v2', credentials)
"""[END auth]"""
|
"""[START auth]"""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery
def get_service():
credentials = GoogleCredentials.get_application_default()
return build('bigquery', 'v2', credentials)
"""[END auth]"""
Make sure to import build from googleapiclient.discovery."""[START auth]"""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery import build
def get_service():
credentials = GoogleCredentials.get_application_default()
return build('bigquery', 'v2', credentials)
"""[END auth]"""
|
<commit_before>"""[START auth]"""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery
def get_service():
credentials = GoogleCredentials.get_application_default()
return build('bigquery', 'v2', credentials)
"""[END auth]"""
<commit_msg>Make sure to import build from googleapiclient.discovery.<commit_after>"""[START auth]"""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery import build
def get_service():
credentials = GoogleCredentials.get_application_default()
return build('bigquery', 'v2', credentials)
"""[END auth]"""
|
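With the import fixed, get_service() returns a googleapiclient resource object. A minimal usage sketch follows; the project id is a placeholder and the call assumes the BigQuery v2 datasets.list method, none of which comes from this repository. Depending on the google-api-python-client version, the credentials may also need to be passed as a keyword (credentials=credentials) rather than positionally.
# Illustrative only: list datasets in a project using the service built above.
service = get_service()
datasets = service.datasets().list(projectId='my-project-id').execute()  # placeholder project id
for entry in datasets.get('datasets', []):
    print(entry['datasetReference']['datasetId'])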
be7f3153e1505ecdfca6e5078c4b3a4ed1817c28
|
setup.py
|
setup.py
|
import os
import json
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
f = open(os.path.join(os.path.dirname(__file__), 'package.json'))
package = json.loads(f.read())
f.close()
setup(
name=package['name'],
version=package['version'],
description=package['description'],
long_description=readme,
author=package['author']['name'],
author_email=package['author']['email'],
url=package['homepage'],
packages=['s3direct'],
include_package_data=True,
install_requires=['django>=1.8'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
import os
import json
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
f = open(os.path.join(os.path.dirname(__file__), 'package.json'))
package = json.loads(f.read())
f.close()
setup(
name=package['name'],
version=package['version'],
description=package['description'],
long_description=readme,
long_description_content_type='text/markdown',
author=package['author']['name'],
author_email=package['author']['email'],
url=package['homepage'],
packages=['s3direct'],
include_package_data=True,
install_requires=['django>=1.8'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
Add description content type for pypi.
|
Add description content type for pypi.
|
Python
|
mit
|
bradleyg/django-s3direct,bradleyg/django-s3direct,bradleyg/django-s3direct
|
import os
import json
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
f = open(os.path.join(os.path.dirname(__file__), 'package.json'))
package = json.loads(f.read())
f.close()
setup(
name=package['name'],
version=package['version'],
description=package['description'],
long_description=readme,
author=package['author']['name'],
author_email=package['author']['email'],
url=package['homepage'],
packages=['s3direct'],
include_package_data=True,
install_requires=['django>=1.8'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
Add description content type for pypi.
|
import os
import json
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
f = open(os.path.join(os.path.dirname(__file__), 'package.json'))
package = json.loads(f.read())
f.close()
setup(
name=package['name'],
version=package['version'],
description=package['description'],
long_description=readme,
long_description_content_type='text/markdown',
author=package['author']['name'],
author_email=package['author']['email'],
url=package['homepage'],
packages=['s3direct'],
include_package_data=True,
install_requires=['django>=1.8'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
<commit_before>import os
import json
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
f = open(os.path.join(os.path.dirname(__file__), 'package.json'))
package = json.loads(f.read())
f.close()
setup(
name=package['name'],
version=package['version'],
description=package['description'],
long_description=readme,
author=package['author']['name'],
author_email=package['author']['email'],
url=package['homepage'],
packages=['s3direct'],
include_package_data=True,
install_requires=['django>=1.8'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
<commit_msg>Add description content type for pypi.<commit_after>
|
import os
import json
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
f = open(os.path.join(os.path.dirname(__file__), 'package.json'))
package = json.loads(f.read())
f.close()
setup(
name=package['name'],
version=package['version'],
description=package['description'],
long_description=readme,
long_description_content_type='text/markdown',
author=package['author']['name'],
author_email=package['author']['email'],
url=package['homepage'],
packages=['s3direct'],
include_package_data=True,
install_requires=['django>=1.8'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
import os
import json
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
f = open(os.path.join(os.path.dirname(__file__), 'package.json'))
package = json.loads(f.read())
f.close()
setup(
name=package['name'],
version=package['version'],
description=package['description'],
long_description=readme,
author=package['author']['name'],
author_email=package['author']['email'],
url=package['homepage'],
packages=['s3direct'],
include_package_data=True,
install_requires=['django>=1.8'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
Add description content type for pypi.import os
import json
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
f = open(os.path.join(os.path.dirname(__file__), 'package.json'))
package = json.loads(f.read())
f.close()
setup(
name=package['name'],
version=package['version'],
description=package['description'],
long_description=readme,
long_description_content_type='text/markdown',
author=package['author']['name'],
author_email=package['author']['email'],
url=package['homepage'],
packages=['s3direct'],
include_package_data=True,
install_requires=['django>=1.8'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
<commit_before>import os
import json
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
f = open(os.path.join(os.path.dirname(__file__), 'package.json'))
package = json.loads(f.read())
f.close()
setup(
name=package['name'],
version=package['version'],
description=package['description'],
long_description=readme,
author=package['author']['name'],
author_email=package['author']['email'],
url=package['homepage'],
packages=['s3direct'],
include_package_data=True,
install_requires=['django>=1.8'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
<commit_msg>Add description content type for pypi.<commit_after>import os
import json
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
f = open(os.path.join(os.path.dirname(__file__), 'package.json'))
package = json.loads(f.read())
f.close()
setup(
name=package['name'],
version=package['version'],
description=package['description'],
long_description=readme,
long_description_content_type='text/markdown',
author=package['author']['name'],
author_email=package['author']['email'],
url=package['homepage'],
packages=['s3direct'],
include_package_data=True,
install_requires=['django>=1.8'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
c99ef9e318467624de8f62afee6c14ca422e8413
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup, find_packages
import pip
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('src/bslint.py').read(),
re.M
).group(1)
with open("README.rst", "rb") as f:
long_descr = f.read().decode("utf-8")
pip.main(['install', 'pyenchant'])
setup(
name = "bslint",
packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir={'src': 'src'},
package_data={'src': ['config/*.json']},
entry_points = { "console_scripts": ['bslint = src.bslint:main']},
version = version,
description = "A linter tool for the BrightScript language.",
long_description = long_descr,
author = "BSLint",
author_email = "zachary.robinson@sky.uk",
url = "https://github.com/sky-uk/roku-linter",
download_url = 'https://github.com/sky-uk/bslint/archive/0.2.2.tar.gz',
)
|
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup, find_packages
import pip
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('src/bslint.py').read(),
re.M
).group(1)
with open("README.rst", "rb") as f:
long_descr = f.read().decode("utf-8")
pip.main(['install', 'pyenchant'])
setup(
name = "bslint",
packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir={'src': 'src'},
package_data={'src': ['config/*.json']},
entry_points = { "console_scripts": ['bslint = src.bslint:main']},
version = version,
description = "A linter tool for the BrightScript language.",
long_description = long_descr,
author = "BSLint",
author_email = "zachary.robinson@sky.uk",
url = "https://github.com/sky-uk/roku-linter",
download_url = 'https://github.com/sky-uk/bslint/archive/0.2.2.tar.gz',
install_requires=['pip']
)
|
Include pip as required install
|
Include pip as required install
|
Python
|
bsd-3-clause
|
sky-uk/bslint
|
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup, find_packages
import pip
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('src/bslint.py').read(),
re.M
).group(1)
with open("README.rst", "rb") as f:
long_descr = f.read().decode("utf-8")
pip.main(['install', 'pyenchant'])
setup(
name = "bslint",
packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir={'src': 'src'},
package_data={'src': ['config/*.json']},
entry_points = { "console_scripts": ['bslint = src.bslint:main']},
version = version,
description = "A linter tool for the BrightScript language.",
long_description = long_descr,
author = "BSLint",
author_email = "zachary.robinson@sky.uk",
url = "https://github.com/sky-uk/roku-linter",
download_url = 'https://github.com/sky-uk/bslint/archive/0.2.2.tar.gz',
)
Include pip as required install
|
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup, find_packages
import pip
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('src/bslint.py').read(),
re.M
).group(1)
with open("README.rst", "rb") as f:
long_descr = f.read().decode("utf-8")
pip.main(['install', 'pyenchant'])
setup(
name = "bslint",
packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir={'src': 'src'},
package_data={'src': ['config/*.json']},
entry_points = { "console_scripts": ['bslint = src.bslint:main']},
version = version,
description = "A linter tool for the BrightScript language.",
long_description = long_descr,
author = "BSLint",
author_email = "zachary.robinson@sky.uk",
url = "https://github.com/sky-uk/roku-linter",
download_url = 'https://github.com/sky-uk/bslint/archive/0.2.2.tar.gz',
install_requires=['pip']
)
|
<commit_before># -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup, find_packages
import pip
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('src/bslint.py').read(),
re.M
).group(1)
with open("README.rst", "rb") as f:
long_descr = f.read().decode("utf-8")
pip.main(['install', 'pyenchant'])
setup(
name = "bslint",
packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir={'src': 'src'},
package_data={'src': ['config/*.json']},
entry_points = { "console_scripts": ['bslint = src.bslint:main']},
version = version,
description = "A linter tool for the BrightScript language.",
long_description = long_descr,
author = "BSLint",
author_email = "zachary.robinson@sky.uk",
url = "https://github.com/sky-uk/roku-linter",
download_url = 'https://github.com/sky-uk/bslint/archive/0.2.2.tar.gz',
)
<commit_msg>Include pip as required install<commit_after>
|
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup, find_packages
import pip
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('src/bslint.py').read(),
re.M
).group(1)
with open("README.rst", "rb") as f:
long_descr = f.read().decode("utf-8")
pip.main(['install', 'pyenchant'])
setup(
name = "bslint",
packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir={'src': 'src'},
package_data={'src': ['config/*.json']},
entry_points = { "console_scripts": ['bslint = src.bslint:main']},
version = version,
description = "A linter tool for the BrightScript language.",
long_description = long_descr,
author = "BSLint",
author_email = "zachary.robinson@sky.uk",
url = "https://github.com/sky-uk/roku-linter",
download_url = 'https://github.com/sky-uk/bslint/archive/0.2.2.tar.gz',
install_requires=['pip']
)
|
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup, find_packages
import pip
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('src/bslint.py').read(),
re.M
).group(1)
with open("README.rst", "rb") as f:
long_descr = f.read().decode("utf-8")
pip.main(['install', 'pyenchant'])
setup(
name = "bslint",
packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir={'src': 'src'},
package_data={'src': ['config/*.json']},
entry_points = { "console_scripts": ['bslint = src.bslint:main']},
version = version,
description = "A linter tool for the BrightScript language.",
long_description = long_descr,
author = "BSLint",
author_email = "zachary.robinson@sky.uk",
url = "https://github.com/sky-uk/roku-linter",
download_url = 'https://github.com/sky-uk/bslint/archive/0.2.2.tar.gz',
)
Include pip as required install
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup, find_packages
import pip
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('src/bslint.py').read(),
re.M
).group(1)
with open("README.rst", "rb") as f:
long_descr = f.read().decode("utf-8")
pip.main(['install', 'pyenchant'])
setup(
name = "bslint",
packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir={'src': 'src'},
package_data={'src': ['config/*.json']},
entry_points = { "console_scripts": ['bslint = src.bslint:main']},
version = version,
description = "A linter tool for the BrightScript language.",
long_description = long_descr,
author = "BSLint",
author_email = "zachary.robinson@sky.uk",
url = "https://github.com/sky-uk/roku-linter",
download_url = 'https://github.com/sky-uk/bslint/archive/0.2.2.tar.gz',
install_requires=['pip']
)
|
<commit_before># -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup, find_packages
import pip
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('src/bslint.py').read(),
re.M
).group(1)
with open("README.rst", "rb") as f:
long_descr = f.read().decode("utf-8")
pip.main(['install', 'pyenchant'])
setup(
name = "bslint",
packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir={'src': 'src'},
package_data={'src': ['config/*.json']},
entry_points = { "console_scripts": ['bslint = src.bslint:main']},
version = version,
description = "A linter tool for the BrightScript language.",
long_description = long_descr,
author = "BSLint",
author_email = "zachary.robinson@sky.uk",
url = "https://github.com/sky-uk/roku-linter",
download_url = 'https://github.com/sky-uk/bslint/archive/0.2.2.tar.gz',
)
<commit_msg>Include pip as required install<commit_after># -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup, find_packages
import pip
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('src/bslint.py').read(),
re.M
).group(1)
with open("README.rst", "rb") as f:
long_descr = f.read().decode("utf-8")
pip.main(['install', 'pyenchant'])
setup(
name = "bslint",
packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir={'src': 'src'},
package_data={'src': ['config/*.json']},
entry_points = { "console_scripts": ['bslint = src.bslint:main']},
version = version,
description = "A linter tool for the BrightScript language.",
long_description = long_descr,
author = "BSLint",
author_email = "zachary.robinson@sky.uk",
url = "https://github.com/sky-uk/roku-linter",
download_url = 'https://github.com/sky-uk/bslint/archive/0.2.2.tar.gz',
install_requires=['pip']
)
|
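A note on the pattern in this record: `pip.main(['install', 'pyenchant'])` installs a dependency as a side effect of running setup.py, and `pip.main` was never a stable public API (it disappeared from the top-level `pip` module in pip 10). The conventional route is to declare such runtime dependencies in `install_requires` and let the installer resolve them. A hedged sketch of that alternative, not the project's actual packaging:

```
# Declare pyenchant as an ordinary runtime dependency instead of invoking
# pip imperatively at build time; the installer then resolves and installs it.
from setuptools import setup, find_packages

setup(
    name='bslint',
    packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*', 'tests']),
    install_requires=[
        'pyenchant',  # previously installed via pip.main(['install', 'pyenchant'])
    ],
)
```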
a3fb6a02a2c039fe53326ab7bf974efa1df0c2fe
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='rq-scheduler',
version='0.1',
author='Selwin Ong',
author_email='selwin.ong@gmail.com',
packages=['rq_scheduler'],
url='https://github.com/ui/rq-scheduler',
license='MIT',
description='Provides job scheduling capabilities to RQ (Redis Queue)',
long_description=open('README.rst').read(),
zip_safe=False,
include_package_data=True,
entry_points='''\
[console_scripts]
rqscheduler = rq_scheduler.scripts.rqscheduler:main
''',
package_data = { '': ['README.rst'] },
install_requires=['rq'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='rq-scheduler',
version='0.1.1',
author='Selwin Ong',
author_email='selwin.ong@gmail.com',
packages=['rq_scheduler'],
url='https://github.com/ui/rq-scheduler',
license='MIT',
description='Provides job scheduling capabilities to RQ (Redis Queue)',
long_description=open('README.rst').read(),
zip_safe=False,
include_package_data=True,
entry_points='''\
[console_scripts]
rqscheduler = rq_scheduler.scripts.rqscheduler:main
''',
package_data = { '': ['README.rst'] },
install_requires=['rq'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
Bump version to fix pypi packaging
|
Bump version to fix pypi packaging
|
Python
|
mit
|
ihuro/rq-scheduler,sum12/rq-scheduler,cheungpat/rq-scheduler,ui/rq-scheduler,lechup/rq-scheduler,mbodock/rq-scheduler,peergradeio/rq-scheduler
|
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='rq-scheduler',
version='0.1',
author='Selwin Ong',
author_email='selwin.ong@gmail.com',
packages=['rq_scheduler'],
url='https://github.com/ui/rq-scheduler',
license='MIT',
description='Provides job scheduling capabilities to RQ (Redis Queue)',
long_description=open('README.rst').read(),
zip_safe=False,
include_package_data=True,
entry_points='''\
[console_scripts]
rqscheduler = rq_scheduler.scripts.rqscheduler:main
''',
package_data = { '': ['README.rst'] },
install_requires=['rq'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Bump version to fix pypi packaging
|
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='rq-scheduler',
version='0.1.1',
author='Selwin Ong',
author_email='selwin.ong@gmail.com',
packages=['rq_scheduler'],
url='https://github.com/ui/rq-scheduler',
license='MIT',
description='Provides job scheduling capabilities to RQ (Redis Queue)',
long_description=open('README.rst').read(),
zip_safe=False,
include_package_data=True,
entry_points='''\
[console_scripts]
rqscheduler = rq_scheduler.scripts.rqscheduler:main
''',
package_data = { '': ['README.rst'] },
install_requires=['rq'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='rq-scheduler',
version='0.1',
author='Selwin Ong',
author_email='selwin.ong@gmail.com',
packages=['rq_scheduler'],
url='https://github.com/ui/rq-scheduler',
license='MIT',
description='Provides job scheduling capabilities to RQ (Redis Queue)',
long_description=open('README.rst').read(),
zip_safe=False,
include_package_data=True,
entry_points='''\
[console_scripts]
rqscheduler = rq_scheduler.scripts.rqscheduler:main
''',
package_data = { '': ['README.rst'] },
install_requires=['rq'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)<commit_msg>Bump version to fix pypi packaging<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='rq-scheduler',
version='0.1.1',
author='Selwin Ong',
author_email='selwin.ong@gmail.com',
packages=['rq_scheduler'],
url='https://github.com/ui/rq-scheduler',
license='MIT',
description='Provides job scheduling capabilities to RQ (Redis Queue)',
long_description=open('README.rst').read(),
zip_safe=False,
include_package_data=True,
entry_points='''\
[console_scripts]
rqscheduler = rq_scheduler.scripts.rqscheduler:main
''',
package_data = { '': ['README.rst'] },
install_requires=['rq'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='rq-scheduler',
version='0.1',
author='Selwin Ong',
author_email='selwin.ong@gmail.com',
packages=['rq_scheduler'],
url='https://github.com/ui/rq-scheduler',
license='MIT',
description='Provides job scheduling capabilities to RQ (Redis Queue)',
long_description=open('README.rst').read(),
zip_safe=False,
include_package_data=True,
entry_points='''\
[console_scripts]
rqscheduler = rq_scheduler.scripts.rqscheduler:main
''',
package_data = { '': ['README.rst'] },
install_requires=['rq'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Bump version to fix pypi packaging
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='rq-scheduler',
version='0.1.1',
author='Selwin Ong',
author_email='selwin.ong@gmail.com',
packages=['rq_scheduler'],
url='https://github.com/ui/rq-scheduler',
license='MIT',
description='Provides job scheduling capabilities to RQ (Redis Queue)',
long_description=open('README.rst').read(),
zip_safe=False,
include_package_data=True,
entry_points='''\
[console_scripts]
rqscheduler = rq_scheduler.scripts.rqscheduler:main
''',
package_data = { '': ['README.rst'] },
install_requires=['rq'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='rq-scheduler',
version='0.1',
author='Selwin Ong',
author_email='selwin.ong@gmail.com',
packages=['rq_scheduler'],
url='https://github.com/ui/rq-scheduler',
license='MIT',
description='Provides job scheduling capabilities to RQ (Redis Queue)',
long_description=open('README.rst').read(),
zip_safe=False,
include_package_data=True,
entry_points='''\
[console_scripts]
rqscheduler = rq_scheduler.scripts.rqscheduler:main
''',
package_data = { '': ['README.rst'] },
install_requires=['rq'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)<commit_msg>Bump version to fix pypi packaging<commit_after># -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='rq-scheduler',
version='0.1.1',
author='Selwin Ong',
author_email='selwin.ong@gmail.com',
packages=['rq_scheduler'],
url='https://github.com/ui/rq-scheduler',
license='MIT',
description='Provides job scheduling capabilities to RQ (Redis Queue)',
long_description=open('README.rst').read(),
zip_safe=False,
include_package_data=True,
entry_points='''\
[console_scripts]
rqscheduler = rq_scheduler.scripts.rqscheduler:main
''',
package_data = { '': ['README.rst'] },
install_requires=['rq'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
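The fix in this record is purely a metadata bump (0.1 to 0.1.1) so a corrected distribution can be uploaded, since PyPI will not accept a re-upload under an already-used version number. A common way to keep such bumps to a one-line change is to store the version in a single place and import it from setup.py; the module layout below is an assumption for illustration, not how rq-scheduler is actually organised:

```
# rq_scheduler/version.py  (assumed single source of truth)
VERSION = '0.1.1'

# setup.py
from setuptools import setup
from rq_scheduler.version import VERSION  # this module must avoid third-party imports

setup(
    name='rq-scheduler',
    version=VERSION,
    packages=['rq_scheduler'],
)
```

The comment above matters in practice: setup.py runs before dependencies are installed, so whatever module supplies the version has to import cleanly on its own.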
cadc5534f5926a0aeb1fb4bf6e9f1db0f56a9b6f
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requires = req_file.read().split('\n')
with open('requirements-dev.txt') as req_file:
requires_dev = req_file.read().split('\n')
with open('VERSION') as fp:
version = fp.read().strip()
setup(name='molo.polls',
version=version,
description=('A molo module that provides the ability to run polls.'),
long_description=readme,
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Praekelt Foundation',
author_email='dev@praekelt.com',
url='http://github.com/praekelt/molo.polls',
license='BSD',
keywords='praekelt, mobi, web, django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
namespace_packages=['molo'],
install_requires=requires,
tests_require=requires_dev,
entry_points={})
|
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requires = [req for req in req_file.read().split('\n') if req]
with open('requirements-dev.txt') as req_file:
requires_dev = [req for req in req_file.read().split('\n') if req]
with open('VERSION') as fp:
version = fp.read().strip()
setup(name='molo.polls',
version=version,
description=('A molo module that provides the ability to run polls.'),
long_description=readme,
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Praekelt Foundation',
author_email='dev@praekelt.com',
url='http://github.com/praekelt/molo.polls',
license='BSD',
keywords='praekelt, mobi, web, django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
namespace_packages=['molo'],
install_requires=requires,
tests_require=requires_dev,
entry_points={})
|
Remove empty string from requirements list
|
Remove empty string from requirements list
When we moved to Python 3 we used this simpler method to read the requirements
file. However we need to remove the empty/Falsey elements from the list.
This fixes the error:
```
Failed building wheel for molo.polls
```
|
Python
|
bsd-2-clause
|
praekelt/molo.polls,praekelt/molo.polls
|
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requires = req_file.read().split('\n')
with open('requirements-dev.txt') as req_file:
requires_dev = req_file.read().split('\n')
with open('VERSION') as fp:
version = fp.read().strip()
setup(name='molo.polls',
version=version,
description=('A molo module that provides the ability to run polls.'),
long_description=readme,
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Praekelt Foundation',
author_email='dev@praekelt.com',
url='http://github.com/praekelt/molo.polls',
license='BSD',
keywords='praekelt, mobi, web, django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
namespace_packages=['molo'],
install_requires=requires,
tests_require=requires_dev,
entry_points={})
Remove empty string from requirements list
When we moved to Python 3 we used this simpler method to read the requirements
file. However we need to remove the empty/Falsey elements from the list.
This fixes the error:
```
Failed building wheel for molo.polls
```
|
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requires = [req for req in req_file.read().split('\n') if req]
with open('requirements-dev.txt') as req_file:
requires_dev = [req for req in req_file.read().split('\n') if req]
with open('VERSION') as fp:
version = fp.read().strip()
setup(name='molo.polls',
version=version,
description=('A molo module that provides the ability to run polls.'),
long_description=readme,
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Praekelt Foundation',
author_email='dev@praekelt.com',
url='http://github.com/praekelt/molo.polls',
license='BSD',
keywords='praekelt, mobi, web, django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
namespace_packages=['molo'],
install_requires=requires,
tests_require=requires_dev,
entry_points={})
|
<commit_before>from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requires = req_file.read().split('\n')
with open('requirements-dev.txt') as req_file:
requires_dev = req_file.read().split('\n')
with open('VERSION') as fp:
version = fp.read().strip()
setup(name='molo.polls',
version=version,
description=('A molo module that provides the ability to run polls.'),
long_description=readme,
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Praekelt Foundation',
author_email='dev@praekelt.com',
url='http://github.com/praekelt/molo.polls',
license='BSD',
keywords='praekelt, mobi, web, django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
namespace_packages=['molo'],
install_requires=requires,
tests_require=requires_dev,
entry_points={})
<commit_msg>Remove empty string from requirements list
When we moved to Python 3 we used this simpler method to read the requirements
file. However we need to remove the empty/Falsey elements from the list.
This fixes the error:
```
Failed building wheel for molo.polls
```<commit_after>
|
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requires = [req for req in req_file.read().split('\n') if req]
with open('requirements-dev.txt') as req_file:
requires_dev = [req for req in req_file.read().split('\n') if req]
with open('VERSION') as fp:
version = fp.read().strip()
setup(name='molo.polls',
version=version,
description=('A molo module that provides the ability to run polls.'),
long_description=readme,
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Praekelt Foundation',
author_email='dev@praekelt.com',
url='http://github.com/praekelt/molo.polls',
license='BSD',
keywords='praekelt, mobi, web, django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
namespace_packages=['molo'],
install_requires=requires,
tests_require=requires_dev,
entry_points={})
|
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requires = req_file.read().split('\n')
with open('requirements-dev.txt') as req_file:
requires_dev = req_file.read().split('\n')
with open('VERSION') as fp:
version = fp.read().strip()
setup(name='molo.polls',
version=version,
description=('A molo module that provides the ability to run polls.'),
long_description=readme,
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Praekelt Foundation',
author_email='dev@praekelt.com',
url='http://github.com/praekelt/molo.polls',
license='BSD',
keywords='praekelt, mobi, web, django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
namespace_packages=['molo'],
install_requires=requires,
tests_require=requires_dev,
entry_points={})
Remove empty string from requirements list
When we moved to Python 3 we used this simpler method to read the requirements
file. However we need to remove the empty/Falsey elements from the list.
This fixes the error:
```
Failed building wheel for molo.polls
```
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requires = [req for req in req_file.read().split('\n') if req]
with open('requirements-dev.txt') as req_file:
requires_dev = [req for req in req_file.read().split('\n') if req]
with open('VERSION') as fp:
version = fp.read().strip()
setup(name='molo.polls',
version=version,
description=('A molo module that provides the ability to run polls.'),
long_description=readme,
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Praekelt Foundation',
author_email='dev@praekelt.com',
url='http://github.com/praekelt/molo.polls',
license='BSD',
keywords='praekelt, mobi, web, django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
namespace_packages=['molo'],
install_requires=requires,
tests_require=requires_dev,
entry_points={})
|
<commit_before>from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requires = req_file.read().split('\n')
with open('requirements-dev.txt') as req_file:
requires_dev = req_file.read().split('\n')
with open('VERSION') as fp:
version = fp.read().strip()
setup(name='molo.polls',
version=version,
description=('A molo module that provides the ability to run polls.'),
long_description=readme,
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Praekelt Foundation',
author_email='dev@praekelt.com',
url='http://github.com/praekelt/molo.polls',
license='BSD',
keywords='praekelt, mobi, web, django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
namespace_packages=['molo'],
install_requires=requires,
tests_require=requires_dev,
entry_points={})
<commit_msg>Remove empty string from requirements list
When we moved to Python 3 we used this simpler method to read the requirements
file. However we need to remove the empty/Falsey elements from the list.
This fixes the error:
```
Failed building wheel for molo.polls
```<commit_after>from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('requirements.txt') as req_file:
requires = [req for req in req_file.read().split('\n') if req]
with open('requirements-dev.txt') as req_file:
requires_dev = [req for req in req_file.read().split('\n') if req]
with open('VERSION') as fp:
version = fp.read().strip()
setup(name='molo.polls',
version=version,
description=('A molo module that provides the ability to run polls.'),
long_description=readme,
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Praekelt Foundation',
author_email='dev@praekelt.com',
url='http://github.com/praekelt/molo.polls',
license='BSD',
keywords='praekelt, mobi, web, django',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
namespace_packages=['molo'],
install_requires=requires,
tests_require=requires_dev,
entry_points={})
|
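The commit message pins down the bug: `req_file.read().split('\n')` produces an empty string for the trailing newline (and for any blank line), and an empty requirement breaks the wheel build. Below is a slightly more defensive variant of the same filtering; the comment handling is an extra and the helper name is ours, not taken from the project:

```
# Read a requirements file into a clean list: strip whitespace, drop blank
# lines (the empty strings that broke the wheel build) and comment lines.
def read_requirements(path):
    with open(path) as req_file:
        return [
            line.strip()
            for line in req_file
            if line.strip() and not line.strip().startswith('#')
        ]

requires = read_requirements('requirements.txt')
requires_dev = read_requirements('requirements-dev.txt')
```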
e197d1a030993ec01c113188593fbd12267fd4fa
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='Willow',
version='0.4a0',
description='A Python image library that sits on top of Pillow, Wand and OpenCV',
author='Karl Hobley',
author_email='karlhobley10@gmail.com',
url='',
packages=find_packages(),
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[],
zip_safe=False,
)
|
#!/usr/bin/env python
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='Willow',
version='0.4a0',
description='A Python image library that sits on top of Pillow, Wand and OpenCV',
author='Karl Hobley',
author_email='karlhobley10@gmail.com',
url='',
packages=find_packages(exclude=['tests']),
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[],
zip_safe=False,
)
|
Exclude tests package from distribution
|
Exclude tests package from distribution
|
Python
|
bsd-3-clause
|
wgiddens/Willow,wgiddens/Willow
|
#!/usr/bin/env python
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='Willow',
version='0.4a0',
description='A Python image library that sits on top of Pillow, Wand and OpenCV',
author='Karl Hobley',
author_email='karlhobley10@gmail.com',
url='',
packages=find_packages(),
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[],
zip_safe=False,
)
Exclude tests package from distribution
|
#!/usr/bin/env python
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='Willow',
version='0.4a0',
description='A Python image library that sits on top of Pillow, Wand and OpenCV',
author='Karl Hobley',
author_email='karlhobley10@gmail.com',
url='',
packages=find_packages(exclude=['tests']),
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[],
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='Willow',
version='0.4a0',
description='A Python image library that sits on top of Pillow, Wand and OpenCV',
author='Karl Hobley',
author_email='karlhobley10@gmail.com',
url='',
packages=find_packages(),
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[],
zip_safe=False,
)
<commit_msg>Exclude tests package from distribution<commit_after>
|
#!/usr/bin/env python
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='Willow',
version='0.4a0',
description='A Python image library that sits on top of Pillow, Wand and OpenCV',
author='Karl Hobley',
author_email='karlhobley10@gmail.com',
url='',
packages=find_packages(exclude=['tests']),
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[],
zip_safe=False,
)
|
#!/usr/bin/env python
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='Willow',
version='0.4a0',
description='A Python image library that sits on top of Pillow, Wand and OpenCV',
author='Karl Hobley',
author_email='karlhobley10@gmail.com',
url='',
packages=find_packages(),
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[],
zip_safe=False,
)
Exclude tests package from distribution
#!/usr/bin/env python
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='Willow',
version='0.4a0',
description='A Python image library that sits on top of Pillow, Wand and OpenCV',
author='Karl Hobley',
author_email='karlhobley10@gmail.com',
url='',
packages=find_packages(exclude=['tests']),
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[],
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='Willow',
version='0.4a0',
description='A Python image library that sits on top of Pillow, Wand and OpenCV',
author='Karl Hobley',
author_email='karlhobley10@gmail.com',
url='',
packages=find_packages(),
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[],
zip_safe=False,
)
<commit_msg>Exclude tests package from distribution<commit_after>#!/usr/bin/env python
import sys, os
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='Willow',
version='0.4a0',
description='A Python image library that sits on top of Pillow, Wand and OpenCV',
author='Karl Hobley',
author_email='karlhobley10@gmail.com',
url='',
packages=find_packages(exclude=['tests']),
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[],
zip_safe=False,
)
|
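One detail about the fix in this record: `find_packages(exclude=...)` matches package names, so `exclude=['tests']` removes only the top-level `tests` package and still ships sub-packages such as a hypothetical `tests.images`. The common idiom excludes both forms:

```
# Exclude the tests package and any of its sub-packages from the built
# distribution; the 'tests.*' pattern covers nested packages.
from setuptools import find_packages

packages = find_packages(exclude=['tests', 'tests.*'])
print(packages)  # everything except tests and tests.<anything>
```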
0a9601c4ee085c38f660e6d40c98256098576a92
|
setup.py
|
setup.py
|
from distutils.core import setup
from pip.req import parse_requirements
install_reqs = parse_requirements("./requirements.txt")
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='url-matchers',
version='0.0.1',
modules=['url_matchers'],
install_requires=reqs,
author="Alex Good",
author_email="alex@makerlabs.co.uk",
url="https://github.com/alexjg/url-matchers",
)
|
from distutils.core import setup
from os import path
from pip.req import parse_requirements
requirements_location = path.join(path.dirname(__file__), "requirements.txt")
install_reqs = parse_requirements(requirements_location)
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='url-matchers',
version='0.0.1',
modules=['url_matchers'],
install_requires=reqs,
author="Alex Good",
author_email="alex@makerlabs.co.uk",
url="https://github.com/alexjg/url-matchers",
)
|
Use absolute path for requirements file
|
Use absolute path for requirements file
|
Python
|
mit
|
alexjg/url-matchers
|
from distutils.core import setup
from pip.req import parse_requirements
install_reqs = parse_requirements("./requirements.txt")
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='url-matchers',
version='0.0.1',
modules=['url_matchers'],
install_requires=reqs,
author="Alex Good",
author_email="alex@makerlabs.co.uk",
url="https://github.com/alexjg/url-matchers",
)
Use absolute path for requirements file
|
from distutils.core import setup
from os import path
from pip.req import parse_requirements
requirements_location = path.join(path.dirname(__file__), "requirements.txt")
install_reqs = parse_requirements(requirements_location)
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='url-matchers',
version='0.0.1',
modules=['url_matchers'],
install_requires=reqs,
author="Alex Good",
author_email="alex@makerlabs.co.uk",
url="https://github.com/alexjg/url-matchers",
)
|
<commit_before>from distutils.core import setup
from pip.req import parse_requirements
install_reqs = parse_requirements("./requirements.txt")
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='url-matchers',
version='0.0.1',
modules=['url_matchers'],
install_requires=reqs,
author="Alex Good",
author_email="alex@makerlabs.co.uk",
url="https://github.com/alexjg/url-matchers",
)
<commit_msg>Use absolute path for requirements file<commit_after>
|
from distutils.core import setup
from os import path
from pip.req import parse_requirements
requirements_location = path.join(path.dirname(__file__), "requirements.txt")
install_reqs = parse_requirements(requirements_location)
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='url-matchers',
version='0.0.1',
modules=['url_matchers'],
install_requires=reqs,
author="Alex Good",
author_email="alex@makerlabs.co.uk",
url="https://github.com/alexjg/url-matchers",
)
|
from distutils.core import setup
from pip.req import parse_requirements
install_reqs = parse_requirements("./requirements.txt")
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='url-matchers',
version='0.0.1',
modules=['url_matchers'],
install_requires=reqs,
author="Alex Good",
author_email="alex@makerlabs.co.uk",
url="https://github.com/alexjg/url-matchers",
)
Use absolute path for requirements file
from distutils.core import setup
from os import path
from pip.req import parse_requirements
requirements_location = path.join(path.dirname(__file__), "requirements.txt")
install_reqs = parse_requirements(requirements_location)
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='url-matchers',
version='0.0.1',
modules=['url_matchers'],
install_requires=reqs,
author="Alex Good",
author_email="alex@makerlabs.co.uk",
url="https://github.com/alexjg/url-matchers",
)
|
<commit_before>from distutils.core import setup
from pip.req import parse_requirements
install_reqs = parse_requirements("./requirements.txt")
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='url-matchers',
version='0.0.1',
modules=['url_matchers'],
install_requires=reqs,
author="Alex Good",
author_email="alex@makerlabs.co.uk",
url="https://github.com/alexjg/url-matchers",
)
<commit_msg>Use absolute path for requirements file<commit_after>from distutils.core import setup
from os import path
from pip.req import parse_requirements
requirements_location = path.join(path.dirname(__file__), "requirements.txt")
install_reqs = parse_requirements(requirements_location)
reqs = [str(ir.req) for ir in install_reqs]
setup(
name='url-matchers',
version='0.0.1',
modules=['url_matchers'],
install_requires=reqs,
author="Alex Good",
author_email="alex@makerlabs.co.uk",
url="https://github.com/alexjg/url-matchers",
)
|
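Beyond the absolute-path fix, `pip.req.parse_requirements` is a pip internal: its signature has changed between releases (newer versions require a `session` argument) and the module was later moved out of pip's public namespace altogether. A sketch that keeps the path independence of this commit without importing pip at all; apart from the package name, the details are illustrative:

```
# Anchor requirements.txt to the directory containing setup.py and read it
# directly, so the file is found regardless of the current working directory.
from os import path
from setuptools import setup

here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'requirements.txt')) as f:
    reqs = [line.strip() for line in f if line.strip()]

setup(
    name='url-matchers',
    version='0.0.1',
    install_requires=reqs,
)
```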
091d0589d40e5bc19a58e16653ab48c9a821b276
|
setup.py
|
setup.py
|
from setuptools import setup
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
readme = f.read()
setup(
name='manuale',
version='1.0.1.dev0',
license='MIT',
description="A fully manual Let's Encrypt/ACME client",
long_description=readme,
url='https://github.com/veeti/manuale',
author="Veeti Paananen",
author_email='veeti.paananen@rojekti.fi',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
packages=['manuale'],
install_requires=[
'cryptography >= 1.0',
'requests',
],
entry_points={
'console_scripts': [
'manuale = manuale.cli:main',
],
},
)
|
from setuptools import setup
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
readme = f.read()
setup(
name='manuale',
version='1.0.1.dev0',
license='MIT',
description="A fully manual Let's Encrypt/ACME client",
long_description=readme,
url='https://github.com/veeti/manuale',
author="Veeti Paananen",
author_email='veeti.paananen@rojekti.fi',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
],
packages=['manuale'],
install_requires=[
'cryptography >= 1.0',
'requests',
],
entry_points={
'console_scripts': [
'manuale = manuale.cli:main',
],
},
)
|
Add Python 3 only classifier
|
Add Python 3 only classifier
|
Python
|
mit
|
veeti/manuale
|
from setuptools import setup
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
readme = f.read()
setup(
name='manuale',
version='1.0.1.dev0',
license='MIT',
description="A fully manual Let's Encrypt/ACME client",
long_description=readme,
url='https://github.com/veeti/manuale',
author="Veeti Paananen",
author_email='veeti.paananen@rojekti.fi',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
packages=['manuale'],
install_requires=[
'cryptography >= 1.0',
'requests',
],
entry_points={
'console_scripts': [
'manuale = manuale.cli:main',
],
},
)
Add Python 3 only classifier
|
from setuptools import setup
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
readme = f.read()
setup(
name='manuale',
version='1.0.1.dev0',
license='MIT',
description="A fully manual Let's Encrypt/ACME client",
long_description=readme,
url='https://github.com/veeti/manuale',
author="Veeti Paananen",
author_email='veeti.paananen@rojekti.fi',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
],
packages=['manuale'],
install_requires=[
'cryptography >= 1.0',
'requests',
],
entry_points={
'console_scripts': [
'manuale = manuale.cli:main',
],
},
)
|
<commit_before>from setuptools import setup
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
readme = f.read()
setup(
name='manuale',
version='1.0.1.dev0',
license='MIT',
description="A fully manual Let's Encrypt/ACME client",
long_description=readme,
url='https://github.com/veeti/manuale',
author="Veeti Paananen",
author_email='veeti.paananen@rojekti.fi',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
packages=['manuale'],
install_requires=[
'cryptography >= 1.0',
'requests',
],
entry_points={
'console_scripts': [
'manuale = manuale.cli:main',
],
},
)
<commit_msg>Add Python 3 only classifier<commit_after>
|
from setuptools import setup
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
readme = f.read()
setup(
name='manuale',
version='1.0.1.dev0',
license='MIT',
description="A fully manual Let's Encrypt/ACME client",
long_description=readme,
url='https://github.com/veeti/manuale',
author="Veeti Paananen",
author_email='veeti.paananen@rojekti.fi',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
],
packages=['manuale'],
install_requires=[
'cryptography >= 1.0',
'requests',
],
entry_points={
'console_scripts': [
'manuale = manuale.cli:main',
],
},
)
|
from setuptools import setup
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
readme = f.read()
setup(
name='manuale',
version='1.0.1.dev0',
license='MIT',
description="A fully manual Let's Encrypt/ACME client",
long_description=readme,
url='https://github.com/veeti/manuale',
author="Veeti Paananen",
author_email='veeti.paananen@rojekti.fi',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
packages=['manuale'],
install_requires=[
'cryptography >= 1.0',
'requests',
],
entry_points={
'console_scripts': [
'manuale = manuale.cli:main',
],
},
)
Add Python 3 only classifier
from setuptools import setup
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
readme = f.read()
setup(
name='manuale',
version='1.0.1.dev0',
license='MIT',
description="A fully manual Let's Encrypt/ACME client",
long_description=readme,
url='https://github.com/veeti/manuale',
author="Veeti Paananen",
author_email='veeti.paananen@rojekti.fi',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
],
packages=['manuale'],
install_requires=[
'cryptography >= 1.0',
'requests',
],
entry_points={
'console_scripts': [
'manuale = manuale.cli:main',
],
},
)
|
<commit_before>from setuptools import setup
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
readme = f.read()
setup(
name='manuale',
version='1.0.1.dev0',
license='MIT',
description="A fully manual Let's Encrypt/ACME client",
long_description=readme,
url='https://github.com/veeti/manuale',
author="Veeti Paananen",
author_email='veeti.paananen@rojekti.fi',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
packages=['manuale'],
install_requires=[
'cryptography >= 1.0',
'requests',
],
entry_points={
'console_scripts': [
'manuale = manuale.cli:main',
],
},
)
<commit_msg>Add Python 3 only classifier<commit_after>from setuptools import setup
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
readme = f.read()
setup(
name='manuale',
version='1.0.1.dev0',
license='MIT',
description="A fully manual Let's Encrypt/ACME client",
long_description=readme,
url='https://github.com/veeti/manuale',
author="Veeti Paananen",
author_email='veeti.paananen@rojekti.fi',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
],
packages=['manuale'],
install_requires=[
'cryptography >= 1.0',
'requests',
],
entry_points={
'console_scripts': [
'manuale = manuale.cli:main',
],
},
)
|
6cb1b973e24fee5879d5623673d976b66fdf4252
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='tattler',
author='Joe Friedl',
author_email='joe@joefriedl.net',
version='0.1',
description='A nose plugin that tattles on functions.',
keywords='nose plugin test testing mock',
url='https://github.com/grampajoe/tattler',
license='MIT',
py_modules=['tattler'],
install_requires=[
'mock',
],
entry_points = {
'nose.plugins.0.10': [
'tattler = tattler:Tattler',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
],
)
|
from setuptools import setup
setup(
name='tattler',
author='Joe Friedl',
author_email='joe@joefriedl.net',
version='0.1',
description='A nose plugin that tattles on functions.',
keywords='nose plugin test testing mock',
url='https://github.com/grampajoe/tattler',
license='MIT',
py_modules=['tattler'],
install_requires=[
'nose',
'mock',
],
entry_points = {
'nose.plugins.0.10': [
'tattler = tattler:Tattler',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
],
)
|
Add nose to the requirements, why not?
|
Add nose to the requirements, why not?
|
Python
|
mit
|
grampajoe/tattler
|
from setuptools import setup
setup(
name='tattler',
author='Joe Friedl',
author_email='joe@joefriedl.net',
version='0.1',
description='A nose plugin that tattles on functions.',
keywords='nose plugin test testing mock',
url='https://github.com/grampajoe/tattler',
license='MIT',
py_modules=['tattler'],
install_requires=[
'mock',
],
entry_points = {
'nose.plugins.0.10': [
'tattler = tattler:Tattler',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
],
)
Add nose to the requirements, why not?
|
from setuptools import setup
setup(
name='tattler',
author='Joe Friedl',
author_email='joe@joefriedl.net',
version='0.1',
description='A nose plugin that tattles on functions.',
keywords='nose plugin test testing mock',
url='https://github.com/grampajoe/tattler',
license='MIT',
py_modules=['tattler'],
install_requires=[
'nose',
'mock',
],
entry_points = {
'nose.plugins.0.10': [
'tattler = tattler:Tattler',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
],
)
|
<commit_before>from setuptools import setup
setup(
name='tattler',
author='Joe Friedl',
author_email='joe@joefriedl.net',
version='0.1',
description='A nose plugin that tattles on functions.',
keywords='nose plugin test testing mock',
url='https://github.com/grampajoe/tattler',
license='MIT',
py_modules=['tattler'],
install_requires=[
'mock',
],
entry_points = {
'nose.plugins.0.10': [
'tattler = tattler:Tattler',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
],
)
<commit_msg>Add nose to the requirements, why not?<commit_after>
|
from setuptools import setup
setup(
name='tattler',
author='Joe Friedl',
author_email='joe@joefriedl.net',
version='0.1',
description='A nose plugin that tattles on functions.',
keywords='nose plugin test testing mock',
url='https://github.com/grampajoe/tattler',
license='MIT',
py_modules=['tattler'],
install_requires=[
'nose',
'mock',
],
entry_points = {
'nose.plugins.0.10': [
'tattler = tattler:Tattler',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
],
)
|
from setuptools import setup
setup(
name='tattler',
author='Joe Friedl',
author_email='joe@joefriedl.net',
version='0.1',
description='A nose plugin that tattles on functions.',
keywords='nose plugin test testing mock',
url='https://github.com/grampajoe/tattler',
license='MIT',
py_modules=['tattler'],
install_requires=[
'mock',
],
entry_points = {
'nose.plugins.0.10': [
'tattler = tattler:Tattler',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
],
)
Add nose to the requirements, why not?
from setuptools import setup
setup(
name='tattler',
author='Joe Friedl',
author_email='joe@joefriedl.net',
version='0.1',
description='A nose plugin that tattles on functions.',
keywords='nose plugin test testing mock',
url='https://github.com/grampajoe/tattler',
license='MIT',
py_modules=['tattler'],
install_requires=[
'nose',
'mock',
],
entry_points = {
'nose.plugins.0.10': [
'tattler = tattler:Tattler',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
],
)
|
<commit_before>from setuptools import setup
setup(
name='tattler',
author='Joe Friedl',
author_email='joe@joefriedl.net',
version='0.1',
description='A nose plugin that tattles on functions.',
keywords='nose plugin test testing mock',
url='https://github.com/grampajoe/tattler',
license='MIT',
py_modules=['tattler'],
install_requires=[
'mock',
],
entry_points = {
'nose.plugins.0.10': [
'tattler = tattler:Tattler',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
],
)
<commit_msg>Add nose to the requirements, why not?<commit_after>from setuptools import setup
setup(
name='tattler',
author='Joe Friedl',
author_email='joe@joefriedl.net',
version='0.1',
description='A nose plugin that tattles on functions.',
keywords='nose plugin test testing mock',
url='https://github.com/grampajoe/tattler',
license='MIT',
py_modules=['tattler'],
install_requires=[
'nose',
'mock',
],
entry_points = {
'nose.plugins.0.10': [
'tattler = tattler:Tattler',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
],
)
|
9a2318d4ddadb57b4096896407c7683d9804e04d
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
long_description = """Publish browser extensions to their stores.
Currently only available for Google Chrome."""
setup(
name='webstoremgr',
version='0.5.3',
description='Publish browser extensions to their stores.',
long_description=long_description,
author='Martin Melka',
author_email='melka@avast.com',
license='MIT',
keywords='extension, browser, chrome, firefox, store',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'
],
url='https://git.int.avast.com/melka/webstore-manager',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'webstoremgr = webstore_manager.manager:main'
]
},
install_requires=['click>=6', 'requests', 'appdirs', 'PyJWT'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'betamax', 'flexmock']
)
|
from setuptools import setup, find_packages
long_description = """Publish browser extensions to their stores.
Currently only available for Google Chrome."""
setup(
name='webstoremgr',
version='0.1',
description='Publish browser extensions to their stores.',
long_description=long_description,
author='Martin Melka',
author_email='melka@avast.com',
license='MIT',
keywords='extension, browser, chrome, firefox, store',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'
],
url='https://git.int.avast.com/melka/webstore-manager',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'webstoremgr = webstore_manager.manager:main'
]
},
install_requires=['click>=6', 'requests', 'appdirs', 'PyJWT'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'betamax', 'flexmock']
)
|
Reset version number for newly named pkg
|
Reset version number for newly named pkg
|
Python
|
mit
|
melkamar/webstore-manager,melkamar/webstore-manager
|
from setuptools import setup, find_packages
long_description = """Publish browser extensions to their stores.
Currently only available for Google Chrome."""
setup(
name='webstoremgr',
version='0.5.3',
description='Publish browser extensions to their stores.',
long_description=long_description,
author='Martin Melka',
author_email='melka@avast.com',
license='MIT',
keywords='extension, browser, chrome, firefox, store',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'
],
url='https://git.int.avast.com/melka/webstore-manager',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'webstoremgr = webstore_manager.manager:main'
]
},
install_requires=['click>=6', 'requests', 'appdirs', 'PyJWT'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'betamax', 'flexmock']
)
Reset version number for newly named pkg
|
from setuptools import setup, find_packages
long_description = """Publish browser extensions to their stores.
Currently only available for Google Chrome."""
setup(
name='webstoremgr',
version='0.1',
description='Publish browser extensions to their stores.',
long_description=long_description,
author='Martin Melka',
author_email='melka@avast.com',
license='MIT',
keywords='extension, browser, chrome, firefox, store',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'
],
url='https://git.int.avast.com/melka/webstore-manager',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'webstoremgr = webstore_manager.manager:main'
]
},
install_requires=['click>=6', 'requests', 'appdirs', 'PyJWT'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'betamax', 'flexmock']
)
|
<commit_before>from setuptools import setup, find_packages
long_description = """Publish browser extensions to their stores.
Currently only available for Google Chrome."""
setup(
name='webstoremgr',
version='0.5.3',
description='Publish browser extensions to their stores.',
long_description=long_description,
author='Martin Melka',
author_email='melka@avast.com',
license='MIT',
keywords='extension, browser, chrome, firefox, store',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'
],
url='https://git.int.avast.com/melka/webstore-manager',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'webstoremgr = webstore_manager.manager:main'
]
},
install_requires=['click>=6', 'requests', 'appdirs', 'PyJWT'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'betamax', 'flexmock']
)
<commit_msg>Reset version number for newly named pkg<commit_after>
|
from setuptools import setup, find_packages
long_description = """Publish browser extensions to their stores.
Currently only available for Google Chrome."""
setup(
name='webstoremgr',
version='0.1',
description='Publish browser extensions to their stores.',
long_description=long_description,
author='Martin Melka',
author_email='melka@avast.com',
license='MIT',
keywords='extension, browser, chrome, firefox, store',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'
],
url='https://git.int.avast.com/melka/webstore-manager',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'webstoremgr = webstore_manager.manager:main'
]
},
install_requires=['click>=6', 'requests', 'appdirs', 'PyJWT'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'betamax', 'flexmock']
)
|
from setuptools import setup, find_packages
long_description = """Publish browser extensions to their stores.
Currently only available for Google Chrome."""
setup(
name='webstoremgr',
version='0.5.3',
description='Publish browser extensions to their stores.',
long_description=long_description,
author='Martin Melka',
author_email='melka@avast.com',
license='MIT',
keywords='extension, browser, chrome, firefox, store',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'
],
url='https://git.int.avast.com/melka/webstore-manager',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'webstoremgr = webstore_manager.manager:main'
]
},
install_requires=['click>=6', 'requests', 'appdirs', 'PyJWT'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'betamax', 'flexmock']
)
Reset version number for newly named pkg
from setuptools import setup, find_packages
long_description = """Publish browser extensions to their stores.
Currently only available for Google Chrome."""
setup(
name='webstoremgr',
version='0.1',
description='Publish browser extensions to their stores.',
long_description=long_description,
author='Martin Melka',
author_email='melka@avast.com',
license='MIT',
keywords='extension, browser, chrome, firefox, store',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'
],
url='https://git.int.avast.com/melka/webstore-manager',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'webstoremgr = webstore_manager.manager:main'
]
},
install_requires=['click>=6', 'requests', 'appdirs', 'PyJWT'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'betamax', 'flexmock']
)
|
<commit_before>from setuptools import setup, find_packages
long_description = """Publish browser extensions to their stores.
Currently only available for Google Chrome."""
setup(
name='webstoremgr',
version='0.5.3',
description='Publish browser extensions to their stores.',
long_description=long_description,
author='Martin Melka',
author_email='melka@avast.com',
license='MIT',
keywords='extension, browser, chrome, firefox, store',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'
],
url='https://git.int.avast.com/melka/webstore-manager',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'webstoremgr = webstore_manager.manager:main'
]
},
install_requires=['click>=6', 'requests', 'appdirs', 'PyJWT'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'betamax', 'flexmock']
)
<commit_msg>Reset version number for newly named pkg<commit_after>from setuptools import setup, find_packages
long_description = """Publish browser extensions to their stores.
Currently only available for Google Chrome."""
setup(
name='webstoremgr',
version='0.1',
description='Publish browser extensions to their stores.',
long_description=long_description,
author='Martin Melka',
author_email='melka@avast.com',
license='MIT',
keywords='extension, browser, chrome, firefox, store',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development'
],
url='https://git.int.avast.com/melka/webstore-manager',
include_package_data=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'webstoremgr = webstore_manager.manager:main'
]
},
install_requires=['click>=6', 'requests', 'appdirs', 'PyJWT'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'betamax', 'flexmock']
)
|
2b3c9eb3f849e564775f71714c21d490858bd8dc
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
else:
long_description = '''A simple Python wrapper around the ChemSpider Web Services.'''
setup(
name='ChemSpiPy',
version='1.0.5',
author='Matt Swain',
author_email='m.swain@me.com',
license='MIT',
url='https://github.com/mcs07/ChemSpiPy',
packages=['chemspipy'],
description='A simple Python wrapper around the ChemSpider Web Services.',
long_description=long_description,
keywords='chemistry cheminformatics chemspider rsc rest api',
zip_safe=False,
install_requires=['requests', 'six'],
tests_require=['pytest'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
else:
long_description = '''A simple Python wrapper around the ChemSpider Web Services.'''
setup(
name='ChemSpiPy',
version='1.0.5',
author='Matt Swain',
author_email='m.swain@me.com',
license='MIT',
url='https://github.com/mcs07/ChemSpiPy',
packages=['chemspipy'],
description='A simple Python wrapper around the ChemSpider Web Services.',
long_description=long_description,
keywords='chemistry cheminformatics chemspider rsc rest api',
zip_safe=False,
install_requires=['requests', 'six'],
tests_require=['pytest'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Remove classifiers that get outdated
|
Remove classifiers that get outdated
|
Python
|
mit
|
mcs07/ChemSpiPy
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
else:
long_description = '''A simple Python wrapper around the ChemSpider Web Services.'''
setup(
name='ChemSpiPy',
version='1.0.5',
author='Matt Swain',
author_email='m.swain@me.com',
license='MIT',
url='https://github.com/mcs07/ChemSpiPy',
packages=['chemspipy'],
description='A simple Python wrapper around the ChemSpider Web Services.',
long_description=long_description,
keywords='chemistry cheminformatics chemspider rsc rest api',
zip_safe=False,
install_requires=['requests', 'six'],
tests_require=['pytest'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Remove classifiers that get outdated
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
else:
long_description = '''A simple Python wrapper around the ChemSpider Web Services.'''
setup(
name='ChemSpiPy',
version='1.0.5',
author='Matt Swain',
author_email='m.swain@me.com',
license='MIT',
url='https://github.com/mcs07/ChemSpiPy',
packages=['chemspipy'],
description='A simple Python wrapper around the ChemSpider Web Services.',
long_description=long_description,
keywords='chemistry cheminformatics chemspider rsc rest api',
zip_safe=False,
install_requires=['requests', 'six'],
tests_require=['pytest'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
else:
long_description = '''A simple Python wrapper around the ChemSpider Web Services.'''
setup(
name='ChemSpiPy',
version='1.0.5',
author='Matt Swain',
author_email='m.swain@me.com',
license='MIT',
url='https://github.com/mcs07/ChemSpiPy',
packages=['chemspipy'],
description='A simple Python wrapper around the ChemSpider Web Services.',
long_description=long_description,
keywords='chemistry cheminformatics chemspider rsc rest api',
zip_safe=False,
install_requires=['requests', 'six'],
tests_require=['pytest'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Remove classifiers that get outdated<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
else:
long_description = '''A simple Python wrapper around the ChemSpider Web Services.'''
setup(
name='ChemSpiPy',
version='1.0.5',
author='Matt Swain',
author_email='m.swain@me.com',
license='MIT',
url='https://github.com/mcs07/ChemSpiPy',
packages=['chemspipy'],
description='A simple Python wrapper around the ChemSpider Web Services.',
long_description=long_description,
keywords='chemistry cheminformatics chemspider rsc rest api',
zip_safe=False,
install_requires=['requests', 'six'],
tests_require=['pytest'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
else:
long_description = '''A simple Python wrapper around the ChemSpider Web Services.'''
setup(
name='ChemSpiPy',
version='1.0.5',
author='Matt Swain',
author_email='m.swain@me.com',
license='MIT',
url='https://github.com/mcs07/ChemSpiPy',
packages=['chemspipy'],
description='A simple Python wrapper around the ChemSpider Web Services.',
long_description=long_description,
keywords='chemistry cheminformatics chemspider rsc rest api',
zip_safe=False,
install_requires=['requests', 'six'],
tests_require=['pytest'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Remove classifiers that get outdated
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
else:
long_description = '''A simple Python wrapper around the ChemSpider Web Services.'''
setup(
name='ChemSpiPy',
version='1.0.5',
author='Matt Swain',
author_email='m.swain@me.com',
license='MIT',
url='https://github.com/mcs07/ChemSpiPy',
packages=['chemspipy'],
description='A simple Python wrapper around the ChemSpider Web Services.',
long_description=long_description,
keywords='chemistry cheminformatics chemspider rsc rest api',
zip_safe=False,
install_requires=['requests', 'six'],
tests_require=['pytest'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
else:
long_description = '''A simple Python wrapper around the ChemSpider Web Services.'''
setup(
name='ChemSpiPy',
version='1.0.5',
author='Matt Swain',
author_email='m.swain@me.com',
license='MIT',
url='https://github.com/mcs07/ChemSpiPy',
packages=['chemspipy'],
description='A simple Python wrapper around the ChemSpider Web Services.',
long_description=long_description,
keywords='chemistry cheminformatics chemspider rsc rest api',
zip_safe=False,
install_requires=['requests', 'six'],
tests_require=['pytest'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Remove classifiers that get outdated<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
else:
long_description = '''A simple Python wrapper around the ChemSpider Web Services.'''
setup(
name='ChemSpiPy',
version='1.0.5',
author='Matt Swain',
author_email='m.swain@me.com',
license='MIT',
url='https://github.com/mcs07/ChemSpiPy',
packages=['chemspipy'],
description='A simple Python wrapper around the ChemSpider Web Services.',
long_description=long_description,
keywords='chemistry cheminformatics chemspider rsc rest api',
zip_safe=False,
install_requires=['requests', 'six'],
tests_require=['pytest'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
834637f8860f6b2d99726f9f531d05884e375ea3
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
import re
from distutils.core import setup
DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))
README = open(rel('README.rst')).read()
INIT_PY = open(rel('flask_redis.py')).read()
VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
setup(
name='Flask-And-Redis',
version=VERSION,
description='Simple as dead support of Redis database for Flask apps.',
long_description=README,
author='Igor Davydenko',
author_email='playpauseandstop@gmail.com',
url='https://github.com/playpauseandstop/Flask-And-Redis',
install_requires=[
'Flask',
'redis',
],
py_modules=[
'flask_redis',
],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Topic :: Utilities',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
keywords='flask redis',
license='BSD License',
)
|
#!/usr/bin/env python
import os
import re
import sys
from distutils.core import setup
DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))
with open(rel('README.rst')) as handler:
README = handler.read()
with open(rel('flask_redis.py')) as handler:
INIT_PY = handler.read()
INSTALL_REQUIRES = {
2: ['Flask>=0.8', 'redis>=2.4.11'],
3: ['Flask>=0.10.1', 'redis>=2.6.2'],
}
VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
setup(
name='Flask-And-Redis',
version=VERSION,
description='Simple as dead support of Redis database for Flask apps.',
long_description=README,
author='Igor Davydenko',
author_email='playpauseandstop@gmail.com',
url='https://github.com/playpauseandstop/Flask-And-Redis',
install_requires=INSTALL_REQUIRES[sys.version_info[0]],
py_modules=['flask_redis'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Topic :: Utilities',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
keywords='flask redis',
license='BSD License',
)
|
Fix install requirements due to Python versions.
|
Fix install requirements due to Python versions.
|
Python
|
bsd-3-clause
|
playpauseandstop/Flask-And-Redis,playpauseandstop/Flask-And-Redis
|
#!/usr/bin/env python
import os
import re
from distutils.core import setup
DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))
README = open(rel('README.rst')).read()
INIT_PY = open(rel('flask_redis.py')).read()
VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
setup(
name='Flask-And-Redis',
version=VERSION,
description='Simple as dead support of Redis database for Flask apps.',
long_description=README,
author='Igor Davydenko',
author_email='playpauseandstop@gmail.com',
url='https://github.com/playpauseandstop/Flask-And-Redis',
install_requires=[
'Flask',
'redis',
],
py_modules=[
'flask_redis',
],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Topic :: Utilities',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
keywords='flask redis',
license='BSD License',
)
Fix install requirements due to Python versions.
|
#!/usr/bin/env python
import os
import re
import sys
from distutils.core import setup
DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))
with open(rel('README.rst')) as handler:
README = handler.read()
with open(rel('flask_redis.py')) as handler:
INIT_PY = handler.read()
INSTALL_REQUIRES = {
2: ['Flask>=0.8', 'redis>=2.4.11'],
3: ['Flask>=0.10.1', 'redis>=2.6.2'],
}
VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
setup(
name='Flask-And-Redis',
version=VERSION,
description='Simple as dead support of Redis database for Flask apps.',
long_description=README,
author='Igor Davydenko',
author_email='playpauseandstop@gmail.com',
url='https://github.com/playpauseandstop/Flask-And-Redis',
install_requires=INSTALL_REQUIRES[sys.version_info[0]],
py_modules=['flask_redis'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Topic :: Utilities',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
keywords='flask redis',
license='BSD License',
)
|
<commit_before>#!/usr/bin/env python
import os
import re
from distutils.core import setup
DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))
README = open(rel('README.rst')).read()
INIT_PY = open(rel('flask_redis.py')).read()
VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
setup(
name='Flask-And-Redis',
version=VERSION,
description='Simple as dead support of Redis database for Flask apps.',
long_description=README,
author='Igor Davydenko',
author_email='playpauseandstop@gmail.com',
url='https://github.com/playpauseandstop/Flask-And-Redis',
install_requires=[
'Flask',
'redis',
],
py_modules=[
'flask_redis',
],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Topic :: Utilities',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
keywords='flask redis',
license='BSD License',
)
<commit_msg>Fix install requirements due to Python versions.<commit_after>
|
#!/usr/bin/env python
import os
import re
import sys
from distutils.core import setup
DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))
with open(rel('README.rst')) as handler:
README = handler.read()
with open(rel('flask_redis.py')) as handler:
INIT_PY = handler.read()
INSTALL_REQUIRES = {
2: ['Flask>=0.8', 'redis>=2.4.11'],
3: ['Flask>=0.10.1', 'redis>=2.6.2'],
}
VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
setup(
name='Flask-And-Redis',
version=VERSION,
description='Simple as dead support of Redis database for Flask apps.',
long_description=README,
author='Igor Davydenko',
author_email='playpauseandstop@gmail.com',
url='https://github.com/playpauseandstop/Flask-And-Redis',
install_requires=INSTALL_REQUIRES[sys.version_info[0]],
py_modules=['flask_redis'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Topic :: Utilities',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
keywords='flask redis',
license='BSD License',
)
|
#!/usr/bin/env python
import os
import re
from distutils.core import setup
DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))
README = open(rel('README.rst')).read()
INIT_PY = open(rel('flask_redis.py')).read()
VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
setup(
name='Flask-And-Redis',
version=VERSION,
description='Simple as dead support of Redis database for Flask apps.',
long_description=README,
author='Igor Davydenko',
author_email='playpauseandstop@gmail.com',
url='https://github.com/playpauseandstop/Flask-And-Redis',
install_requires=[
'Flask',
'redis',
],
py_modules=[
'flask_redis',
],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Topic :: Utilities',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
keywords='flask redis',
license='BSD License',
)
Fix install requirements due to Python versions.
#!/usr/bin/env python
import os
import re
import sys
from distutils.core import setup
DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))
with open(rel('README.rst')) as handler:
README = handler.read()
with open(rel('flask_redis.py')) as handler:
INIT_PY = handler.read()
INSTALL_REQUIRES = {
2: ['Flask>=0.8', 'redis>=2.4.11'],
3: ['Flask>=0.10.1', 'redis>=2.6.2'],
}
VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
setup(
name='Flask-And-Redis',
version=VERSION,
description='Simple as dead support of Redis database for Flask apps.',
long_description=README,
author='Igor Davydenko',
author_email='playpauseandstop@gmail.com',
url='https://github.com/playpauseandstop/Flask-And-Redis',
install_requires=INSTALL_REQUIRES[sys.version_info[0]],
py_modules=['flask_redis'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Topic :: Utilities',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
keywords='flask redis',
license='BSD License',
)
|
<commit_before>#!/usr/bin/env python
import os
import re
from distutils.core import setup
DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))
README = open(rel('README.rst')).read()
INIT_PY = open(rel('flask_redis.py')).read()
VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
setup(
name='Flask-And-Redis',
version=VERSION,
description='Simple as dead support of Redis database for Flask apps.',
long_description=README,
author='Igor Davydenko',
author_email='playpauseandstop@gmail.com',
url='https://github.com/playpauseandstop/Flask-And-Redis',
install_requires=[
'Flask',
'redis',
],
py_modules=[
'flask_redis',
],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Topic :: Utilities',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
keywords='flask redis',
license='BSD License',
)
<commit_msg>Fix install requirements due to Python versions.<commit_after>#!/usr/bin/env python
import os
import re
import sys
from distutils.core import setup
DIRNAME = os.path.abspath(os.path.dirname(__file__))
rel = lambda *parts: os.path.abspath(os.path.join(DIRNAME, *parts))
with open(rel('README.rst')) as handler:
README = handler.read()
with open(rel('flask_redis.py')) as handler:
INIT_PY = handler.read()
INSTALL_REQUIRES = {
2: ['Flask>=0.8', 'redis>=2.4.11'],
3: ['Flask>=0.10.1', 'redis>=2.6.2'],
}
VERSION = re.findall("__version__ = '([^']+)'", INIT_PY)[0]
setup(
name='Flask-And-Redis',
version=VERSION,
description='Simple as dead support of Redis database for Flask apps.',
long_description=README,
author='Igor Davydenko',
author_email='playpauseandstop@gmail.com',
url='https://github.com/playpauseandstop/Flask-And-Redis',
install_requires=INSTALL_REQUIRES[sys.version_info[0]],
py_modules=['flask_redis'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Topic :: Utilities',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
keywords='flask redis',
license='BSD License',
)
|
cb1b0b6c6bb3fb2f982f775ee831abf20916c020
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='github-backup@josediazgonzalez.com',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='github-backup@josediazgonzalez.com',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
|
Add support for python 3.7 and 3.8 in package classifiers
|
Add support for python 3.7 and 3.8 in package classifiers
|
Python
|
mit
|
josegonzalez/python-github-backup,josegonzalez/python-github-backup
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='github-backup@josediazgonzalez.com',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
Add support for python 3.7 and 3.8 in package classifiers
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='github-backup@josediazgonzalez.com',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='github-backup@josediazgonzalez.com',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
<commit_msg>Add support for python 3.7 and 3.8 in package classifiers<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='github-backup@josediazgonzalez.com',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='github-backup@josediazgonzalez.com',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
Add support for python 3.7 and 3.8 in package classifiers
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='github-backup@josediazgonzalez.com',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='github-backup@josediazgonzalez.com',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
<commit_msg>Add support for python 3.7 and 3.8 in package classifiers<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='github-backup@josediazgonzalez.com',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
|
169240f25757db371108af98163c81f7cc0e647b
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import sys, os
version = '0.4.3'
setup(name='twitter',
version=version,
description="An API and command-line toolset for Twitter (twitter.com)",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Communications :: Chat :: Internet Relay Chat",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, IRC, command-line tools, web 2.0',
author='Mike Verdone',
author_email='mike.verdone+twitterapi@gmail.com',
url='http://mike.verdone.ca/twitter/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"simplejson>=1.7.1",
"dateutil>=1.1",
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
twitter=twitter.cmdline:main
twitterbot=twitter.ircbot:main
""",
)
|
from setuptools import setup, find_packages
import sys, os
version = '0.4.4'
setup(name='twitter',
version=version,
description="An API and command-line toolset for Twitter (twitter.com)",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Communications :: Chat :: Internet Relay Chat",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, IRC, command-line tools, web 2.0',
author='Mike Verdone',
author_email='mike.verdone+twitterapi@gmail.com',
url='http://mike.verdone.ca/twitter/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"simplejson>=1.7.1",
"python-dateutil>=1.1",
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
twitter=twitter.cmdline:main
twitterbot=twitter.ircbot:main
""",
)
|
Change dateutil to python-dateutil because some loser decided to rename it. (Thanks Leigh)
|
Change dateutil to python-dateutil because some loser decided to rename it. (Thanks Leigh)
git-svn-id: 7187af8a85e68091b56e148623cc345c4eafc588@188 d723f978-dc38-0410-87ed-da353333cdcc
|
Python
|
mit
|
hugovk/twitter,adonoho/twitter,durden/frappy,jessamynsmith/twitter,tytek2012/twitter,Adai0808/twitter,sixohsix/twitter,miragshin/twitter
|
from setuptools import setup, find_packages
import sys, os
version = '0.4.3'
setup(name='twitter',
version=version,
description="An API and command-line toolset for Twitter (twitter.com)",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Communications :: Chat :: Internet Relay Chat",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, IRC, command-line tools, web 2.0',
author='Mike Verdone',
author_email='mike.verdone+twitterapi@gmail.com',
url='http://mike.verdone.ca/twitter/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"simplejson>=1.7.1",
"dateutil>=1.1",
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
twitter=twitter.cmdline:main
twitterbot=twitter.ircbot:main
""",
)
Change dateutil to python-dateutil because some loser decided to rename it. (Thanks Leigh)
git-svn-id: 7187af8a85e68091b56e148623cc345c4eafc588@188 d723f978-dc38-0410-87ed-da353333cdcc
|
from setuptools import setup, find_packages
import sys, os
version = '0.4.4'
setup(name='twitter',
version=version,
description="An API and command-line toolset for Twitter (twitter.com)",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Communications :: Chat :: Internet Relay Chat",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, IRC, command-line tools, web 2.0',
author='Mike Verdone',
author_email='mike.verdone+twitterapi@gmail.com',
url='http://mike.verdone.ca/twitter/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"simplejson>=1.7.1",
"python-dateutil>=1.1",
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
twitter=twitter.cmdline:main
twitterbot=twitter.ircbot:main
""",
)
|
<commit_before>from setuptools import setup, find_packages
import sys, os
version = '0.4.3'
setup(name='twitter',
version=version,
description="An API and command-line toolset for Twitter (twitter.com)",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Communications :: Chat :: Internet Relay Chat",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, IRC, command-line tools, web 2.0',
author='Mike Verdone',
author_email='mike.verdone+twitterapi@gmail.com',
url='http://mike.verdone.ca/twitter/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"simplejson>=1.7.1",
"dateutil>=1.1",
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
twitter=twitter.cmdline:main
twitterbot=twitter.ircbot:main
""",
)
<commit_msg>Change dateutil to python-dateutil because some loser decided to rename it. (Thanks Leigh)
git-svn-id: 7187af8a85e68091b56e148623cc345c4eafc588@188 d723f978-dc38-0410-87ed-da353333cdcc<commit_after>
|
from setuptools import setup, find_packages
import sys, os
version = '0.4.4'
setup(name='twitter',
version=version,
description="An API and command-line toolset for Twitter (twitter.com)",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Communications :: Chat :: Internet Relay Chat",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, IRC, command-line tools, web 2.0',
author='Mike Verdone',
author_email='mike.verdone+twitterapi@gmail.com',
url='http://mike.verdone.ca/twitter/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"simplejson>=1.7.1",
"python-dateutil>=1.1",
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
twitter=twitter.cmdline:main
twitterbot=twitter.ircbot:main
""",
)
|
from setuptools import setup, find_packages
import sys, os
version = '0.4.3'
setup(name='twitter',
version=version,
description="An API and command-line toolset for Twitter (twitter.com)",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Communications :: Chat :: Internet Relay Chat",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, IRC, command-line tools, web 2.0',
author='Mike Verdone',
author_email='mike.verdone+twitterapi@gmail.com',
url='http://mike.verdone.ca/twitter/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"simplejson>=1.7.1",
"dateutil>=1.1",
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
twitter=twitter.cmdline:main
twitterbot=twitter.ircbot:main
""",
)
Change dateutil to python-dateutil because some loser decided to rename it. (Thanks Leigh)
git-svn-id: 7187af8a85e68091b56e148623cc345c4eafc588@188 d723f978-dc38-0410-87ed-da353333cdccfrom setuptools import setup, find_packages
import sys, os
version = '0.4.4'
setup(name='twitter',
version=version,
description="An API and command-line toolset for Twitter (twitter.com)",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Communications :: Chat :: Internet Relay Chat",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, IRC, command-line tools, web 2.0',
author='Mike Verdone',
author_email='mike.verdone+twitterapi@gmail.com',
url='http://mike.verdone.ca/twitter/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"simplejson>=1.7.1",
"python-dateutil>=1.1",
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
twitter=twitter.cmdline:main
twitterbot=twitter.ircbot:main
""",
)
|
<commit_before>from setuptools import setup, find_packages
import sys, os
version = '0.4.3'
setup(name='twitter',
version=version,
description="An API and command-line toolset for Twitter (twitter.com)",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Communications :: Chat :: Internet Relay Chat",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, IRC, command-line tools, web 2.0',
author='Mike Verdone',
author_email='mike.verdone+twitterapi@gmail.com',
url='http://mike.verdone.ca/twitter/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"simplejson>=1.7.1",
"dateutil>=1.1",
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
twitter=twitter.cmdline:main
twitterbot=twitter.ircbot:main
""",
)
<commit_msg>Change dateutil to python-dateutil because some loser decided to rename it. (Thanks Leigh)
git-svn-id: 7187af8a85e68091b56e148623cc345c4eafc588@188 d723f978-dc38-0410-87ed-da353333cdcc<commit_after>from setuptools import setup, find_packages
import sys, os
version = '0.4.4'
setup(name='twitter',
version=version,
description="An API and command-line toolset for Twitter (twitter.com)",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Communications :: Chat :: Internet Relay Chat",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, IRC, command-line tools, web 2.0',
author='Mike Verdone',
author_email='mike.verdone+twitterapi@gmail.com',
url='http://mike.verdone.ca/twitter/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"simplejson>=1.7.1",
"python-dateutil>=1.1",
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
twitter=twitter.cmdline:main
twitterbot=twitter.ircbot:main
""",
)
|
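The rename in the record above is a reminder that a PyPI project name and the module you import can differ: dependency metadata must use the project name, while application code keeps the old import. A minimal sketch under that assumption (the package name below is hypothetical):
from setuptools import setup
setup(
name="example-cli", # hypothetical
install_requires=[
"python-dateutil>=1.1", # project name that pip resolves
"simplejson>=1.7.1",
],
)
# runtime code is unchanged by the rename; it still imports the module name:
# import dateutil.parser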
d1a784ec841f4f0fbe8945bf7a5f81e7c3952b93
|
plugin_handler.py
|
plugin_handler.py
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka", "plugin_lutakko", "plugin_yotalo"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
Disable more plugins to make it work again.
|
Disable more plugins to make it work again.
Will fix venues parsing later.
Signed-off-by: Ville Valkonen <989b9f9979d21943697628c770235933300d59bc@gmail.com>
|
Python
|
isc
|
weezel/BandEventNotifier
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
Disable more plugins to make it work again.
Will fix venues parsing later.
Signed-off-by: Ville Valkonen <989b9f9979d21943697628c770235933300d59bc@gmail.com>
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka", "plugin_lutakko", "plugin_yotalo"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
<commit_before># -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
<commit_msg>Disable more plugins to make it work again.
Will fix venues parsing later.
Signed-off-by: Ville Valkonen <989b9f9979d21943697628c770235933300d59bc@gmail.com><commit_after>
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka", "plugin_lutakko", "plugin_yotalo"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
Disable more plugins to make it work again.
Will fix venues parsing later.
Signed-off-by: Ville Valkonen <989b9f9979d21943697628c770235933300d59bc@gmail.com># -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka", "plugin_lutakko", "plugin_yotalo"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
<commit_before># -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
<commit_msg>Disable more plugins to make it work again.
Will fix venues parsing later.
Signed-off-by: Ville Valkonen <989b9f9979d21943697628c770235933300d59bc@gmail.com><commit_after># -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka", "plugin_lutakko", "plugin_yotalo"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
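Both versions of the loader above follow the same pkgutil-based discovery pattern; a stripped-down sketch of it follows. The helper name is illustrative, and the prefix is removed with name[len(prefix):] rather than str.lstrip, since lstrip strips a character set rather than a literal prefix.
import importlib
import pkgutil
def discover_plugins(package_dir, package="venues", prefix="plugin_"):
"""Yield one instance per plugin module found in package_dir (illustrative helper)."""
for _finder, name, _ispkg in pkgutil.iter_modules(path=[package_dir]):
if not name.startswith(prefix):
continue
module = importlib.import_module(f"{package}.{name}")
class_name = name[len(prefix):].title() # plugin_example -> Example
yield getattr(module, class_name)()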
6bb184afc3317ca28bcdf9f684de40f173a5a3e6
|
setup.py
|
setup.py
|
import os
from setuptools import setup
# Ensure that the ssdeep library is built, otherwise install will fail
os.environ['BUILD_LIB'] = '1'
setup(
name="stoq",
version="0.10.16",
author="Marcus LaFerrera",
author_email="marcus@punchcyber.com",
description="A framework for simplifying analysis.",
license="Apache License 2.0",
url="https://github.com/PUNCH-Cyber/stoq",
packages=['stoq'],
package_dir={'stoq': 'stoq-framework'},
include_package_data=True,
install_requires=['beautifulsoup4',
'requests',
'python-magic',
'ssdeep',
'lxml',
'yapsy',
'demjson',
'jinja2',
'hydra',
'Cython',
'yara-python',
'python-json-logger'],
)
|
import os
from setuptools import setup
# Ensure that the ssdeep library is built, otherwise install will fail
os.environ['BUILD_LIB'] = '1'
setup(
name="stoq",
version="0.10.16",
author="Marcus LaFerrera",
author_email="marcus@punchcyber.com",
description="A framework for simplifying analysis.",
license="Apache License 2.0",
url="https://github.com/PUNCH-Cyber/stoq",
packages=['stoq'],
package_dir={'stoq': 'stoq-framework'},
include_package_data=True,
install_requires=['beautifulsoup4',
'requests',
'python-magic',
'ssdeep',
'yapsy',
'demjson',
'jinja2',
'yara-python',
'python-json-logger'],
)
|
Remove libraries not required for most use cases
|
Remove libraries not required for most use cases
|
Python
|
apache-2.0
|
PUNCH-Cyber/stoq
|
import os
from setuptools import setup
# Ensure that the ssdeep library is built, otherwise install will fail
os.environ['BUILD_LIB'] = '1'
setup(
name="stoq",
version="0.10.16",
author="Marcus LaFerrera",
author_email="marcus@punchcyber.com",
description="A framework for simplifying analysis.",
license="Apache License 2.0",
url="https://github.com/PUNCH-Cyber/stoq",
packages=['stoq'],
package_dir={'stoq': 'stoq-framework'},
include_package_data=True,
install_requires=['beautifulsoup4',
'requests',
'python-magic',
'ssdeep',
'lxml',
'yapsy',
'demjson',
'jinja2',
'hydra',
'Cython',
'yara-python',
'python-json-logger'],
)
Remove libraries not required for most use cases
|
import os
from setuptools import setup
# Ensure that the ssdeep library is built, otherwise install will fail
os.environ['BUILD_LIB'] = '1'
setup(
name="stoq",
version="0.10.16",
author="Marcus LaFerrera",
author_email="marcus@punchcyber.com",
description="A framework for simplifying analysis.",
license="Apache License 2.0",
url="https://github.com/PUNCH-Cyber/stoq",
packages=['stoq'],
package_dir={'stoq': 'stoq-framework'},
include_package_data=True,
install_requires=['beautifulsoup4',
'requests',
'python-magic',
'ssdeep',
'yapsy',
'demjson',
'jinja2',
'yara-python',
'python-json-logger'],
)
|
<commit_before>import os
from setuptools import setup
# Ensure that the ssdeep library is built, otherwise install will fail
os.environ['BUILD_LIB'] = '1'
setup(
name="stoq",
version="0.10.16",
author="Marcus LaFerrera",
author_email="marcus@punchcyber.com",
description="A framework for simplifying analysis.",
license="Apache License 2.0",
url="https://github.com/PUNCH-Cyber/stoq",
packages=['stoq'],
package_dir={'stoq': 'stoq-framework'},
include_package_data=True,
install_requires=['beautifulsoup4',
'requests',
'python-magic',
'ssdeep',
'lxml',
'yapsy',
'demjson',
'jinja2',
'hydra',
'Cython',
'yara-python',
'python-json-logger'],
)
<commit_msg>Remove libraries not required for most use cases<commit_after>
|
import os
from setuptools import setup
# Ensure that the ssdeep library is built, otherwise install will fail
os.environ['BUILD_LIB'] = '1'
setup(
name="stoq",
version="0.10.16",
author="Marcus LaFerrera",
author_email="marcus@punchcyber.com",
description="A framework for simplifying analysis.",
license="Apache License 2.0",
url="https://github.com/PUNCH-Cyber/stoq",
packages=['stoq'],
package_dir={'stoq': 'stoq-framework'},
include_package_data=True,
install_requires=['beautifulsoup4',
'requests',
'python-magic',
'ssdeep',
'yapsy',
'demjson',
'jinja2',
'yara-python',
'python-json-logger'],
)
|
import os
from setuptools import setup
# Ensure that the ssdeep library is built, otherwise install will fail
os.environ['BUILD_LIB'] = '1'
setup(
name="stoq",
version="0.10.16",
author="Marcus LaFerrera",
author_email="marcus@punchcyber.com",
description="A framework for simplifying analysis.",
license="Apache License 2.0",
url="https://github.com/PUNCH-Cyber/stoq",
packages=['stoq'],
package_dir={'stoq': 'stoq-framework'},
include_package_data=True,
install_requires=['beautifulsoup4',
'requests',
'python-magic',
'ssdeep',
'lxml',
'yapsy',
'demjson',
'jinja2',
'hydra',
'Cython',
'yara-python',
'python-json-logger'],
)
Remove libraries not required for most use casesimport os
from setuptools import setup
# Ensure that the ssdeep library is built, otherwise install will fail
os.environ['BUILD_LIB'] = '1'
setup(
name="stoq",
version="0.10.16",
author="Marcus LaFerrera",
author_email="marcus@punchcyber.com",
description="A framework for simplifying analysis.",
license="Apache License 2.0",
url="https://github.com/PUNCH-Cyber/stoq",
packages=['stoq'],
package_dir={'stoq': 'stoq-framework'},
include_package_data=True,
install_requires=['beautifulsoup4',
'requests',
'python-magic',
'ssdeep',
'yapsy',
'demjson',
'jinja2',
'yara-python',
'python-json-logger'],
)
|
<commit_before>import os
from setuptools import setup
# Ensure that the ssdeep library is built, otherwise install will fail
os.environ['BUILD_LIB'] = '1'
setup(
name="stoq",
version="0.10.16",
author="Marcus LaFerrera",
author_email="marcus@punchcyber.com",
description="A framework for simplifying analysis.",
license="Apache License 2.0",
url="https://github.com/PUNCH-Cyber/stoq",
packages=['stoq'],
package_dir={'stoq': 'stoq-framework'},
include_package_data=True,
install_requires=['beautifulsoup4',
'requests',
'python-magic',
'ssdeep',
'lxml',
'yapsy',
'demjson',
'jinja2',
'hydra',
'Cython',
'yara-python',
'python-json-logger'],
)
<commit_msg>Remove libraries not required for most use cases<commit_after>import os
from setuptools import setup
# Ensure that the ssdeep library is built, otherwise install will fail
os.environ['BUILD_LIB'] = '1'
setup(
name="stoq",
version="0.10.16",
author="Marcus LaFerrera",
author_email="marcus@punchcyber.com",
description="A framework for simplifying analysis.",
license="Apache License 2.0",
url="https://github.com/PUNCH-Cyber/stoq",
packages=['stoq'],
package_dir={'stoq': 'stoq-framework'},
include_package_data=True,
install_requires=['beautifulsoup4',
'requests',
'python-magic',
'ssdeep',
'yapsy',
'demjson',
'jinja2',
'yara-python',
'python-json-logger'],
)
|
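The one non-declarative line in the record above is the environment variable set before setup() is called; per the comment in the record, the flag needs to be in os.environ before the ssdeep bindings are built during the install, otherwise installation can fail. A minimal sketch of the pattern, with illustrative package metadata:
import os
from setuptools import setup
# set build-time flags for native dependencies *before* setup() runs
os.environ["BUILD_LIB"] = "1"
setup(
name="example-framework", # hypothetical
install_requires=["ssdeep"],
)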
8c694c6ad022710a9b7de3b10650591278325267
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.2',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='BSD',
)
|
Add support for Python 3.7 and Django 2.2
|
Add support for Python 3.7 and Django 2.2
|
Python
|
bsd-3-clause
|
blancltd/django-latest-tweets
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
Add support for Python 3.7 and Django 2.2
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.2',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='BSD',
)
|
<commit_before>#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
<commit_msg>Add support for Python 3.7 and Django 2.2<commit_after>
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.2',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='BSD',
)
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
Add support for Python 3.7 and Django 2.2#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.2',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='BSD',
)
|
<commit_before>#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
<commit_msg>Add support for Python 3.7 and Django 2.2<commit_after>#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.2',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='BSD',
)
|
e5dd1f4cbbc76842ac7a95e9fe878631460ee020
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from pypandoc import convert
def convert_markdown_to_rst(file):
return convert(file, 'rst')
setup(name='gitlabform',
version='1.0.1',
description='Easy configuration as code tool for GitLab using config in plain YAML',
long_description=convert_markdown_to_rst('README.md'),
url='https://github.com/egnyte/gitlabform',
author='Egnyte',
keywords=['gitlab', 'configuration-as-code'],
classifiers=[
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Development Status :: 4 - Beta",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Version Control :: Git",
],
packages=find_packages(),
install_requires=[
'requests==2.18.4',
'pyyaml==3.13',
],
tests_requires=[
'pytest',
],
setup_requires=[
'pypandoc',
],
scripts=[
'bin/gitlabform',
],
)
|
from setuptools import setup, find_packages
from pypandoc import convert
def convert_markdown_to_rst(file):
return convert(file, 'rst')
setup(name='gitlabform',
version='1.0.2',
description='Easy configuration as code tool for GitLab using config in plain YAML',
long_description=convert_markdown_to_rst('README.md'),
url='https://github.com/egnyte/gitlabform',
author='Egnyte',
keywords=['gitlab', 'configuration-as-code'],
classifiers=[
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Development Status :: 4 - Beta",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Version Control :: Git",
],
packages=find_packages(),
install_requires=[
'requests>=2.20.0',
'pyyaml==3.13',
],
tests_requires=[
'pytest',
],
setup_requires=[
'pypandoc',
],
scripts=[
'bin/gitlabform',
],
)
|
Update requests to fix security issue
|
Update requests to fix security issue
|
Python
|
mit
|
egnyte/gitlabform,egnyte/gitlabform
|
from setuptools import setup, find_packages
from pypandoc import convert
def convert_markdown_to_rst(file):
return convert(file, 'rst')
setup(name='gitlabform',
version='1.0.1',
description='Easy configuration as code tool for GitLab using config in plain YAML',
long_description=convert_markdown_to_rst('README.md'),
url='https://github.com/egnyte/gitlabform',
author='Egnyte',
keywords=['gitlab', 'configuration-as-code'],
classifiers=[
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Development Status :: 4 - Beta",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Version Control :: Git",
],
packages=find_packages(),
install_requires=[
'requests==2.18.4',
'pyyaml==3.13',
],
tests_requires=[
'pytest',
],
setup_requires=[
'pypandoc',
],
scripts=[
'bin/gitlabform',
],
)
Update requests to fix security issue
|
from setuptools import setup, find_packages
from pypandoc import convert
def convert_markdown_to_rst(file):
return convert(file, 'rst')
setup(name='gitlabform',
version='1.0.2',
description='Easy configuration as code tool for GitLab using config in plain YAML',
long_description=convert_markdown_to_rst('README.md'),
url='https://github.com/egnyte/gitlabform',
author='Egnyte',
keywords=['gitlab', 'configuration-as-code'],
classifiers=[
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Development Status :: 4 - Beta",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Version Control :: Git",
],
packages=find_packages(),
install_requires=[
'requests>=2.20.0',
'pyyaml==3.13',
],
tests_requires=[
'pytest',
],
setup_requires=[
'pypandoc',
],
scripts=[
'bin/gitlabform',
],
)
|
<commit_before>from setuptools import setup, find_packages
from pypandoc import convert
def convert_markdown_to_rst(file):
return convert(file, 'rst')
setup(name='gitlabform',
version='1.0.1',
description='Easy configuration as code tool for GitLab using config in plain YAML',
long_description=convert_markdown_to_rst('README.md'),
url='https://github.com/egnyte/gitlabform',
author='Egnyte',
keywords=['gitlab', 'configuration-as-code'],
classifiers=[
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Development Status :: 4 - Beta",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Version Control :: Git",
],
packages=find_packages(),
install_requires=[
'requests==2.18.4',
'pyyaml==3.13',
],
tests_requires=[
'pytest',
],
setup_requires=[
'pypandoc',
],
scripts=[
'bin/gitlabform',
],
)
<commit_msg>Update requests to fix security issue<commit_after>
|
from setuptools import setup, find_packages
from pypandoc import convert
def convert_markdown_to_rst(file):
return convert(file, 'rst')
setup(name='gitlabform',
version='1.0.2',
description='Easy configuration as code tool for GitLab using config in plain YAML',
long_description=convert_markdown_to_rst('README.md'),
url='https://github.com/egnyte/gitlabform',
author='Egnyte',
keywords=['gitlab', 'configuration-as-code'],
classifiers=[
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Development Status :: 4 - Beta",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Version Control :: Git",
],
packages=find_packages(),
install_requires=[
'requests>=2.20.0',
'pyyaml==3.13',
],
tests_requires=[
'pytest',
],
setup_requires=[
'pypandoc',
],
scripts=[
'bin/gitlabform',
],
)
|
from setuptools import setup, find_packages
from pypandoc import convert
def convert_markdown_to_rst(file):
return convert(file, 'rst')
setup(name='gitlabform',
version='1.0.1',
description='Easy configuration as code tool for GitLab using config in plain YAML',
long_description=convert_markdown_to_rst('README.md'),
url='https://github.com/egnyte/gitlabform',
author='Egnyte',
keywords=['gitlab', 'configuration-as-code'],
classifiers=[
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Development Status :: 4 - Beta",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Version Control :: Git",
],
packages=find_packages(),
install_requires=[
'requests==2.18.4',
'pyyaml==3.13',
],
tests_requires=[
'pytest',
],
setup_requires=[
'pypandoc',
],
scripts=[
'bin/gitlabform',
],
)
Update requests to fix security issuefrom setuptools import setup, find_packages
from pypandoc import convert
def convert_markdown_to_rst(file):
return convert(file, 'rst')
setup(name='gitlabform',
version='1.0.2',
description='Easy configuration as code tool for GitLab using config in plain YAML',
long_description=convert_markdown_to_rst('README.md'),
url='https://github.com/egnyte/gitlabform',
author='Egnyte',
keywords=['gitlab', 'configuration-as-code'],
classifiers=[
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Development Status :: 4 - Beta",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Version Control :: Git",
],
packages=find_packages(),
install_requires=[
'requests>=2.20.0',
'pyyaml==3.13',
],
tests_requires=[
'pytest',
],
setup_requires=[
'pypandoc',
],
scripts=[
'bin/gitlabform',
],
)
|
<commit_before>from setuptools import setup, find_packages
from pypandoc import convert
def convert_markdown_to_rst(file):
return convert(file, 'rst')
setup(name='gitlabform',
version='1.0.1',
description='Easy configuration as code tool for GitLab using config in plain YAML',
long_description=convert_markdown_to_rst('README.md'),
url='https://github.com/egnyte/gitlabform',
author='Egnyte',
keywords=['gitlab', 'configuration-as-code'],
classifiers=[
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Development Status :: 4 - Beta",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Version Control :: Git",
],
packages=find_packages(),
install_requires=[
'requests==2.18.4',
'pyyaml==3.13',
],
tests_requires=[
'pytest',
],
setup_requires=[
'pypandoc',
],
scripts=[
'bin/gitlabform',
],
)
<commit_msg>Update requests to fix security issue<commit_after>from setuptools import setup, find_packages
from pypandoc import convert
def convert_markdown_to_rst(file):
return convert(file, 'rst')
setup(name='gitlabform',
version='1.0.2',
description='Easy configuration as code tool for GitLab using config in plain YAML',
long_description=convert_markdown_to_rst('README.md'),
url='https://github.com/egnyte/gitlabform',
author='Egnyte',
keywords=['gitlab', 'configuration-as-code'],
classifiers=[
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Development Status :: 4 - Beta",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Version Control :: Git",
],
packages=find_packages(),
install_requires=[
'requests>=2.20.0',
'pyyaml==3.13',
],
tests_requires=[
'pytest',
],
setup_requires=[
'pypandoc',
],
scripts=[
'bin/gitlabform',
],
)
|
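Converting the Markdown README to reStructuredText at build time, as both versions above do, requires the pandoc binary on the machine that builds the distribution. A hedged sketch with a fallback to the raw Markdown text; the fallback is an addition for illustration and not part of the project:
from pypandoc import convert # needs the pandoc binary at build time
def long_description(path="README.md"):
try:
return convert(path, "rst")
except OSError:
# illustrative fallback: ship the raw Markdown if pandoc is unavailable
with open(path, encoding="utf-8") as fh:
return fh.read()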
b36f2518666572ccd3b98dd88536533e17a39e3f
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'oauth_provider',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
|
Remove oauth_provider as that's the eggname for django-oauth-plus.
|
Remove oauth_provider as that's the eggname for django-oauth-plus.
|
Python
|
apache-2.0
|
uw-it-aca/spotseeker_server,uw-it-aca/spotseeker_server,uw-it-aca/spotseeker_server
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'oauth_provider',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
Remove oauth_provider as that's the eggname for django-oauth-plus.
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'oauth_provider',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
<commit_msg>Remove oauth_provider as that's the eggname for django-oauth-plus.<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'oauth_provider',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
Remove oauth_provider as that's the eggname for django-oauth-plus.#!/usr/bin/env python
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'oauth_provider',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
<commit_msg>Remove oauth_provider as that's the eggname for django-oauth-plus.<commit_after>#!/usr/bin/env python
from distutils.core import setup
setup(name='SpaceScout-Server',
version='1.0',
description='REST Backend for SpaceScout',
install_requires=[
'Django>=1.7,<1.8',
'mock<=1.0.1',
'oauth2<=1.5.211',
'Pillow',
'pyproj',
'pytz',
'South',
'simplejson>=2.1',
'django-oauth-plus<=2.2.5',
'phonenumbers'
],
)
|
993b8b23d70b133528a52bc91292735bb5668abe
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadwriter = zeit.care.commentthread:main
"""
)
|
from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadworker = zeit.care.commentthread:main
"""
)
|
Rename commentthreadwriter to commentthreadworker since it does not write anything...
|
Rename commentthreadwriter to commentthreadworker since it does not write
anything...
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.care
|
from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadwriter = zeit.care.commentthread:main
"""
)
Rename commentthreadwriter to commentthreadworker since it does not write
anything...
|
from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadworker = zeit.care.commentthread:main
"""
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadwriter = zeit.care.commentthread:main
"""
)
<commit_msg>Rename commentthreadwriter to commentthreadworker since it does not write
anything...<commit_after>
|
from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadworker = zeit.care.commentthread:main
"""
)
|
from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadwriter = zeit.care.commentthread:main
"""
)
Rename commentthreadwriter to commentthreadworker since it does not write
anything...from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadworker = zeit.care.commentthread:main
"""
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadwriter = zeit.care.commentthread:main
"""
)
<commit_msg>Rename commentthreadwriter to commentthreadworker since it does not write
anything...<commit_after>from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadworker = zeit.care.commentthread:main
"""
)
|
e30a87841eb00d496d1980b025e2b2458eeaa101
|
setup.py
|
setup.py
|
from setuptools import setup, Extension
import numpy
setup (name = 'xfel', \
version = '1.0', \
packages = ['xfel',
'xfel.core',
'xfel.orientaton',
'xfel.sampling',
'xfel.test',
],
package_dir = {'xfel': './xfel'},
requires = ['numpy', 'scipy']
)
|
from setuptools import setup, Extension
import numpy
setup (name = 'bxfel', \
version = '1.0', \
packages = ['bxfel',
'bxfel.core',
'bxfel.orientation',
'bxfel.sampling',
'bxfel.test',
],
package_dir = {'bxfel': './bxfel'},
package_data={'bxfel':['orientation/resources/gauss/*.dat',
'orientation/resources/chebyshev/*.dat']},
include_package_data=True,
requires = ['numpy', 'scipy'],
zip_safe=False
)
|
Include data files in package
|
Include data files in package
|
Python
|
mit
|
mmechelke/bayesian_xfel,mmechelke/bayesian_xfel
|
from setuptools import setup, Extension
import numpy
setup (name = 'xfel', \
version = '1.0', \
packages = ['xfel',
'xfel.core',
'xfel.orientaton',
'xfel.sampling',
'xfel.test',
],
package_dir = {'xfel': './xfel'},
requires = ['numpy', 'scipy']
)
Include data files in package
|
from setuptools import setup, Extension
import numpy
setup (name = 'bxfel', \
version = '1.0', \
packages = ['bxfel',
'bxfel.core',
'bxfel.orientation',
'bxfel.sampling',
'bxfel.test',
],
package_dir = {'bxfel': './bxfel'},
package_data={'bxfel':['orientation/resources/gauss/*.dat',
'orientation/resources/chebyshev/*.dat']},
include_package_data=True,
requires = ['numpy', 'scipy'],
zip_safe=False
)
|
<commit_before>from setuptools import setup, Extension
import numpy
setup (name = 'xfel', \
version = '1.0', \
packages = ['xfel',
'xfel.core',
'xfel.orientaton',
'xfel.sampling',
'xfel.test',
],
package_dir = {'xfel': './xfel'},
requires = ['numpy', 'scipy']
)
<commit_msg>Include data files in package<commit_after>
|
from setuptools import setup, Extension
import numpy
setup (name = 'bxfel', \
version = '1.0', \
packages = ['bxfel',
'bxfel.core',
'bxfel.orientation',
'bxfel.sampling',
'bxfel.test',
],
package_dir = {'bxfel': './bxfel'},
package_data={'bxfel':['orientation/resources/gauss/*.dat',
'orientation/resources/chebyshev/*.dat']},
include_package_data=True,
requires = ['numpy', 'scipy'],
zip_safe=False
)
|
from setuptools import setup, Extension
import numpy
setup (name = 'xfel', \
version = '1.0', \
packages = ['xfel',
'xfel.core',
'xfel.orientaton',
'xfel.sampling',
'xfel.test',
],
package_dir = {'xfel': './xfel'},
requires = ['numpy', 'scipy']
)
Include data files in packagefrom setuptools import setup, Extension
import numpy
setup (name = 'bxfel', \
version = '1.0', \
packages = ['bxfel',
'bxfel.core',
'bxfel.orientation',
'bxfel.sampling',
'bxfel.test',
],
package_dir = {'bxfel': './bxfel'},
package_data={'bxfel':['orientation/resources/gauss/*.dat',
'orientation/resources/chebyshev/*.dat']},
include_package_data=True,
requires = ['numpy', 'scipy'],
zip_safe=False
)
|
<commit_before>from setuptools import setup, Extension
import numpy
setup (name = 'xfel', \
version = '1.0', \
packages = ['xfel',
'xfel.core',
'xfel.orientaton',
'xfel.sampling',
'xfel.test',
],
package_dir = {'xfel': './xfel'},
requires = ['numpy', 'scipy']
)
<commit_msg>Include data files in package<commit_after>from setuptools import setup, Extension
import numpy
setup (name = 'bxfel', \
version = '1.0', \
packages = ['bxfel',
'bxfel.core',
'bxfel.orientation',
'bxfel.sampling',
'bxfel.test',
],
package_dir = {'bxfel': './bxfel'},
package_data={'bxfel':['orientation/resources/gauss/*.dat',
'orientation/resources/chebyshev/*.dat']},
include_package_data=True,
requires = ['numpy', 'scipy'],
zip_safe=False
)
|
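The diff in the record above works because setuptools' package_data maps a package name to glob patterns that are resolved relative to that package's directory, so the matched .dat files travel with the installed code. A minimal standalone sketch of the same mechanism, using a hypothetical example_pkg layout rather than the bxfel project itself:

from setuptools import setup, find_packages

# Hypothetical layout (not the bxfel tree):
#   example_pkg/
#       __init__.py
#       resources/gauss/table.dat
# The glob is resolved relative to the example_pkg package, so the .dat
# files are installed alongside the Python modules.
setup(
    name='example-pkg',
    version='0.1',
    packages=find_packages(),
    package_data={'example_pkg': ['resources/gauss/*.dat']},
    include_package_data=True,
    zip_safe=False,
)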
11a1ce43d2246574c5b4090252cb6464385194f4
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='jmbo-skeleton',
version='0.6',
description='Create a Jmbo project environment quickly. Includes a Jmbo demo application.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='',
packages = find_packages(),
install_requires = [
'jmbo-foundry>=1.1.1',
'raven',
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
from setuptools import setup, find_packages
setup(
name='jmbo-skeleton',
version='0.6',
description='Create a Jmbo project environment quickly. Includes a Jmbo demo application.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='',
packages = find_packages(),
install_requires = [
'jmbo-foundry>=1.1.1',
'raven<3.0.0',
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
Use an older raven until our Sentry is upgraded to 5.1
|
Use an older raven until our Sentry is upgraded to 5.1
|
Python
|
bsd-3-clause
|
praekelt/jmbo-skeleton,praekelt/jmbo-skeleton,praekelt/jmbo-skeleton
|
from setuptools import setup, find_packages
setup(
name='jmbo-skeleton',
version='0.6',
description='Create a Jmbo project environment quickly. Includes a Jmbo demo application.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='',
packages = find_packages(),
install_requires = [
'jmbo-foundry>=1.1.1',
'raven',
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
Use an older raven until our Sentry is upgraded to 5.1
|
from setuptools import setup, find_packages
setup(
name='jmbo-skeleton',
version='0.6',
description='Create a Jmbo project environment quickly. Includes a Jmbo demo application.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='',
packages = find_packages(),
install_requires = [
'jmbo-foundry>=1.1.1',
'raven<3.0.0',
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='jmbo-skeleton',
version='0.6',
description='Create a Jmbo project environment quickly. Includes a Jmbo demo application.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='',
packages = find_packages(),
install_requires = [
'jmbo-foundry>=1.1.1',
'raven',
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
<commit_msg>Use an older raven until our Sentry is upgraded to 5.1<commit_after>
|
from setuptools import setup, find_packages
setup(
name='jmbo-skeleton',
version='0.6',
description='Create a Jmbo project environment quickly. Includes a Jmbo demo application.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='',
packages = find_packages(),
install_requires = [
'jmbo-foundry>=1.1.1',
'raven<3.0.0',
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
from setuptools import setup, find_packages
setup(
name='jmbo-skeleton',
version='0.6',
description='Create a Jmbo project environment quickly. Includes a Jmbo demo application.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='',
packages = find_packages(),
install_requires = [
'jmbo-foundry>=1.1.1',
'raven',
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
Use an older raven until our Sentry is upgraded to 5.1from setuptools import setup, find_packages
setup(
name='jmbo-skeleton',
version='0.6',
description='Create a Jmbo project environment quickly. Includes a Jmbo demo application.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='',
packages = find_packages(),
install_requires = [
'jmbo-foundry>=1.1.1',
'raven<3.0.0',
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='jmbo-skeleton',
version='0.6',
description='Create a Jmbo project environment quickly. Includes a Jmbo demo application.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='',
packages = find_packages(),
install_requires = [
'jmbo-foundry>=1.1.1',
'raven',
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
<commit_msg>Use an older raven until our Sentry is upgraded to 5.1<commit_after>from setuptools import setup, find_packages
setup(
name='jmbo-skeleton',
version='0.6',
description='Create a Jmbo project environment quickly. Includes a Jmbo demo application.',
long_description = open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='',
packages = find_packages(),
install_requires = [
'jmbo-foundry>=1.1.1',
'raven<3.0.0',
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
38ccfb87ba7f6f139dbb99ba5002a8abec8c40be
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
Add PyPI classifiers for Python 3.4, 3.5 and 3.6
|
Add PyPI classifiers for Python 3.4, 3.5 and 3.6
Since the tests pass on Python 3.
|
Python
|
bsd-3-clause
|
jacobian/wsgi-sslify
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
Add PyPI classifiers for Python 3.4, 3.5 and 3.6
Since the tests pass on Python 3.
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
<commit_before>from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
<commit_msg>Add PyPI classifiers for Python 3.4, 3.5 and 3.6
Since the tests pass on Python 3.<commit_after>
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
Add PyPI classifiers for Python 3.4, 3.5 and 3.6
Since the tests pass on Python 3.from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
<commit_before>from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
<commit_msg>Add PyPI classifiers for Python 3.4, 3.5 and 3.6
Since the tests pass on Python 3.<commit_after>from setuptools import setup
setup(
name = "wsgi-sslify",
description = "WSGI middleware to force HTTPS.",
version = "1.0.1",
author = "Jacob Kaplan-Moss",
author_email = "jacob@jacobian.org",
url = "https://github.com/jacobian/wsgi-sslify",
py_modules = ['wsgi_sslify'],
install_requires = ['werkzeug>=0.10.1'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware'
]
)
|
8d5e1aaf0a06eeeacafcf62a58f83a4c23fc59b7
|
comet/handler/test/test_spawn.py
|
comet/handler/test/test_spawn.py
|
import os
import sys
import tempfile
from twisted.trial import unittest
from twisted.python import util
from comet.icomet import IHandler
from comet.handler import SpawnCommand
SHELL = '/bin/sh'
class DummyEvent(object):
def __init__(self, text=None):
self.text = text or u""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_interface(self):
self.assertTrue(IHandler.implementedBy(SpawnCommand))
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
return self.assertFailure(spawn(DummyEvent()), Exception)
def test_write_data(self):
if not os.access(SHELL, os.X_OK):
raise unittest.SkipTest("Shell not available")
TEXT = "Test spawn process"
output_file = tempfile.NamedTemporaryFile()
def read_data(result):
try:
self.assertEqual(output_file.read(), TEXT)
finally:
output_file.close()
spawn = SpawnCommand('/bin/sh', util.sibpath(__file__, "test_spawn.sh"), output_file.name)
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
|
import os
import sys
import tempfile
from twisted.trial import unittest
from twisted.python import util
from comet.icomet import IHandler
from comet.handler import SpawnCommand
SHELL = '/bin/sh'
class DummyEvent(object):
def __init__(self, text=None):
self.text = text or u""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_interface(self):
self.assertTrue(IHandler.implementedBy(SpawnCommand))
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
return self.assertFailure(spawn(DummyEvent()), Exception)
def test_write_data(self):
if not os.access(SHELL, os.X_OK):
raise unittest.SkipTest("Shell not available")
TEXT = u"Test spawn process"
output_file = tempfile.NamedTemporaryFile()
def read_data(result):
try:
# NamedTemporaryFile is opened in binary mode, so we need to
# encode the read for comparison.
self.assertEqual(output_file.read().decode('utf-8'), TEXT)
finally:
output_file.close()
spawn = SpawnCommand('/bin/sh', util.sibpath(__file__, "test_spawn.sh"), output_file.name)
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
|
Convert read data to unicode.
|
Convert read data to unicode.
The NamedTemporaryFile is in binary mode by default, so the read returns raw
bytes.
|
Python
|
bsd-2-clause
|
jdswinbank/Comet,jdswinbank/Comet
|
import os
import sys
import tempfile
from twisted.trial import unittest
from twisted.python import util
from comet.icomet import IHandler
from comet.handler import SpawnCommand
SHELL = '/bin/sh'
class DummyEvent(object):
def __init__(self, text=None):
self.text = text or u""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_interface(self):
self.assertTrue(IHandler.implementedBy(SpawnCommand))
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
return self.assertFailure(spawn(DummyEvent()), Exception)
def test_write_data(self):
if not os.access(SHELL, os.X_OK):
raise unittest.SkipTest("Shell not available")
TEXT = "Test spawn process"
output_file = tempfile.NamedTemporaryFile()
def read_data(result):
try:
self.assertEqual(output_file.read(), TEXT)
finally:
output_file.close()
spawn = SpawnCommand('/bin/sh', util.sibpath(__file__, "test_spawn.sh"), output_file.name)
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
Convert read data to unicode.
The NamedTemporaryFile is in binary mode by default, so the read returns raw
bytes.
|
import os
import sys
import tempfile
from twisted.trial import unittest
from twisted.python import util
from comet.icomet import IHandler
from comet.handler import SpawnCommand
SHELL = '/bin/sh'
class DummyEvent(object):
def __init__(self, text=None):
self.text = text or u""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_interface(self):
self.assertTrue(IHandler.implementedBy(SpawnCommand))
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
return self.assertFailure(spawn(DummyEvent()), Exception)
def test_write_data(self):
if not os.access(SHELL, os.X_OK):
raise unittest.SkipTest("Shell not available")
TEXT = u"Test spawn process"
output_file = tempfile.NamedTemporaryFile()
def read_data(result):
try:
# NamedTemporaryFile is opened in binary mode, so we need to
# encode the read for comparison.
self.assertEqual(output_file.read().decode('utf-8'), TEXT)
finally:
output_file.close()
spawn = SpawnCommand('/bin/sh', util.sibpath(__file__, "test_spawn.sh"), output_file.name)
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
|
<commit_before>import os
import sys
import tempfile
from twisted.trial import unittest
from twisted.python import util
from comet.icomet import IHandler
from comet.handler import SpawnCommand
SHELL = '/bin/sh'
class DummyEvent(object):
def __init__(self, text=None):
self.text = text or u""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_interface(self):
self.assertTrue(IHandler.implementedBy(SpawnCommand))
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
return self.assertFailure(spawn(DummyEvent()), Exception)
def test_write_data(self):
if not os.access(SHELL, os.X_OK):
raise unittest.SkipTest("Shell not available")
TEXT = "Test spawn process"
output_file = tempfile.NamedTemporaryFile()
def read_data(result):
try:
self.assertEqual(output_file.read(), TEXT)
finally:
output_file.close()
spawn = SpawnCommand('/bin/sh', util.sibpath(__file__, "test_spawn.sh"), output_file.name)
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
<commit_msg>Convert read data to unicode.
The NamedTemporaryFile is in binary mode by default, so the read returns raw
bytes.<commit_after>
|
import os
import sys
import tempfile
from twisted.trial import unittest
from twisted.python import util
from comet.icomet import IHandler
from comet.handler import SpawnCommand
SHELL = '/bin/sh'
class DummyEvent(object):
def __init__(self, text=None):
self.text = text or u""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_interface(self):
self.assertTrue(IHandler.implementedBy(SpawnCommand))
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
return self.assertFailure(spawn(DummyEvent()), Exception)
def test_write_data(self):
if not os.access(SHELL, os.X_OK):
raise unittest.SkipTest("Shell not available")
TEXT = u"Test spawn process"
output_file = tempfile.NamedTemporaryFile()
def read_data(result):
try:
# NamedTemporaryFile is opened in binary mode, so we need to
# encode the read for comparison.
self.assertEqual(output_file.read().decode('utf-8'), TEXT)
finally:
output_file.close()
spawn = SpawnCommand('/bin/sh', util.sibpath(__file__, "test_spawn.sh"), output_file.name)
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
|
import os
import sys
import tempfile
from twisted.trial import unittest
from twisted.python import util
from comet.icomet import IHandler
from comet.handler import SpawnCommand
SHELL = '/bin/sh'
class DummyEvent(object):
def __init__(self, text=None):
self.text = text or u""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_interface(self):
self.assertTrue(IHandler.implementedBy(SpawnCommand))
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
return self.assertFailure(spawn(DummyEvent()), Exception)
def test_write_data(self):
if not os.access(SHELL, os.X_OK):
raise unittest.SkipTest("Shell not available")
TEXT = "Test spawn process"
output_file = tempfile.NamedTemporaryFile()
def read_data(result):
try:
self.assertEqual(output_file.read(), TEXT)
finally:
output_file.close()
spawn = SpawnCommand('/bin/sh', util.sibpath(__file__, "test_spawn.sh"), output_file.name)
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
Convert read data to unicode.
The NamedTemporaryFile is in binary mode by default, so the read returns raw
bytes.import os
import sys
import tempfile
from twisted.trial import unittest
from twisted.python import util
from comet.icomet import IHandler
from comet.handler import SpawnCommand
SHELL = '/bin/sh'
class DummyEvent(object):
def __init__(self, text=None):
self.text = text or u""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_interface(self):
self.assertTrue(IHandler.implementedBy(SpawnCommand))
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
return self.assertFailure(spawn(DummyEvent()), Exception)
def test_write_data(self):
if not os.access(SHELL, os.X_OK):
raise unittest.SkipTest("Shell not available")
TEXT = u"Test spawn process"
output_file = tempfile.NamedTemporaryFile()
def read_data(result):
try:
# NamedTemporaryFile is opened in binary mode, so we need to
# encode the read for comparison.
self.assertEqual(output_file.read().decode('utf-8'), TEXT)
finally:
output_file.close()
spawn = SpawnCommand('/bin/sh', util.sibpath(__file__, "test_spawn.sh"), output_file.name)
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
|
<commit_before>import os
import sys
import tempfile
from twisted.trial import unittest
from twisted.python import util
from comet.icomet import IHandler
from comet.handler import SpawnCommand
SHELL = '/bin/sh'
class DummyEvent(object):
def __init__(self, text=None):
self.text = text or u""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_interface(self):
self.assertTrue(IHandler.implementedBy(SpawnCommand))
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
return self.assertFailure(spawn(DummyEvent()), Exception)
def test_write_data(self):
if not os.access(SHELL, os.X_OK):
raise unittest.SkipTest("Shell not available")
TEXT = "Test spawn process"
output_file = tempfile.NamedTemporaryFile()
def read_data(result):
try:
self.assertEqual(output_file.read(), TEXT)
finally:
output_file.close()
spawn = SpawnCommand('/bin/sh', util.sibpath(__file__, "test_spawn.sh"), output_file.name)
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
<commit_msg>Convert read data to unicode.
The NamedTemporaryFile is in binary mode by default, so the read returns raw
bytes.<commit_after>import os
import sys
import tempfile
from twisted.trial import unittest
from twisted.python import util
from comet.icomet import IHandler
from comet.handler import SpawnCommand
SHELL = '/bin/sh'
class DummyEvent(object):
def __init__(self, text=None):
self.text = text or u""
class SpawnCommandProtocolTestCase(unittest.TestCase):
def test_interface(self):
self.assertTrue(IHandler.implementedBy(SpawnCommand))
def test_good_process(self):
spawn = SpawnCommand(sys.executable)
d = spawn(DummyEvent())
d.addCallback(self.assertEqual, True)
return d
def test_bad_process(self):
spawn = SpawnCommand("/not/a/real/executable")
return self.assertFailure(spawn(DummyEvent()), Exception)
def test_write_data(self):
if not os.access(SHELL, os.X_OK):
raise unittest.SkipTest("Shell not available")
TEXT = u"Test spawn process"
output_file = tempfile.NamedTemporaryFile()
def read_data(result):
try:
# NamedTemporaryFile is opened in binary mode, so we need to
# encode the read for comparison.
self.assertEqual(output_file.read().decode('utf-8'), TEXT)
finally:
output_file.close()
spawn = SpawnCommand('/bin/sh', util.sibpath(__file__, "test_spawn.sh"), output_file.name)
d = spawn(DummyEvent(TEXT))
d.addCallback(read_data)
return d
|
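The reasoning in the commit message above can be reproduced in isolation: tempfile.NamedTemporaryFile opens in 'w+b' by default, so reads return raw bytes that must be decoded before comparing against a text string. A minimal sketch of that behaviour, independent of the Comet test suite:

import tempfile

text = u"Test spawn process"
with tempfile.NamedTemporaryFile() as output_file:
    output_file.write(text.encode('utf-8'))  # binary mode: bytes in
    output_file.flush()
    output_file.seek(0)
    raw = output_file.read()                 # bytes out, not str
    assert raw.decode('utf-8') == text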
0261c895cb41f5caba42ae432b997fd3c941e96f
|
tests.py
|
tests.py
|
import pytest
import cleaner
class TestTagRemoval():
def test_span_removal(self):
text = ('<span style="font-family: "helvetica neue" ,'
'"arial" , "helvetica" , sans-serif;">This is some'
' dummy text lalalala</span> This is some more dummy text '
'<span>test</span>')
expected = ('This is some dummy text lalalala This is some more dummy '
'text test')
cleaned = cleaner.remove_superflous_markup(text)
assert cleaned == expected
|
import pytest
import cleaner
class TestTagTools():
def test_get_pure_tag(self):
tag1 = '<div>'
tag2 = '</div>'
tag3 = '<pre class="prettyprint">'
assert cleaner.get_pure_tag(tag1) == '<div>'
assert cleaner.get_pure_tag(tag2) == '</div>'
assert cleaner.get_pure_tag(tag3) == '<pre>'
|
Add test for getting pure html tag
|
Add test for getting pure html tag
|
Python
|
mit
|
jamalmoir/blogger_html_cleaner
|
import pytest
import cleaner
class TestTagRemoval():
def test_span_removal(self):
text = ('<span style="font-family: "helvetica neue" ,'
'"arial" , "helvetica" , sans-serif;">This is some'
' dummy text lalalala</span> This is some more dummy text '
'<span>test</span>')
expected = ('This is some dummy text lalalala This is some more dummy '
'text test')
cleaned = cleaner.remove_superflous_markup(text)
assert cleaned == expected
Add test for getting pure html tag
|
import pytest
import cleaner
class TestTagTools():
def test_get_pure_tag(self):
tag1 = '<div>'
tag2 = '</div>'
tag3 = '<pre class="prettyprint">'
assert cleaner.get_pure_tag(tag1) == '<div>'
assert cleaner.get_pure_tag(tag2) == '</div>'
assert cleaner.get_pure_tag(tag3) == '<pre>'
|
<commit_before>import pytest
import cleaner
class TestTagRemoval():
def test_span_removal(self):
text = ('<span style="font-family: "helvetica neue" ,'
'"arial" , "helvetica" , sans-serif;">This is some'
' dummy text lalalala</span> This is some more dummy text '
'<span>test</span>')
expected = ('This is some dummy text lalalala This is some more dummy '
'text test')
cleaned = cleaner.remove_superflous_markup(text)
assert cleaned == expected
<commit_msg>Add test for getting pure html tag<commit_after>
|
import pytest
import cleaner
class TestTagTools():
def test_get_pure_tag(self):
tag1 = '<div>'
tag2 = '</div>'
tag3 = '<pre class="prettyprint">'
assert cleaner.get_pure_tag(tag1) == '<div>'
assert cleaner.get_pure_tag(tag2) == '</div>'
assert cleaner.get_pure_tag(tag3) == '<pre>'
|
import pytest
import cleaner
class TestTagRemoval():
def test_span_removal(self):
text = ('<span style="font-family: "helvetica neue" ,'
'"arial" , "helvetica" , sans-serif;">This is some'
' dummy text lalalala</span> This is some more dummy text '
'<span>test</span>')
expected = ('This is some dummy text lalalala This is some more dummy '
'text test')
cleaned = cleaner.remove_superflous_markup(text)
assert cleaned == expected
Add test for getting pure html tagimport pytest
import cleaner
class TestTagTools():
def test_get_pure_tag(self):
tag1 = '<div>'
tag2 = '</div>'
tag3 = '<pre class="prettyprint">'
assert cleaner.get_pure_tag(tag1) == '<div>'
assert cleaner.get_pure_tag(tag2) == '</div>'
assert cleaner.get_pure_tag(tag3) == '<pre>'
|
<commit_before>import pytest
import cleaner
class TestTagRemoval():
def test_span_removal(self):
text = ('<span style="font-family: "helvetica neue" ,'
'"arial" , "helvetica" , sans-serif;">This is some'
' dummy text lalalala</span> This is some more dummy text '
'<span>test</span>')
expected = ('This is some dummy text lalalala This is some more dummy '
'text test')
cleaned = cleaner.remove_superflous_markup(text)
assert cleaned == expected
<commit_msg>Add test for getting pure html tag<commit_after>import pytest
import cleaner
class TestTagTools():
def test_get_pure_tag(self):
tag1 = '<div>'
tag2 = '</div>'
tag3 = '<pre class="prettyprint">'
assert cleaner.get_pure_tag(tag1) == '<div>'
assert cleaner.get_pure_tag(tag2) == '</div>'
assert cleaner.get_pure_tag(tag3) == '<pre>'
|
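The new test above only pins down the expected behaviour of get_pure_tag; the cleaner module's real implementation is not part of this record. One plausible regex-based sketch that satisfies the three assertions, offered purely as an illustration and not as the project's actual code:

import re

def get_pure_tag(tag):
    # Strip attributes, keeping only '<name>' or '</name>'.
    match = re.match(r'<\s*(/?)\s*([a-zA-Z][a-zA-Z0-9]*)', tag)
    if match is None:
        return tag
    closing, name = match.groups()
    return '<%s%s>' % (closing, name)

assert get_pure_tag('<div>') == '<div>'
assert get_pure_tag('</div>') == '</div>'
assert get_pure_tag('<pre class="prettyprint">') == '<pre>'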
d5de8224a0d67b74444a0ad7c755e3c7bc1c39a5
|
features.py
|
features.py
|
"""
Define all features to be extracted from the data
"""
from PIL import Image
from PIL.ImageStat import Stat
from skimage.feature import local_binary_pattern
class BaseFeatureExtractor(object):
""" Basis for all feature extractors
"""
def extract(self, data):
""" Return list of feature values
"""
raise NotImplementedError('No way of extracting features specified')
class BasicImageStats(BaseFeatureExtractor):
""" Compute some basic pixel-based image statistics
"""
def extract(self, img_path):
stats = Stat(Image.open(img_path))
return stats.count \
+ stats.sum \
+ stats.sum2 \
+ stats.mean \
+ stats.median \
+ stats.rms \
+ stats.var \
+ stats.stddev
class LocalBinaryPatterns(BaseFeatureExtractor):
""" Extract some LBPs
"""
def extract(self, img_path):
image = Image.open(img_path)
assert image.size > (500, 500), 'Image must have a size of at least 500x500'
box = (100, 100, 500, 500)
sub_img = image.crop(box)
lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform')
return lbp.flat
|
"""
Define all features to be extracted from the data
"""
import numpy as np
from PIL import Image
from PIL.ImageStat import Stat
from skimage.feature import local_binary_pattern
class BaseFeatureExtractor(object):
""" Basis for all feature extractors
"""
def extract(self, data):
""" Return list of feature values
"""
raise NotImplementedError('No way of extracting features specified')
class BasicImageStats(BaseFeatureExtractor):
""" Compute some basic pixel-based image statistics
"""
def extract(self, img_path):
stats = Stat(Image.open(img_path))
return stats.count \
+ stats.sum \
+ stats.sum2 \
+ stats.mean \
+ stats.median \
+ stats.rms \
+ stats.var \
+ stats.stddev
class LocalBinaryPatterns(BaseFeatureExtractor):
""" Extract some LBPs
"""
def extract(self, img_path):
image = Image.open(img_path)
# assemble region of interest
fac = 30/80
w, h = image.size
box = (
int(fac * w), int(fac * h),
int((1-fac) * w), int((1-fac) * h)
)
sub_img = image.crop(box)
# analyze local binary patterns
lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform')
hist = np.bincount(
np.array(lbp.flat).astype(np.int64),
minlength=50
)
return hist
|
Use histogram of local binary patterns
|
Use histogram of local binary patterns
|
Python
|
mit
|
kpj/PyClass
|
"""
Define all features to be extracted from the data
"""
from PIL import Image
from PIL.ImageStat import Stat
from skimage.feature import local_binary_pattern
class BaseFeatureExtractor(object):
""" Basis for all feature extractors
"""
def extract(self, data):
""" Return list of feature values
"""
raise NotImplementedError('No way of extracting features specified')
class BasicImageStats(BaseFeatureExtractor):
""" Compute some basic pixel-based image statistics
"""
def extract(self, img_path):
stats = Stat(Image.open(img_path))
return stats.count \
+ stats.sum \
+ stats.sum2 \
+ stats.mean \
+ stats.median \
+ stats.rms \
+ stats.var \
+ stats.stddev
class LocalBinaryPatterns(BaseFeatureExtractor):
""" Extract some LBPs
"""
def extract(self, img_path):
image = Image.open(img_path)
assert image.size > (500, 500), 'Image must have a size of at least 500x500'
box = (100, 100, 500, 500)
sub_img = image.crop(box)
lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform')
return lbp.flat
Use histogram of local binary patterns
|
"""
Define all features to be extracted from the data
"""
import numpy as np
from PIL import Image
from PIL.ImageStat import Stat
from skimage.feature import local_binary_pattern
class BaseFeatureExtractor(object):
""" Basis for all feature extractors
"""
def extract(self, data):
""" Return list of feature values
"""
raise NotImplementedError('No way of extracting features specified')
class BasicImageStats(BaseFeatureExtractor):
""" Compute some basic pixel-based image statistics
"""
def extract(self, img_path):
stats = Stat(Image.open(img_path))
return stats.count \
+ stats.sum \
+ stats.sum2 \
+ stats.mean \
+ stats.median \
+ stats.rms \
+ stats.var \
+ stats.stddev
class LocalBinaryPatterns(BaseFeatureExtractor):
""" Extract some LBPs
"""
def extract(self, img_path):
image = Image.open(img_path)
# assemble region of interest
fac = 30/80
w, h = image.size
box = (
int(fac * w), int(fac * h),
int((1-fac) * w), int((1-fac) * h)
)
sub_img = image.crop(box)
# analyze local binary patterns
lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform')
hist = np.bincount(
np.array(lbp.flat).astype(np.int64),
minlength=50
)
return hist
|
<commit_before>"""
Define all features to be extracted from the data
"""
from PIL import Image
from PIL.ImageStat import Stat
from skimage.feature import local_binary_pattern
class BaseFeatureExtractor(object):
""" Basis for all feature extractors
"""
def extract(self, data):
""" Return list of feature values
"""
raise NotImplementedError('No way of extracting features specified')
class BasicImageStats(BaseFeatureExtractor):
""" Compute some basic pixel-based image statistics
"""
def extract(self, img_path):
stats = Stat(Image.open(img_path))
return stats.count \
+ stats.sum \
+ stats.sum2 \
+ stats.mean \
+ stats.median \
+ stats.rms \
+ stats.var \
+ stats.stddev
class LocalBinaryPatterns(BaseFeatureExtractor):
""" Extract some LBPs
"""
def extract(self, img_path):
image = Image.open(img_path)
assert image.size > (500, 500), 'Image must have a size of at least 500x500'
box = (100, 100, 500, 500)
sub_img = image.crop(box)
lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform')
return lbp.flat
<commit_msg>Use histogram of local binary patterns<commit_after>
|
"""
Define all features to be extracted from the data
"""
import numpy as np
from PIL import Image
from PIL.ImageStat import Stat
from skimage.feature import local_binary_pattern
class BaseFeatureExtractor(object):
""" Basis for all feature extractors
"""
def extract(self, data):
""" Return list of feature values
"""
raise NotImplementedError('No way of extracting features specified')
class BasicImageStats(BaseFeatureExtractor):
""" Compute some basic pixel-based image statistics
"""
def extract(self, img_path):
stats = Stat(Image.open(img_path))
return stats.count \
+ stats.sum \
+ stats.sum2 \
+ stats.mean \
+ stats.median \
+ stats.rms \
+ stats.var \
+ stats.stddev
class LocalBinaryPatterns(BaseFeatureExtractor):
""" Extract some LBPs
"""
def extract(self, img_path):
image = Image.open(img_path)
# assemble region of interest
fac = 30/80
w, h = image.size
box = (
int(fac * w), int(fac * h),
int((1-fac) * w), int((1-fac) * h)
)
sub_img = image.crop(box)
# analyze local binary patterns
lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform')
hist = np.bincount(
np.array(lbp.flat).astype(np.int64),
minlength=50
)
return hist
|
"""
Define all features to be extracted from the data
"""
from PIL import Image
from PIL.ImageStat import Stat
from skimage.feature import local_binary_pattern
class BaseFeatureExtractor(object):
""" Basis for all feature extractors
"""
def extract(self, data):
""" Return list of feature values
"""
raise NotImplementedError('No way of extracting features specified')
class BasicImageStats(BaseFeatureExtractor):
""" Compute some basic pixel-based image statistics
"""
def extract(self, img_path):
stats = Stat(Image.open(img_path))
return stats.count \
+ stats.sum \
+ stats.sum2 \
+ stats.mean \
+ stats.median \
+ stats.rms \
+ stats.var \
+ stats.stddev
class LocalBinaryPatterns(BaseFeatureExtractor):
""" Extract some LBPs
"""
def extract(self, img_path):
image = Image.open(img_path)
assert image.size > (500, 500), 'Image must have a size of at least 500x500'
box = (100, 100, 500, 500)
sub_img = image.crop(box)
lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform')
return lbp.flat
Use histogram of local binary patterns"""
Define all features to be extracted from the data
"""
import numpy as np
from PIL import Image
from PIL.ImageStat import Stat
from skimage.feature import local_binary_pattern
class BaseFeatureExtractor(object):
""" Basis for all feature extractors
"""
def extract(self, data):
""" Return list of feature values
"""
raise NotImplementedError('No way of extracting features specified')
class BasicImageStats(BaseFeatureExtractor):
""" Compute some basic pixel-based image statistics
"""
def extract(self, img_path):
stats = Stat(Image.open(img_path))
return stats.count \
+ stats.sum \
+ stats.sum2 \
+ stats.mean \
+ stats.median \
+ stats.rms \
+ stats.var \
+ stats.stddev
class LocalBinaryPatterns(BaseFeatureExtractor):
""" Extract some LBPs
"""
def extract(self, img_path):
image = Image.open(img_path)
# assemble region of interest
fac = 30/80
w, h = image.size
box = (
int(fac * w), int(fac * h),
int((1-fac) * w), int((1-fac) * h)
)
sub_img = image.crop(box)
# analyze local binary patterns
lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform')
hist = np.bincount(
np.array(lbp.flat).astype(np.int64),
minlength=50
)
return hist
|
<commit_before>"""
Define all features to be extracted from the data
"""
from PIL import Image
from PIL.ImageStat import Stat
from skimage.feature import local_binary_pattern
class BaseFeatureExtractor(object):
""" Basis for all feature extractors
"""
def extract(self, data):
""" Return list of feature values
"""
raise NotImplementedError('No way of extracting features specified')
class BasicImageStats(BaseFeatureExtractor):
""" Compute some basic pixel-based image statistics
"""
def extract(self, img_path):
stats = Stat(Image.open(img_path))
return stats.count \
+ stats.sum \
+ stats.sum2 \
+ stats.mean \
+ stats.median \
+ stats.rms \
+ stats.var \
+ stats.stddev
class LocalBinaryPatterns(BaseFeatureExtractor):
""" Extract some LBPs
"""
def extract(self, img_path):
image = Image.open(img_path)
assert image.size > (500, 500), 'Image must have a size of at least 500x500'
box = (100, 100, 500, 500)
sub_img = image.crop(box)
lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform')
return lbp.flat
<commit_msg>Use histogram of local binary patterns<commit_after>"""
Define all features to be extracted from the data
"""
import numpy as np
from PIL import Image
from PIL.ImageStat import Stat
from skimage.feature import local_binary_pattern
class BaseFeatureExtractor(object):
""" Basis for all feature extractors
"""
def extract(self, data):
""" Return list of feature values
"""
raise NotImplementedError('No way of extracting features specified')
class BasicImageStats(BaseFeatureExtractor):
""" Compute some basic pixel-based image statistics
"""
def extract(self, img_path):
stats = Stat(Image.open(img_path))
return stats.count \
+ stats.sum \
+ stats.sum2 \
+ stats.mean \
+ stats.median \
+ stats.rms \
+ stats.var \
+ stats.stddev
class LocalBinaryPatterns(BaseFeatureExtractor):
""" Extract some LBPs
"""
def extract(self, img_path):
image = Image.open(img_path)
# assemble region of interest
fac = 30/80
w, h = image.size
box = (
int(fac * w), int(fac * h),
int((1-fac) * w), int((1-fac) * h)
)
sub_img = image.crop(box)
# analyze local binary patterns
lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform')
hist = np.bincount(
np.array(lbp.flat).astype(np.int64),
minlength=50
)
return hist
|
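As a rough illustration of how the two extractors above could be used together, the sketch below runs both on one image and stacks the results into a single feature vector; the path 'sample.png', the float cast, and the concatenation step are assumptions for the example, not part of the module.

import numpy as np

extractors = [BasicImageStats(), LocalBinaryPatterns()]

# run each extractor on the same image and join the outputs end to end
feature_vector = np.concatenate([
    np.asarray(ex.extract('sample.png'), dtype=float)
    for ex in extractors
])
# vector length depends on the number of image bands and the LBP histogram size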
a20ffb81801a5f96af47ccf4bf7fe0133e74102b
|
source/views.py
|
source/views.py
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
i18n = self.enum_class.i18n[_id]
context.append({
'id': _id,
'i18n': i18n,
})
return Response(context)
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
enum_context = {'id': _id}
for field in self.fields:
enum_context[field] = getattr(self.enum_class, field)[_id]
context.append(enum_context)
return Response(context)
|
Add possibility to set fields
|
Add possibility to set fields
|
Python
|
mit
|
iktw/django-rest-enum-view
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
i18n = self.enum_class.i18n[_id]
context.append({
'id': _id,
'i18n': i18n,
})
return Response(context)
Add possibility to set fields
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
enum_context = {'id': _id}
for field in self.fields:
enum_context[field] = getattr(self.enum_class, field)[_id]
context.append(enum_context)
return Response(context)
|
<commit_before>from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
i18n = self.enum_class.i18n[_id]
context.append({
'id': _id,
'i18n': i18n,
})
return Response(context)
<commit_msg>Add possibility to set fields<commit_after>
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
enum_context = {'id': _id}
for field in self.fields:
enum_context[field] = getattr(self.enum_class, field)[_id]
context.append(enum_context)
return Response(context)
|
from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
i18n = self.enum_class.i18n[_id]
context.append({
'id': _id,
'i18n': i18n,
})
return Response(context)
Add possibility to set fieldsfrom rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
enum_context = {'id': _id}
for field in self.fields:
enum_context[field] = getattr(self.enum_class, field)[_id]
context.append(enum_context)
return Response(context)
|
<commit_before>from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
i18n = self.enum_class.i18n[_id]
context.append({
'id': _id,
'i18n': i18n,
})
return Response(context)
<commit_msg>Add possibility to set fields<commit_after>from rest_framework.views import APIView
from rest_framework.response import Response
class EnumView(APIView):
permission_classes = []
fields = ('i18n', )
def get(self, *args, **kwargs):
enums = self.enum_class.get_as_tuple_list()
context = []
for enum in enums:
_id = enum[1]
enum_context = {'id': _id}
for field in self.fields:
enum_context[field] = getattr(self.enum_class, field)[_id]
context.append(enum_context)
return Response(context)
|
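A hypothetical subclass showing how the new fields hook might be consumed; ColorEnum and its css_class mapping are invented for the example, the only requirement being that every attribute named in fields is an id-keyed mapping on the enum class.

class ColorEnumView(EnumView):
    enum_class = ColorEnum            # hypothetical enum with get_as_tuple_list(), i18n, css_class
    fields = ('i18n', 'css_class')    # each entry becomes a key on every item in the response

# GET -> [{"id": 1, "i18n": "Red", "css_class": "red"}, {"id": 2, "i18n": "Blue", "css_class": "blue"}]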
2314829d58b200570272332c89a85b4009a396bf
|
tests/common.py
|
tests/common.py
|
from pprint import pprint, pformat
import datetime
import os
from sgmock import Fixture
from sgmock import TestCase
if 'USE_SHOTGUN' in os.environ:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server='testing')
else:
from sgmock import Shotgun, ShotgunError, Fault
from sgsession import Session, Entity
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
|
from pprint import pprint, pformat
import datetime
import os
from sgmock import Fixture
from sgmock import TestCase
_shotgun_server = os.environ.get('SHOTGUN', 'mock')
if _shotgun_server == 'mock':
from sgmock import Shotgun, ShotgunError, Fault
else:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server=_shotgun_server)
from sgsession import Session, Entity
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
|
Change detection of Shotgun server for tests
|
Change detection of Shotgun server for tests
|
Python
|
bsd-3-clause
|
westernx/sgsession
|
from pprint import pprint, pformat
import datetime
import os
from sgmock import Fixture
from sgmock import TestCase
if 'USE_SHOTGUN' in os.environ:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server='testing')
else:
from sgmock import Shotgun, ShotgunError, Fault
from sgsession import Session, Entity
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
Change detection of Shotgun server for tests
|
from pprint import pprint, pformat
import datetime
import os
from sgmock import Fixture
from sgmock import TestCase
_shotgun_server = os.environ.get('SHOTGUN', 'mock')
if _shotgun_server == 'mock':
from sgmock import Shotgun, ShotgunError, Fault
else:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server=_shotgun_server)
from sgsession import Session, Entity
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
|
<commit_before>from pprint import pprint, pformat
import datetime
import os
from sgmock import Fixture
from sgmock import TestCase
if 'USE_SHOTGUN' in os.environ:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server='testing')
else:
from sgmock import Shotgun, ShotgunError, Fault
from sgsession import Session, Entity
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
<commit_msg>Change detection of Shotgun server for tests<commit_after>
|
from pprint import pprint, pformat
import datetime
import os
from sgmock import Fixture
from sgmock import TestCase
_shotgun_server = os.environ.get('SHOTGUN', 'mock')
if _shotgun_server == 'mock':
from sgmock import Shotgun, ShotgunError, Fault
else:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server=_shotgun_server)
from sgsession import Session, Entity
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
|
from pprint import pprint, pformat
import datetime
import os
from sgmock import Fixture
from sgmock import TestCase
if 'USE_SHOTGUN' in os.environ:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server='testing')
else:
from sgmock import Shotgun, ShotgunError, Fault
from sgsession import Session, Entity
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
Change detection of Shotgun server for testsfrom pprint import pprint, pformat
import datetime
import os
from sgmock import Fixture
from sgmock import TestCase
_shotgun_server = os.environ.get('SHOTGUN', 'mock')
if _shotgun_server == 'mock':
from sgmock import Shotgun, ShotgunError, Fault
else:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server=_shotgun_server)
from sgsession import Session, Entity
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
|
<commit_before>from pprint import pprint, pformat
import datetime
import os
from sgmock import Fixture
from sgmock import TestCase
if 'USE_SHOTGUN' in os.environ:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server='testing')
else:
from sgmock import Shotgun, ShotgunError, Fault
from sgsession import Session, Entity
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
<commit_msg>Change detection of Shotgun server for tests<commit_after>from pprint import pprint, pformat
import datetime
import os
from sgmock import Fixture
from sgmock import TestCase
_shotgun_server = os.environ.get('SHOTGUN', 'mock')
if _shotgun_server == 'mock':
from sgmock import Shotgun, ShotgunError, Fault
else:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server=_shotgun_server)
from sgsession import Session, Entity
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
|
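Since the backend is now chosen from the SHOTGUN environment variable when the module is imported, a test run picks its server up front; in the sketch below the module path tests.common and the server name 'sandbox' are assumptions, the latter standing in for something registered in shotgun_api3_registry.

import os

os.environ.setdefault('SHOTGUN', 'mock')   # default: the sgmock fake backend
# os.environ['SHOTGUN'] = 'sandbox'        # placeholder name for a real registered server

import tests.common                        # the backend choice is fixed here, at import time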
cb966e6623306ce4dfd96a5a286482489e28b9e9
|
pyclient/lockd.py
|
pyclient/lockd.py
|
# lockd client
import httplib
import json
class LockdClient(object):
def __init__(self, host="127.0.0.1", port=2080):
self._host_port = "%s:%s" % (host, port)
def is_locked(self, name):
return self._lockish("GET", name, 404)
def lock(self, name):
return self._lockish("POST", name, 409)
def unlock(self, name):
return self._lockish("DELETE", name, 404)
def _lockish(self, method, name, false_code):
# FIXME: does name need to escaped here?
path = "/lock/%s" % name
#
conn = httplib.HTTPConnection(self._host_port)
conn.request(method, path)
response = conn.getresponse()
status = response.status
response.read()
conn.close()
#
if status == 200:
return True
elif status == false_code:
return False
else:
msg = "Unexpected response: %s %s; data: %s" % (status, response.reason, data)
raise Exception(msg)
|
# lockd client
import httplib
import json
class LockdClient(object):
def __init__(self, host="127.0.0.1", port=2080):
self._host_port = "%s:%s" % (host, port)
def is_locked(self, name):
return self._lockish("GET", name, 404)
def lock(self, name):
return self._lockish("POST", name, 409)
def unlock(self, name):
return self._lockish("DELETE", name, 404)
def _lockish(self, method, name, false_code):
# FIXME: does name need to escaped here?
path = "/lock/%s" % name
#
conn = httplib.HTTPConnection(self._host_port)
conn.request(method, path)
response = conn.getresponse()
status = response.status
response.read()
conn.close()
#
if status == 200:
return True
elif status == false_code:
return False
else:
msg = "Unexpected response: %s %s" % (status, response.reason)
raise Exception(msg)
|
Fix bug in python client
|
Fix bug in python client
|
Python
|
mit
|
divtxt/lockd,divtxt/lockd
|
# lockd client
import httplib
import json
class LockdClient(object):
def __init__(self, host="127.0.0.1", port=2080):
self._host_port = "%s:%s" % (host, port)
def is_locked(self, name):
return self._lockish("GET", name, 404)
def lock(self, name):
return self._lockish("POST", name, 409)
def unlock(self, name):
return self._lockish("DELETE", name, 404)
def _lockish(self, method, name, false_code):
# FIXME: does name need to escaped here?
path = "/lock/%s" % name
#
conn = httplib.HTTPConnection(self._host_port)
conn.request(method, path)
response = conn.getresponse()
status = response.status
response.read()
conn.close()
#
if status == 200:
return True
elif status == false_code:
return False
else:
msg = "Unexpected response: %s %s; data: %s" % (status, response.reason, data)
raise Exception(msg)
Fix bug in python client
|
# lockd client
import httplib
import json
class LockdClient(object):
def __init__(self, host="127.0.0.1", port=2080):
self._host_port = "%s:%s" % (host, port)
def is_locked(self, name):
return self._lockish("GET", name, 404)
def lock(self, name):
return self._lockish("POST", name, 409)
def unlock(self, name):
return self._lockish("DELETE", name, 404)
def _lockish(self, method, name, false_code):
# FIXME: does name need to escaped here?
path = "/lock/%s" % name
#
conn = httplib.HTTPConnection(self._host_port)
conn.request(method, path)
response = conn.getresponse()
status = response.status
response.read()
conn.close()
#
if status == 200:
return True
elif status == false_code:
return False
else:
msg = "Unexpected response: %s %s" % (status, response.reason)
raise Exception(msg)
|
<commit_before># lockd client
import httplib
import json
class LockdClient(object):
def __init__(self, host="127.0.0.1", port=2080):
self._host_port = "%s:%s" % (host, port)
def is_locked(self, name):
return self._lockish("GET", name, 404)
def lock(self, name):
return self._lockish("POST", name, 409)
def unlock(self, name):
return self._lockish("DELETE", name, 404)
def _lockish(self, method, name, false_code):
# FIXME: does name need to escaped here?
path = "/lock/%s" % name
#
conn = httplib.HTTPConnection(self._host_port)
conn.request(method, path)
response = conn.getresponse()
status = response.status
response.read()
conn.close()
#
if status == 200:
return True
elif status == false_code:
return False
else:
msg = "Unexpected response: %s %s; data: %s" % (status, response.reason, data)
raise Exception(msg)
<commit_msg>Fix bug in python client<commit_after>
|
# lockd client
import httplib
import json
class LockdClient(object):
def __init__(self, host="127.0.0.1", port=2080):
self._host_port = "%s:%s" % (host, port)
def is_locked(self, name):
return self._lockish("GET", name, 404)
def lock(self, name):
return self._lockish("POST", name, 409)
def unlock(self, name):
return self._lockish("DELETE", name, 404)
def _lockish(self, method, name, false_code):
# FIXME: does name need to escaped here?
path = "/lock/%s" % name
#
conn = httplib.HTTPConnection(self._host_port)
conn.request(method, path)
response = conn.getresponse()
status = response.status
response.read()
conn.close()
#
if status == 200:
return True
elif status == false_code:
return False
else:
msg = "Unexpected response: %s %s" % (status, response.reason)
raise Exception(msg)
|
# lockd client
import httplib
import json
class LockdClient(object):
def __init__(self, host="127.0.0.1", port=2080):
self._host_port = "%s:%s" % (host, port)
def is_locked(self, name):
return self._lockish("GET", name, 404)
def lock(self, name):
return self._lockish("POST", name, 409)
def unlock(self, name):
return self._lockish("DELETE", name, 404)
def _lockish(self, method, name, false_code):
# FIXME: does name need to escaped here?
path = "/lock/%s" % name
#
conn = httplib.HTTPConnection(self._host_port)
conn.request(method, path)
response = conn.getresponse()
status = response.status
response.read()
conn.close()
#
if status == 200:
return True
elif status == false_code:
return False
else:
msg = "Unexpected response: %s %s; data: %s" % (status, response.reason, data)
raise Exception(msg)
Fix bug in python client# lockd client
import httplib
import json
class LockdClient(object):
def __init__(self, host="127.0.0.1", port=2080):
self._host_port = "%s:%s" % (host, port)
def is_locked(self, name):
return self._lockish("GET", name, 404)
def lock(self, name):
return self._lockish("POST", name, 409)
def unlock(self, name):
return self._lockish("DELETE", name, 404)
def _lockish(self, method, name, false_code):
# FIXME: does name need to escaped here?
path = "/lock/%s" % name
#
conn = httplib.HTTPConnection(self._host_port)
conn.request(method, path)
response = conn.getresponse()
status = response.status
response.read()
conn.close()
#
if status == 200:
return True
elif status == false_code:
return False
else:
msg = "Unexpected response: %s %s" % (status, response.reason)
raise Exception(msg)
|
<commit_before># lockd client
import httplib
import json
class LockdClient(object):
def __init__(self, host="127.0.0.1", port=2080):
self._host_port = "%s:%s" % (host, port)
def is_locked(self, name):
return self._lockish("GET", name, 404)
def lock(self, name):
return self._lockish("POST", name, 409)
def unlock(self, name):
return self._lockish("DELETE", name, 404)
def _lockish(self, method, name, false_code):
# FIXME: does name need to escaped here?
path = "/lock/%s" % name
#
conn = httplib.HTTPConnection(self._host_port)
conn.request(method, path)
response = conn.getresponse()
status = response.status
response.read()
conn.close()
#
if status == 200:
return True
elif status == false_code:
return False
else:
msg = "Unexpected response: %s %s; data: %s" % (status, response.reason, data)
raise Exception(msg)
<commit_msg>Fix bug in python client<commit_after># lockd client
import httplib
import json
class LockdClient(object):
def __init__(self, host="127.0.0.1", port=2080):
self._host_port = "%s:%s" % (host, port)
def is_locked(self, name):
return self._lockish("GET", name, 404)
def lock(self, name):
return self._lockish("POST", name, 409)
def unlock(self, name):
return self._lockish("DELETE", name, 404)
def _lockish(self, method, name, false_code):
# FIXME: does name need to escaped here?
path = "/lock/%s" % name
#
conn = httplib.HTTPConnection(self._host_port)
conn.request(method, path)
response = conn.getresponse()
status = response.status
response.read()
conn.close()
#
if status == 200:
return True
elif status == false_code:
return False
else:
msg = "Unexpected response: %s %s" % (status, response.reason)
raise Exception(msg)
|
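A short usage sketch for the client above, assuming a lockd server is reachable on the default 127.0.0.1:2080; the lock name and run_report() are placeholders.

client = LockdClient()                     # defaults to 127.0.0.1:2080

if client.lock('nightly-report'):          # True if acquired, False on 409 (already held)
    try:
        run_report()                       # the work that needs mutual exclusion
    finally:
        client.unlock('nightly-report')    # returns False (404) if the lock was already gone
else:
    print "already locked elsewhere:", client.is_locked('nightly-report')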
c6e69e1cec4de30a2e56ec865c3043edc7bf39b5
|
civictechprojects/apps.py
|
civictechprojects/apps.py
|
from django.apps import AppConfig
class CivictechprojectsConfig(AppConfig):
name = 'civictechprojects'
def ready(self):
# Remove any tags that aren't in the canonical tag list
# TODO: Fix so this doesn't break in production database
# from .models import Project
# Project.remove_tags_not_in_list()
|
from django.apps import AppConfig
class CivictechprojectsConfig(AppConfig):
name = 'civictechprojects'
# def ready(self):
# Remove any tags that aren't in the canonical tag list
# TODO: Fix so this doesn't break in production database
# from .models import Project
# Project.remove_tags_not_in_list()
|
Comment out entire function to stop unexpected EOF error
|
Comment out entire function to stop unexpected EOF error
|
Python
|
mit
|
DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange
|
from django.apps import AppConfig
class CivictechprojectsConfig(AppConfig):
name = 'civictechprojects'
def ready(self):
# Remove any tags that aren't in the canonical tag list
# TODO: Fix so this doesn't break in production database
# from .models import Project
# Project.remove_tags_not_in_list()
Comment out entire function to stop unexpected EOF error
|
from django.apps import AppConfig
class CivictechprojectsConfig(AppConfig):
name = 'civictechprojects'
# def ready(self):
# Remove any tags that aren't in the canonical tag list
# TODO: Fix so this doesn't break in production database
# from .models import Project
# Project.remove_tags_not_in_list()
|
<commit_before>from django.apps import AppConfig
class CivictechprojectsConfig(AppConfig):
name = 'civictechprojects'
def ready(self):
# Remove any tags that aren't in the canonical tag list
# TODO: Fix so this doesn't break in production database
# from .models import Project
# Project.remove_tags_not_in_list()
<commit_msg>Comment out entire function to stop unexpected EOF error<commit_after>
|
from django.apps import AppConfig
class CivictechprojectsConfig(AppConfig):
name = 'civictechprojects'
# def ready(self):
# Remove any tags that aren't in the canonical tag list
# TODO: Fix so this doesn't break in production database
# from .models import Project
# Project.remove_tags_not_in_list()
|
from django.apps import AppConfig
class CivictechprojectsConfig(AppConfig):
name = 'civictechprojects'
def ready(self):
# Remove any tags that aren't in the canonical tag list
# TODO: Fix so this doesn't break in production database
# from .models import Project
# Project.remove_tags_not_in_list()
Comment out entire function to stop unexpected EOF errorfrom django.apps import AppConfig
class CivictechprojectsConfig(AppConfig):
name = 'civictechprojects'
# def ready(self):
# Remove any tags that aren't in the canonical tag list
# TODO: Fix so this doesn't break in production database
# from .models import Project
# Project.remove_tags_not_in_list()
|
<commit_before>from django.apps import AppConfig
class CivictechprojectsConfig(AppConfig):
name = 'civictechprojects'
def ready(self):
# Remove any tags that aren't in the canonical tag list
# TODO: Fix so this doesn't break in production database
# from .models import Project
# Project.remove_tags_not_in_list()
<commit_msg>Comment out entire function to stop unexpected EOF error<commit_after>from django.apps import AppConfig
class CivictechprojectsConfig(AppConfig):
name = 'civictechprojects'
# def ready(self):
# Remove any tags that aren't in the canonical tag list
# TODO: Fix so this doesn't break in production database
# from .models import Project
# Project.remove_tags_not_in_list()
|
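For context, the unexpected EOF comes from ready() being left with nothing but comments in its body, which is a syntax error; the sketch below keeps the hook valid instead of commenting it out, with the trailing pass as an assumption rather than the project's chosen fix.

from django.apps import AppConfig

class CivictechprojectsConfig(AppConfig):
    name = 'civictechprojects'

    def ready(self):
        # Remove any tags that aren't in the canonical tag list
        # TODO: Fix so this doesn't break in production database
        # from .models import Project
        # Project.remove_tags_not_in_list()
        pass  # keeps the method body non-empty while the cleanup stays disabled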
fbf6fe6d6e5e3b9e9ea192eba7ef6b76b66ebf0a
|
trade_client.py
|
trade_client.py
|
import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
print "Received: {}".format(response)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
response = json.loads(response)
if response:
if response['type'] == 'trade':
return handle_trade(response)
return "Nothing"
def handle_trade(trade):
id = trade['trade-id'].split(';')[0]
return create_confirm(
id = id,
trade_id = trade['trade-id']
)
|
import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
try:
response = json.loads(response)
if response and isinstance(response, basestring):
return None
if response and response['type'] == 'trade':
return handle_trade(response)
except ValueError:
return None
def handle_trade(trade):
id = trade['trade-id'].split(';')[0]
return create_confirm(
id = id,
trade_id = trade['trade-id']
)
|
Handle incoming messages that aren't JSON objects.
|
Handle incoming messages that aren't JSON objects.
|
Python
|
mit
|
Tribler/decentral-market
|
import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
print "Received: {}".format(response)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
response = json.loads(response)
if response:
if response['type'] == 'trade':
return handle_trade(response)
return "Nothing"
def handle_trade(trade):
id = trade['trade-id'].split(';')[0]
return create_confirm(
id = id,
trade_id = trade['trade-id']
)
Handle incoming messages that aren't JSON objects.
|
import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
try:
response = json.loads(response)
if response and isinstance(response, basestring):
return None
if response and response['type'] == 'trade':
return handle_trade(response)
except ValueError:
return None
def handle_trade(trade):
id = trade['trade-id'].split(';')[0]
return create_confirm(
id = id,
trade_id = trade['trade-id']
)
|
<commit_before>import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
print "Received: {}".format(response)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
response = json.loads(response)
if response:
if response['type'] == 'trade':
return handle_trade(response)
return "Nothing"
def handle_trade(trade):
id = trade['trade-id'].split(';')[0]
return create_confirm(
id = id,
trade_id = trade['trade-id']
)
<commit_msg>Handle incoming messages that aren't JSON objects.<commit_after>
|
import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
try:
response = json.loads(response)
if response and isinstance(response, basestring):
return None
if response and response['type'] == 'trade':
return handle_trade(response)
except ValueError:
return None
def handle_trade(trade):
id = trade['trade-id'].split(';')[0]
return create_confirm(
id = id,
trade_id = trade['trade-id']
)
|
import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
print "Received: {}".format(response)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
response = json.loads(response)
if response:
if response['type'] == 'trade':
return handle_trade(response)
return "Nothing"
def handle_trade(trade):
id = trade['trade-id'].split(';')[0]
return create_confirm(
id = id,
trade_id = trade['trade-id']
)
Handle incoming messages that aren't JSON objects.import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
try:
response = json.loads(response)
if response and isinstance(response, basestring):
return None
if response and response['type'] == 'trade':
return handle_trade(response)
except ValueError:
return None
def handle_trade(trade):
id = trade['trade-id'].split(';')[0]
return create_confirm(
id = id,
trade_id = trade['trade-id']
)
|
<commit_before>import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
print "Received: {}".format(response)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
response = json.loads(response)
if response:
if response['type'] == 'trade':
return handle_trade(response)
return "Nothing"
def handle_trade(trade):
id = trade['trade-id'].split(';')[0]
return create_confirm(
id = id,
trade_id = trade['trade-id']
)
<commit_msg>Handle incoming messages that aren't JSON objects.<commit_after>import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
try:
response = json.loads(response)
if response and isinstance(response, basestring):
return None
if response and response['type'] == 'trade':
return handle_trade(response)
except ValueError:
return None
def handle_trade(trade):
id = trade['trade-id'].split(';')[0]
return create_confirm(
id = id,
trade_id = trade['trade-id']
)
|
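A quick sketch of how the hardened handle_response behaves, using made-up payloads; plain strings and unparsable input now come back as None instead of raising.

print handle_response('not json at all')              # None, the ValueError is caught
print handle_response(json.dumps('just a string'))    # None, bare string payload
print handle_response(json.dumps(
    {'type': 'trade', 'trade-id': '42;abc'}))         # confirm message built for id '42'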
07de3a41daf1a82cb45e63472212821b9f6c809f
|
panoptes/state_machine/states/core.py
|
panoptes/state_machine/states/core.py
|
import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds """
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
self.panoptes.logger.debug("Sleeping {} for {} seconds".format(self.name, seconds))
time.sleep(seconds)
|
import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds
This puts the state into a loop that is responsive to outside messages.
Args:
seconds(float): Seconds to sleep for, defaults to `_sleep_delay`.
"""
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
self.panoptes.logger.debug("Sleeping {} for {} seconds".format(self.name, seconds))
if seconds > 10:
step_time = seconds / 4
while seconds:
seconds = seconds - step_time
# NOTE: DO SOMETHING RESPONSIVE HERE
time.sleep(step_time)
else:
time.sleep(seconds)
|
Add loop to sleep structure
|
Add loop to sleep structure
|
Python
|
mit
|
joshwalawender/POCS,panoptes/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,panoptes/POCS,panoptes/POCS,joshwalawender/POCS,panoptes/POCS,joshwalawender/POCS
|
import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds """
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
self.panoptes.logger.debug("Sleeping {} for {} seconds".format(self.name, seconds))
time.sleep(seconds)
Add loop to sleep structure
|
import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds
This puts the state into a loop that is responsive to outside messages.
Args:
seconds(float): Seconds to sleep for, defaults to `_sleep_delay`.
"""
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
self.panoptes.logger.debug("Sleeping {} for {} seconds".format(self.name, seconds))
if seconds > 10:
step_time = seconds / 4
while seconds:
seconds = seconds - step_time
# NOTE: DO SOMETHING RESPONSIVE HERE
time.sleep(step_time)
else:
time.sleep(seconds)
|
<commit_before>import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds """
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
self.panoptes.logger.debug("Sleeping {} for {} seconds".format(self.name, seconds))
time.sleep(seconds)
<commit_msg>Add loop to sleep structure<commit_after>
|
import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds
This puts the state into a loop that is responsive to outside messages.
Args:
seconds(float): Seconds to sleep for, defaults to `_sleep_delay`.
"""
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
self.panoptes.logger.debug("Sleeping {} for {} seconds".format(self.name, seconds))
if seconds > 10:
step_time = seconds / 4
while seconds:
seconds = seconds - step_time
# NOTE: DO SOMETHING RESPONSIVE HERE
time.sleep(step_time)
else:
time.sleep(seconds)
|
import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds """
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
self.panoptes.logger.debug("Sleeping {} for {} seconds".format(self.name, seconds))
time.sleep(seconds)
Add loop to sleep structureimport time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds
This puts the state into a loop that is responsive to outside messages.
Args:
seconds(float): Seconds to sleep for, defaults to `_sleep_delay`.
"""
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
self.panoptes.logger.debug("Sleeping {} for {} seconds".format(self.name, seconds))
if seconds > 10:
step_time = seconds / 4
while seconds:
seconds = seconds - step_time
# NOTE: DO SOMETHING RESPONSIVE HERE
time.sleep(step_time)
else:
time.sleep(seconds)
|
<commit_before>import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds """
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
self.panoptes.logger.debug("Sleeping {} for {} seconds".format(self.name, seconds))
time.sleep(seconds)
<commit_msg>Add loop to sleep structure<commit_after>import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds
This puts the state into a loop that is responsive to outside messages.
Args:
seconds(float): Seconds to sleep for, defaults to `_sleep_delay`.
"""
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
self.panoptes.logger.debug("Sleeping {} for {} seconds".format(self.name, seconds))
if seconds > 10:
step_time = seconds / 4
while seconds:
seconds = seconds - step_time
# NOTE: DO SOMETHING RESPONSIVE HERE
time.sleep(step_time)
else:
time.sleep(seconds)
|
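To make the new timing concrete, a few illustrative calls on a state instance; the instance itself and the chosen durations are assumptions.

state.sleep()      # 3 s, the default _sleep_delay, in a single time.sleep()
state.sleep(8)     # still one uninterrupted time.sleep(8)
state.sleep(60)    # four 15 s naps, leaving room between them to react to outside messages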
a8179d06885d72e49335263ef2633f3128a20ee7
|
tests/logging.py
|
tests/logging.py
|
import os
from datetime import datetime
def save_logs(groomer, test_description):
divider = ('=' * 10 + '{}' + '=' * 10 + '\n')
test_log_path = 'tests/test_logs/{}.log'.format(test_description)
with open(test_log_path, 'w+') as test_log:
test_log.write(divider.format('TEST LOG'))
test_log.write(str(datetime.now().time()) + '\n')
test_log.write(test_description + '\n')
test_log.write('-' * 20 + '\n')
with open(groomer.logger.log_path, 'r') as logfile:
log = logfile.read()
test_log.write(log)
if os.path.exists(groomer.logger.log_debug_err):
test_log.write(divider.format('ERR LOG'))
with open(groomer.logger.log_debug_err, 'r') as debug_err:
err = debug_err.read()
test_log.write(err)
if os.path.exists(groomer.logger.log_debug_out):
test_log.write(divider.format('OUT LOG'))
with open(groomer.logger.log_debug_out, 'r') as debug_out:
out = debug_out.read()
test_log.write(out)
|
import os
from datetime import datetime
def save_logs(groomer, test_description):
divider = ('=' * 10 + '{}' + '=' * 10 + '\n')
test_log_path = 'tests/test_logs/{}.log'.format(test_description)
with open(test_log_path, 'w+') as test_log:
test_log.write(divider.format('TEST LOG'))
test_log.write(str(datetime.now().time()) + '\n')
test_log.write(test_description + '\n')
test_log.write('-' * 20 + '\n')
with open(groomer.logger.log_path, 'rb') as logfile:
log = logfile.read()
test_log.write(log)
if os.path.exists(groomer.logger.log_debug_err):
test_log.write(divider.format('ERR LOG'))
with open(groomer.logger.log_debug_err, 'rb') as debug_err:
err = debug_err.read()
test_log.write(err)
if os.path.exists(groomer.logger.log_debug_out):
test_log.write(divider.format('OUT LOG'))
with open(groomer.logger.log_debug_out, 'rb') as debug_out:
out = debug_out.read()
test_log.write(out)
|
Read test logs in bytes mode
|
Read test logs in bytes mode
|
Python
|
bsd-3-clause
|
CIRCL/PyCIRCLean,Rafiot/PyCIRCLean,CIRCL/PyCIRCLean,Rafiot/PyCIRCLean
|
import os
from datetime import datetime
def save_logs(groomer, test_description):
divider = ('=' * 10 + '{}' + '=' * 10 + '\n')
test_log_path = 'tests/test_logs/{}.log'.format(test_description)
with open(test_log_path, 'w+') as test_log:
test_log.write(divider.format('TEST LOG'))
test_log.write(str(datetime.now().time()) + '\n')
test_log.write(test_description + '\n')
test_log.write('-' * 20 + '\n')
with open(groomer.logger.log_path, 'r') as logfile:
log = logfile.read()
test_log.write(log)
if os.path.exists(groomer.logger.log_debug_err):
test_log.write(divider.format('ERR LOG'))
with open(groomer.logger.log_debug_err, 'r') as debug_err:
err = debug_err.read()
test_log.write(err)
if os.path.exists(groomer.logger.log_debug_out):
test_log.write(divider.format('OUT LOG'))
with open(groomer.logger.log_debug_out, 'r') as debug_out:
out = debug_out.read()
test_log.write(out)
Read test logs in bytes mode
|
import os
from datetime import datetime
def save_logs(groomer, test_description):
divider = ('=' * 10 + '{}' + '=' * 10 + '\n')
test_log_path = 'tests/test_logs/{}.log'.format(test_description)
with open(test_log_path, 'w+') as test_log:
test_log.write(divider.format('TEST LOG'))
test_log.write(str(datetime.now().time()) + '\n')
test_log.write(test_description + '\n')
test_log.write('-' * 20 + '\n')
with open(groomer.logger.log_path, 'rb') as logfile:
log = logfile.read()
test_log.write(log)
if os.path.exists(groomer.logger.log_debug_err):
test_log.write(divider.format('ERR LOG'))
with open(groomer.logger.log_debug_err, 'rb') as debug_err:
err = debug_err.read()
test_log.write(err)
if os.path.exists(groomer.logger.log_debug_out):
test_log.write(divider.format('OUT LOG'))
with open(groomer.logger.log_debug_out, 'rb') as debug_out:
out = debug_out.read()
test_log.write(out)
|
<commit_before>import os
from datetime import datetime
def save_logs(groomer, test_description):
divider = ('=' * 10 + '{}' + '=' * 10 + '\n')
test_log_path = 'tests/test_logs/{}.log'.format(test_description)
with open(test_log_path, 'w+') as test_log:
test_log.write(divider.format('TEST LOG'))
test_log.write(str(datetime.now().time()) + '\n')
test_log.write(test_description + '\n')
test_log.write('-' * 20 + '\n')
with open(groomer.logger.log_path, 'r') as logfile:
log = logfile.read()
test_log.write(log)
if os.path.exists(groomer.logger.log_debug_err):
test_log.write(divider.format('ERR LOG'))
with open(groomer.logger.log_debug_err, 'r') as debug_err:
err = debug_err.read()
test_log.write(err)
if os.path.exists(groomer.logger.log_debug_out):
test_log.write(divider.format('OUT LOG'))
with open(groomer.logger.log_debug_out, 'r') as debug_out:
out = debug_out.read()
test_log.write(out)
<commit_msg>Read test logs in bytes mode<commit_after>
|
import os
from datetime import datetime
def save_logs(groomer, test_description):
divider = ('=' * 10 + '{}' + '=' * 10 + '\n')
test_log_path = 'tests/test_logs/{}.log'.format(test_description)
with open(test_log_path, 'w+') as test_log:
test_log.write(divider.format('TEST LOG'))
test_log.write(str(datetime.now().time()) + '\n')
test_log.write(test_description + '\n')
test_log.write('-' * 20 + '\n')
with open(groomer.logger.log_path, 'rb') as logfile:
log = logfile.read()
test_log.write(log)
if os.path.exists(groomer.logger.log_debug_err):
test_log.write(divider.format('ERR LOG'))
with open(groomer.logger.log_debug_err, 'rb') as debug_err:
err = debug_err.read()
test_log.write(err)
if os.path.exists(groomer.logger.log_debug_out):
test_log.write(divider.format('OUT LOG'))
with open(groomer.logger.log_debug_out, 'rb') as debug_out:
out = debug_out.read()
test_log.write(out)
|
import os
from datetime import datetime
def save_logs(groomer, test_description):
divider = ('=' * 10 + '{}' + '=' * 10 + '\n')
test_log_path = 'tests/test_logs/{}.log'.format(test_description)
with open(test_log_path, 'w+') as test_log:
test_log.write(divider.format('TEST LOG'))
test_log.write(str(datetime.now().time()) + '\n')
test_log.write(test_description + '\n')
test_log.write('-' * 20 + '\n')
with open(groomer.logger.log_path, 'r') as logfile:
log = logfile.read()
test_log.write(log)
if os.path.exists(groomer.logger.log_debug_err):
test_log.write(divider.format('ERR LOG'))
with open(groomer.logger.log_debug_err, 'r') as debug_err:
err = debug_err.read()
test_log.write(err)
if os.path.exists(groomer.logger.log_debug_out):
test_log.write(divider.format('OUT LOG'))
with open(groomer.logger.log_debug_out, 'r') as debug_out:
out = debug_out.read()
test_log.write(out)
Read test logs in bytes modeimport os
from datetime import datetime
def save_logs(groomer, test_description):
divider = ('=' * 10 + '{}' + '=' * 10 + '\n')
test_log_path = 'tests/test_logs/{}.log'.format(test_description)
with open(test_log_path, 'w+') as test_log:
test_log.write(divider.format('TEST LOG'))
test_log.write(str(datetime.now().time()) + '\n')
test_log.write(test_description + '\n')
test_log.write('-' * 20 + '\n')
with open(groomer.logger.log_path, 'rb') as logfile:
log = logfile.read()
test_log.write(log)
if os.path.exists(groomer.logger.log_debug_err):
test_log.write(divider.format('ERR LOG'))
with open(groomer.logger.log_debug_err, 'rb') as debug_err:
err = debug_err.read()
test_log.write(err)
if os.path.exists(groomer.logger.log_debug_out):
test_log.write(divider.format('OUT LOG'))
with open(groomer.logger.log_debug_out, 'rb') as debug_out:
out = debug_out.read()
test_log.write(out)
|
<commit_before>import os
from datetime import datetime
def save_logs(groomer, test_description):
divider = ('=' * 10 + '{}' + '=' * 10 + '\n')
test_log_path = 'tests/test_logs/{}.log'.format(test_description)
with open(test_log_path, 'w+') as test_log:
test_log.write(divider.format('TEST LOG'))
test_log.write(str(datetime.now().time()) + '\n')
test_log.write(test_description + '\n')
test_log.write('-' * 20 + '\n')
with open(groomer.logger.log_path, 'r') as logfile:
log = logfile.read()
test_log.write(log)
if os.path.exists(groomer.logger.log_debug_err):
test_log.write(divider.format('ERR LOG'))
with open(groomer.logger.log_debug_err, 'r') as debug_err:
err = debug_err.read()
test_log.write(err)
if os.path.exists(groomer.logger.log_debug_out):
test_log.write(divider.format('OUT LOG'))
with open(groomer.logger.log_debug_out, 'r') as debug_out:
out = debug_out.read()
test_log.write(out)
<commit_msg>Read test logs in bytes mode<commit_after>import os
from datetime import datetime
def save_logs(groomer, test_description):
divider = ('=' * 10 + '{}' + '=' * 10 + '\n')
test_log_path = 'tests/test_logs/{}.log'.format(test_description)
with open(test_log_path, 'w+') as test_log:
test_log.write(divider.format('TEST LOG'))
test_log.write(str(datetime.now().time()) + '\n')
test_log.write(test_description + '\n')
test_log.write('-' * 20 + '\n')
with open(groomer.logger.log_path, 'rb') as logfile:
log = logfile.read()
test_log.write(log)
if os.path.exists(groomer.logger.log_debug_err):
test_log.write(divider.format('ERR LOG'))
with open(groomer.logger.log_debug_err, 'rb') as debug_err:
err = debug_err.read()
test_log.write(err)
if os.path.exists(groomer.logger.log_debug_out):
test_log.write(divider.format('OUT LOG'))
with open(groomer.logger.log_debug_out, 'rb') as debug_out:
out = debug_out.read()
test_log.write(out)
|
523df3d68af5c8fbd52e0f86a7201d9c39f50f54
|
lib/writeFiles.py
|
lib/writeFiles.py
|
import json
import sys
import os
f = open(sys.argv[1])
out = sys.argv[2]
data = json.load(f)
for key in data["files"]:
val = data["files"][key]
try:
os.makedirs(out + "/" + os.path.dirname(key))
except:
print "ignoring error"
if type(val) == str or type(val) == unicode:
print "Symlinking " + out + "/" + key
os.symlink(val, out + "/" + key)
if type(val) == dict:
print "Creating file " + out + "/" + key
o = open(out + "/" + key, "w")
o.write(val["content"])
|
import json
import sys
import os
f = open(sys.argv[1])
out = sys.argv[2]
data = json.load(f)
for key in data["files"]:
val = data["files"][key]
try:
os.makedirs(out + "/" + os.path.dirname(key))
except:
print "ignoring error"
if type(val) == str or type(val) == unicode:
print "Symlinking " + out + "/" + key
os.symlink(val, out + "/" + key)
if type(val) == dict:
print "Creating file " + out + "/" + key
with open(out + "/" + key, "w") as f:
f.write(val["content"].encode('UTF-8'))
|
Fix errors when contents of a file contain UTF-8 chars.
|
Fix errors when contents of a file contain UTF-8 chars.
- Convert tabs to spaces.
- Use a more pythonic way to open (and write into) the file.
- Add UTF-8 encoding before writing the contents of the file.
|
Python
|
mit
|
sheenobu/nix-home,sheenobu/nix-home
|
import json
import sys
import os
f = open(sys.argv[1])
out = sys.argv[2]
data = json.load(f)
for key in data["files"]:
val = data["files"][key]
try:
os.makedirs(out + "/" + os.path.dirname(key))
except:
print "ignoring error"
if type(val) == str or type(val) == unicode:
print "Symlinking " + out + "/" + key
os.symlink(val, out + "/" + key)
if type(val) == dict:
print "Creating file " + out + "/" + key
o = open(out + "/" + key, "w")
o.write(val["content"])
Fix errors when contents of a file contain UTF-8 chars.
- Convert tabs to spaces.
- Use a more pythonic way to open (and write into) the file.
- Add UTF-8 encoding before writing the contents of the file.
|
import json
import sys
import os
f = open(sys.argv[1])
out = sys.argv[2]
data = json.load(f)
for key in data["files"]:
val = data["files"][key]
try:
os.makedirs(out + "/" + os.path.dirname(key))
except:
print "ignoring error"
if type(val) == str or type(val) == unicode:
print "Symlinking " + out + "/" + key
os.symlink(val, out + "/" + key)
if type(val) == dict:
print "Creating file " + out + "/" + key
with open(out + "/" + key, "w") as f:
f.write(val["content"].encode('UTF-8'))
|
<commit_before>import json
import sys
import os
f = open(sys.argv[1])
out = sys.argv[2]
data = json.load(f)
for key in data["files"]:
val = data["files"][key]
try:
os.makedirs(out + "/" + os.path.dirname(key))
except:
print "ignoring error"
if type(val) == str or type(val) == unicode:
print "Symlinking " + out + "/" + key
os.symlink(val, out + "/" + key)
if type(val) == dict:
print "Creating file " + out + "/" + key
o = open(out + "/" + key, "w")
o.write(val["content"])
<commit_msg>Fix errors when contents of a file contain UTF-8 chars.
- Convert tabs to spaces.
- Use a more pythonic way to open (and write into) the file.
- Add UTF-8 encoding before writing the contents of the file.<commit_after>
|
import json
import sys
import os
f = open(sys.argv[1])
out = sys.argv[2]
data = json.load(f)
for key in data["files"]:
val = data["files"][key]
try:
os.makedirs(out + "/" + os.path.dirname(key))
except:
print "ignoring error"
if type(val) == str or type(val) == unicode:
print "Symlinking " + out + "/" + key
os.symlink(val, out + "/" + key)
if type(val) == dict:
print "Creating file " + out + "/" + key
with open(out + "/" + key, "w") as f:
f.write(val["content"].encode('UTF-8'))
|
import json
import sys
import os
f = open(sys.argv[1])
out = sys.argv[2]
data = json.load(f)
for key in data["files"]:
val = data["files"][key]
try:
os.makedirs(out + "/" + os.path.dirname(key))
except:
print "ignoring error"
if type(val) == str or type(val) == unicode:
print "Symlinking " + out + "/" + key
os.symlink(val, out + "/" + key)
if type(val) == dict:
print "Creating file " + out + "/" + key
o = open(out + "/" + key, "w")
o.write(val["content"])
Fix errors when contents of a file contain UTF-8 chars.
- Convert tabs to spaces.
- Use a more pythonic way to open (and write into) the file.
- Add UTF-8 encoding before writing the contents of the file.import json
import sys
import os
f = open(sys.argv[1])
out = sys.argv[2]
data = json.load(f)
for key in data["files"]:
val = data["files"][key]
try:
os.makedirs(out + "/" + os.path.dirname(key))
except:
print "ignoring error"
if type(val) == str or type(val) == unicode:
print "Symlinking " + out + "/" + key
os.symlink(val, out + "/" + key)
if type(val) == dict:
print "Creating file " + out + "/" + key
with open(out + "/" + key, "w") as f:
f.write(val["content"].encode('UTF-8'))
|
<commit_before>import json
import sys
import os
f = open(sys.argv[1])
out = sys.argv[2]
data = json.load(f)
for key in data["files"]:
val = data["files"][key]
try:
os.makedirs(out + "/" + os.path.dirname(key))
except:
print "ignoring error"
if type(val) == str or type(val) == unicode:
print "Symlinking " + out + "/" + key
os.symlink(val, out + "/" + key)
if type(val) == dict:
print "Creating file " + out + "/" + key
o = open(out + "/" + key, "w")
o.write(val["content"])
<commit_msg>Fix errors when contents of a file contain UTF-8 chars.
- Convert tabs to spaces.
- Use a more pythonic way to open (and write into) the file.
- Add UTF-8 encoding before writing the contents of the file.<commit_after>import json
import sys
import os
f = open(sys.argv[1])
out = sys.argv[2]
data = json.load(f)
for key in data["files"]:
val = data["files"][key]
try:
os.makedirs(out + "/" + os.path.dirname(key))
except:
print "ignoring error"
if type(val) == str or type(val) == unicode:
print "Symlinking " + out + "/" + key
os.symlink(val, out + "/" + key)
if type(val) == dict:
print "Creating file " + out + "/" + key
with open(out + "/" + key, "w") as f:
f.write(val["content"].encode('UTF-8'))
|
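A sketch of the input the script expects, inferred from the loop above: every value under "files" is either a symlink target string or an object carrying a "content" key. Paths and contents are placeholders.

# -*- coding: utf-8 -*-
spec = {
    "files": {
        ".vimrc": "/nix/store/...-vimrc",                   # string value: symlinked into place
        ".config/app/conf": {"content": u"héllo wörld\n"},  # dict value: written out, UTF-8 encoded
    }
}
# json.dump(spec, open("spec.json", "w")); then: python lib/writeFiles.py spec.json /home/user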
da2376744ec5b1823ea75f3cefbb0de0ac000c1b
|
tests/secrets.py
|
tests/secrets.py
|
# -*- coding: utf-8 -*-
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
|
# -*- coding: utf-8 -*-
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS']
TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET']
TWITTER_ACCESS = os.environ['TWITTER_ACCESS']
TWITTER_SECRET = os.environ['TWITTER_SECRET']
|
Read twitter tokens from .env
|
Read twitter tokens from .env
|
Python
|
mit
|
nestauk/inet
|
# -*- coding: utf-8 -*-
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
Read twitter tokens from .env
|
# -*- coding: utf-8 -*-
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS']
TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET']
TWITTER_ACCESS = os.environ['TWITTER_ACCESS']
TWITTER_SECRET = os.environ['TWITTER_SECRET']
|
<commit_before># -*- coding: utf-8 -*-
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
<commit_msg>Read twitter tokens from .env<commit_after>
|
# -*- coding: utf-8 -*-
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS']
TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET']
TWITTER_ACCESS = os.environ['TWITTER_ACCESS']
TWITTER_SECRET = os.environ['TWITTER_SECRET']
|
# -*- coding: utf-8 -*-
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
Read twitter tokens from .env# -*- coding: utf-8 -*-
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS']
TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET']
TWITTER_ACCESS = os.environ['TWITTER_ACCESS']
TWITTER_SECRET = os.environ['TWITTER_SECRET']
|
<commit_before># -*- coding: utf-8 -*-
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
<commit_msg>Read twitter tokens from .env<commit_after># -*- coding: utf-8 -*-
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPS_KEY = os.environ['OPS_KEY']
OPS_SECRET = os.environ['OPS_SECRET']
TWITTER_CONSUMER_ACCESS = os.environ['TWITTER_CONSUMER_ACCESS']
TWITTER_CONSUMER_SECRET = os.environ['TWITTER_CONSUMER_SECRET']
TWITTER_ACCESS = os.environ['TWITTER_ACCESS']
TWITTER_SECRET = os.environ['TWITTER_SECRET']
|
53be5c9c86d544567f8171baba58128b5ad0502a
|
tests/test_io.py
|
tests/test_io.py
|
import nonstdlib
def test_capture_output():
import sys
with nonstdlib.capture_output() as output:
print('std', end='', file=sys.stdout)
print('st', end='', file=sys.stderr)
print('out', file=sys.stdout)
print('derr', file=sys.stderr)
assert 'stdout' in output
assert 'stderr' in output
assert output.stdout == 'stdout\n'
assert output.stderr == 'stderr\n'
def test_muffle():
with nonstdlib.muffle():
print("""\
This test doesn't really test anything, it just makes sure the
muffle function returns without raising any exceptions. You shouldn't ever see
this message.""")
|
#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import nonstdlib
def test_capture_output():
import sys
with nonstdlib.capture_output() as output:
print('std', end='', file=sys.stdout)
print('st', end='', file=sys.stderr)
print('out', file=sys.stdout)
print('derr', file=sys.stderr)
assert 'stdout' in output
assert 'stderr' in output
assert output.stdout == 'stdout\n'
assert output.stderr == 'stderr\n'
def test_muffle():
with nonstdlib.muffle():
print("""\
This test doesn't really test anything, it just makes sure the
muffle function returns without raising any exceptions. You shouldn't ever see
this message.""")
|
Make the tests compatible with python2.
|
Make the tests compatible with python2.
|
Python
|
mit
|
kalekundert/nonstdlib,KenKundert/nonstdlib,KenKundert/nonstdlib,kalekundert/nonstdlib
|
import nonstdlib
def test_capture_output():
import sys
with nonstdlib.capture_output() as output:
print('std', end='', file=sys.stdout)
print('st', end='', file=sys.stderr)
print('out', file=sys.stdout)
print('derr', file=sys.stderr)
assert 'stdout' in output
assert 'stderr' in output
assert output.stdout == 'stdout\n'
assert output.stderr == 'stderr\n'
def test_muffle():
with nonstdlib.muffle():
print("""\
This test doesn't really test anything, it just makes sure the
muffle function returns without raising any exceptions. You shouldn't ever see
this message.""")
Make the tests compatible with python2.
|
#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import nonstdlib
def test_capture_output():
import sys
with nonstdlib.capture_output() as output:
print('std', end='', file=sys.stdout)
print('st', end='', file=sys.stderr)
print('out', file=sys.stdout)
print('derr', file=sys.stderr)
assert 'stdout' in output
assert 'stderr' in output
assert output.stdout == 'stdout\n'
assert output.stderr == 'stderr\n'
def test_muffle():
with nonstdlib.muffle():
print("""\
This test doesn't really test anything, it just makes sure the
muffle function returns without raising any exceptions. You shouldn't ever see
this message.""")
|
<commit_before>import nonstdlib
def test_capture_output():
import sys
with nonstdlib.capture_output() as output:
print('std', end='', file=sys.stdout)
print('st', end='', file=sys.stderr)
print('out', file=sys.stdout)
print('derr', file=sys.stderr)
assert 'stdout' in output
assert 'stderr' in output
assert output.stdout == 'stdout\n'
assert output.stderr == 'stderr\n'
def test_muffle():
with nonstdlib.muffle():
print("""\
This test doesn't really test anything, it just makes sure the
muffle function returns without raising any exceptions. You shouldn't ever see
this message.""")
<commit_msg>Make the tests compatible with python2.<commit_after>
|
#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import nonstdlib
def test_capture_output():
import sys
with nonstdlib.capture_output() as output:
print('std', end='', file=sys.stdout)
print('st', end='', file=sys.stderr)
print('out', file=sys.stdout)
print('derr', file=sys.stderr)
assert 'stdout' in output
assert 'stderr' in output
assert output.stdout == 'stdout\n'
assert output.stderr == 'stderr\n'
def test_muffle():
with nonstdlib.muffle():
print("""\
This test doesn't really test anything, it just makes sure the
muffle function returns without raising any exceptions. You shouldn't ever see
this message.""")
|
import nonstdlib
def test_capture_output():
import sys
with nonstdlib.capture_output() as output:
print('std', end='', file=sys.stdout)
print('st', end='', file=sys.stderr)
print('out', file=sys.stdout)
print('derr', file=sys.stderr)
assert 'stdout' in output
assert 'stderr' in output
assert output.stdout == 'stdout\n'
assert output.stderr == 'stderr\n'
def test_muffle():
with nonstdlib.muffle():
print("""\
This test doesn't really test anything, it just makes sure the
muffle function returns without raising any exceptions. You shouldn't ever see
this message.""")
Make the tests compatible with python2.#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import nonstdlib
def test_capture_output():
import sys
with nonstdlib.capture_output() as output:
print('std', end='', file=sys.stdout)
print('st', end='', file=sys.stderr)
print('out', file=sys.stdout)
print('derr', file=sys.stderr)
assert 'stdout' in output
assert 'stderr' in output
assert output.stdout == 'stdout\n'
assert output.stderr == 'stderr\n'
def test_muffle():
with nonstdlib.muffle():
print("""\
This test doesn't really test anything, it just makes sure the
muffle function returns without raising any exceptions. You shouldn't ever see
this message.""")
|
<commit_before>import nonstdlib
def test_capture_output():
import sys
with nonstdlib.capture_output() as output:
print('std', end='', file=sys.stdout)
print('st', end='', file=sys.stderr)
print('out', file=sys.stdout)
print('derr', file=sys.stderr)
assert 'stdout' in output
assert 'stderr' in output
assert output.stdout == 'stdout\n'
assert output.stderr == 'stderr\n'
def test_muffle():
with nonstdlib.muffle():
print("""\
This test doesn't really test anything, it just makes sure the
muffle function returns without raising any exceptions. You shouldn't ever see
this message.""")
<commit_msg>Make the tests compatible with python2.<commit_after>#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import nonstdlib
def test_capture_output():
import sys
with nonstdlib.capture_output() as output:
print('std', end='', file=sys.stdout)
print('st', end='', file=sys.stderr)
print('out', file=sys.stdout)
print('derr', file=sys.stderr)
assert 'stdout' in output
assert 'stderr' in output
assert output.stdout == 'stdout\n'
assert output.stderr == 'stderr\n'
def test_muffle():
with nonstdlib.muffle():
print("""\
This test doesn't really test anything, it just makes sure the
muffle function returns without raising any exceptions. You shouldn't ever see
this message.""")
|
8bf8101bc755628e66da95734d30fe24e5d9a121
|
test/__init__.py
|
test/__init__.py
|
from kitten import db
class MockDatabaseMixin(object):
def setup_method(self, method):
db.Base.metadata.create_all(db.engine)
self.session = db.Session()
def teardown_method(self, method):
self.session.close()
db.Base.metadata.drop_all(db.engine)
|
Add database helpers to test base
|
Add database helpers to test base
|
Python
|
mit
|
thiderman/network-kitten
|
Add database helpers to test base
|
from kitten import db
class MockDatabaseMixin(object):
def setup_method(self, method):
db.Base.metadata.create_all(db.engine)
self.session = db.Session()
def teardown_method(self, method):
self.session.close()
db.Base.metadata.drop_all(db.engine)
|
<commit_before><commit_msg>Add database helpers to test base<commit_after>
|
from kitten import db
class MockDatabaseMixin(object):
def setup_method(self, method):
db.Base.metadata.create_all(db.engine)
self.session = db.Session()
def teardown_method(self, method):
self.session.close()
db.Base.metadata.drop_all(db.engine)
|
Add database helpers to test basefrom kitten import db
class MockDatabaseMixin(object):
def setup_method(self, method):
db.Base.metadata.create_all(db.engine)
self.session = db.Session()
def teardown_method(self, method):
self.session.close()
db.Base.metadata.drop_all(db.engine)
|
<commit_before><commit_msg>Add database helpers to test base<commit_after>from kitten import db
class MockDatabaseMixin(object):
def setup_method(self, method):
db.Base.metadata.create_all(db.engine)
self.session = db.Session()
def teardown_method(self, method):
self.session.close()
db.Base.metadata.drop_all(db.engine)
|
|
ee65b4ecd9a94598e1fa9fe2a8a25697c2480477
|
test/conftest.py
|
test/conftest.py
|
import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
|
import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
def pytest_runtest_teardown(item, nextitem):
"""Clear COFS caches after running a test"""
from cofs.utility import linProblemCache, tmpFunctionCache
linProblemCache.clear()
tmpFunctionCache.clear()
|
Clear caches in test item teardown
|
test: Clear caches in test item teardown
Rather than relying on the user manually clearing the COFS function
cache and linear problem cache, clear them in the teardown step of each
test.
|
Python
|
mit
|
tkarna/cofs
|
import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
test: Clear caches in test item teardown
Rather than relying on the user manually clearing the COFS function
cache and linear problem cache, clear them in the teardown step of each
test.
|
import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
def pytest_runtest_teardown(item, nextitem):
"""Clear COFS caches after running a test"""
from cofs.utility import linProblemCache, tmpFunctionCache
linProblemCache.clear()
tmpFunctionCache.clear()
|
<commit_before>import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
<commit_msg>test: Clear caches in test item teardown
Rather than relying on the user manually clearing the COFS function
cache and linear problem cache, clear them in the teardown step of each
test.<commit_after>
|
import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
def pytest_runtest_teardown(item, nextitem):
"""Clear COFS caches after running a test"""
from cofs.utility import linProblemCache, tmpFunctionCache
linProblemCache.clear()
tmpFunctionCache.clear()
|
import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
test: Clear caches in test item teardown
Rather than relying on the user manually clearing the COFS function
cache and linear problem cache, clear them in the teardown step of each
test.import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
def pytest_runtest_teardown(item, nextitem):
"""Clear COFS caches after running a test"""
from cofs.utility import linProblemCache, tmpFunctionCache
linProblemCache.clear()
tmpFunctionCache.clear()
|
<commit_before>import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
<commit_msg>test: Clear caches in test item teardown
Rather than relying on the user manually clearing the COFS function
cache and linear problem cache, clear them in the teardown step of each
test.<commit_after>import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
def pytest_runtest_teardown(item, nextitem):
"""Clear COFS caches after running a test"""
from cofs.utility import linProblemCache, tmpFunctionCache
linProblemCache.clear()
tmpFunctionCache.clear()
|
bf961cf69386404b03d46ebc3ab34b7da804f016
|
test/test_ttt.py
|
test/test_ttt.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ttt
----------------------------------
Tests for `ttt` module.
"""
import unittest
from ttt import ttt
class TestPat(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ttt
----------------------------------
Tests for `ttt` module.
"""
class Test:
def setup(self):
pass
def teardown(self):
pass
def test_(self):
pass
|
Use pytest instead of unittest
|
Use pytest instead of unittest
|
Python
|
isc
|
yerejm/ttt,yerejm/ttt
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ttt
----------------------------------
Tests for `ttt` module.
"""
import unittest
from ttt import ttt
class TestPat(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
Use pytest instead of unittest
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ttt
----------------------------------
Tests for `ttt` module.
"""
class Test:
def setup(self):
pass
def teardown(self):
pass
def test_(self):
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ttt
----------------------------------
Tests for `ttt` module.
"""
import unittest
from ttt import ttt
class TestPat(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
<commit_msg>Use pytest instead of unittest<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ttt
----------------------------------
Tests for `ttt` module.
"""
class Test:
def setup(self):
pass
def teardown(self):
pass
def test_(self):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ttt
----------------------------------
Tests for `ttt` module.
"""
import unittest
from ttt import ttt
class TestPat(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
Use pytest instead of unittest#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ttt
----------------------------------
Tests for `ttt` module.
"""
class Test:
def setup(self):
pass
def teardown(self):
pass
def test_(self):
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ttt
----------------------------------
Tests for `ttt` module.
"""
import unittest
from ttt import ttt
class TestPat(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
<commit_msg>Use pytest instead of unittest<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ttt
----------------------------------
Tests for `ttt` module.
"""
class Test:
def setup(self):
pass
def teardown(self):
pass
def test_(self):
pass
|
43dce889a79b77445eebe0d0e15532b64e7728d5
|
tests/test_upbeatbot.py
|
tests/test_upbeatbot.py
|
import unittest
from libs.upbeatbot import UpBeatBot
class TestUpbeatBot(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.upbeat_bot = UpBeatBot()
def test_chosen_animal_returned(self):
tweet = 'Hey @upbeatbot send me a dog!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
self.assertEqual(animal, 'dog')
def test_random_animal_returned_with_text(self):
tweet = 'Hey @upbeatbot send me a pic!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Not really a test, just ensuring *something* is returned
self.assertTrue(animal)
def test_random_returned_no_text(self):
tweet = '@upbeatbot' # Minimum viable string
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Ditto as above
self.assertTrue(animal)
|
import unittest
from libs.upbeatbot import UpBeatBot
class TestUpbeatBot(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.upbeat_bot = UpBeatBot()
def test_get_animal_from_message_chosen_animal_returned(self):
tweet = 'Hey @upbeatbot send me a dog!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
self.assertEqual(animal, 'dog')
def test__get_animal_from_message_random_animal_returned_with_text(self):
tweet = 'Hey @upbeatbot send me a pic!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Not really a test, just ensuring *something* is returned
self.assertTrue(animal)
def test__get_animal_from_message_random_returned_no_text(self):
tweet = '@upbeatbot' # Minimum viable string
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Ditto as above
self.assertTrue(animal)
|
Use more descriptive unit test names
|
Use more descriptive unit test names
|
Python
|
mit
|
nickdibari/UpBeatBot
|
import unittest
from libs.upbeatbot import UpBeatBot
class TestUpbeatBot(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.upbeat_bot = UpBeatBot()
def test_chosen_animal_returned(self):
tweet = 'Hey @upbeatbot send me a dog!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
self.assertEqual(animal, 'dog')
def test_random_animal_returned_with_text(self):
tweet = 'Hey @upbeatbot send me a pic!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Not really a test, just ensuring *something* is returned
self.assertTrue(animal)
def test_random_returned_no_text(self):
tweet = '@upbeatbot' # Minimum viable string
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Ditto as above
self.assertTrue(animal)
Use more descriptive unit test names
|
import unittest
from libs.upbeatbot import UpBeatBot
class TestUpbeatBot(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.upbeat_bot = UpBeatBot()
def test_get_animal_from_message_chosen_animal_returned(self):
tweet = 'Hey @upbeatbot send me a dog!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
self.assertEqual(animal, 'dog')
def test__get_animal_from_message_random_animal_returned_with_text(self):
tweet = 'Hey @upbeatbot send me a pic!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Not really a test, just ensuring *something* is returned
self.assertTrue(animal)
def test__get_animal_from_message_random_returned_no_text(self):
tweet = '@upbeatbot' # Minimum viable string
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Ditto as above
self.assertTrue(animal)
|
<commit_before>import unittest
from libs.upbeatbot import UpBeatBot
class TestUpbeatBot(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.upbeat_bot = UpBeatBot()
def test_chosen_animal_returned(self):
tweet = 'Hey @upbeatbot send me a dog!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
self.assertEqual(animal, 'dog')
def test_random_animal_returned_with_text(self):
tweet = 'Hey @upbeatbot send me a pic!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Not really a test, just ensuring *something* is returned
self.assertTrue(animal)
def test_random_returned_no_text(self):
tweet = '@upbeatbot' # Minimum viable string
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Ditto as above
self.assertTrue(animal)
<commit_msg>Use more descriptive unit test names<commit_after>
|
import unittest
from libs.upbeatbot import UpBeatBot
class TestUpbeatBot(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.upbeat_bot = UpBeatBot()
def test_get_animal_from_message_chosen_animal_returned(self):
tweet = 'Hey @upbeatbot send me a dog!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
self.assertEqual(animal, 'dog')
def test__get_animal_from_message_random_animal_returned_with_text(self):
tweet = 'Hey @upbeatbot send me a pic!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Not really a test, just ensuring *something* is returned
self.assertTrue(animal)
def test__get_animal_from_message_random_returned_no_text(self):
tweet = '@upbeatbot' # Minimum viable string
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Ditto as above
self.assertTrue(animal)
|
import unittest
from libs.upbeatbot import UpBeatBot
class TestUpbeatBot(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.upbeat_bot = UpBeatBot()
def test_chosen_animal_returned(self):
tweet = 'Hey @upbeatbot send me a dog!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
self.assertEqual(animal, 'dog')
def test_random_animal_returned_with_text(self):
tweet = 'Hey @upbeatbot send me a pic!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Not really a test, just ensuring *something* is returned
self.assertTrue(animal)
def test_random_returned_no_text(self):
tweet = '@upbeatbot' # Minimum viable string
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Ditto as above
self.assertTrue(animal)
Use more descriptive unit test namesimport unittest
from libs.upbeatbot import UpBeatBot
class TestUpbeatBot(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.upbeat_bot = UpBeatBot()
def test_get_animal_from_message_chosen_animal_returned(self):
tweet = 'Hey @upbeatbot send me a dog!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
self.assertEqual(animal, 'dog')
def test__get_animal_from_message_random_animal_returned_with_text(self):
tweet = 'Hey @upbeatbot send me a pic!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Not really a test, just ensuring *something* is returned
self.assertTrue(animal)
def test__get_animal_from_message_random_returned_no_text(self):
tweet = '@upbeatbot' # Minimum viable string
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Ditto as above
self.assertTrue(animal)
|
<commit_before>import unittest
from libs.upbeatbot import UpBeatBot
class TestUpbeatBot(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.upbeat_bot = UpBeatBot()
def test_chosen_animal_returned(self):
tweet = 'Hey @upbeatbot send me a dog!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
self.assertEqual(animal, 'dog')
def test_random_animal_returned_with_text(self):
tweet = 'Hey @upbeatbot send me a pic!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Not really a test, just ensuring *something* is returned
self.assertTrue(animal)
def test_random_returned_no_text(self):
tweet = '@upbeatbot' # Minimum viable string
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Ditto as above
self.assertTrue(animal)
<commit_msg>Use more descriptive unit test names<commit_after>import unittest
from libs.upbeatbot import UpBeatBot
class TestUpbeatBot(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.upbeat_bot = UpBeatBot()
def test_get_animal_from_message_chosen_animal_returned(self):
tweet = 'Hey @upbeatbot send me a dog!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
self.assertEqual(animal, 'dog')
def test__get_animal_from_message_random_animal_returned_with_text(self):
tweet = 'Hey @upbeatbot send me a pic!'
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Not really a test, just ensuring *something* is returned
self.assertTrue(animal)
def test__get_animal_from_message_random_returned_no_text(self):
tweet = '@upbeatbot' # Minimum viable string
animal = self.upbeat_bot._get_animal_from_message(tweet)
# Ditto as above
self.assertTrue(animal)
|
66fc59842fb85bb7b9d1434b7ba6d279c2f454df
|
tip/algorithms/sorting/mergesort.py
|
tip/algorithms/sorting/mergesort.py
|
def merge(a, b):
if len(a) * len(b) == 0:
return a + b
v = (a[0] < b[0] and a or b).pop(0)
return [v] + merge(a, b)
def mergesort(list):
if len(list) < 2:
return list
m = len(list) / 2
return merge(mergesort(list[:int(m)]), mergesort(list[int(m):]))
|
def merge(a, b):
if len(a) * len(b) == 0:
return a + b
v = (a[0] < b[0] and a or b).pop(0)
return [v]+ merge(a, b)
def mergesort(list):
if len(list) < 2:
return list
m = len(list) / 2
return merge(mergesort(list[:int(m)]), mergesort(list[int(m):]))
|
Add intended error to see flake8 report
|
Add intended error to see flake8 report
|
Python
|
unlicense
|
davidgasquez/tip
|
def merge(a, b):
if len(a) * len(b) == 0:
return a + b
v = (a[0] < b[0] and a or b).pop(0)
return [v] + merge(a, b)
def mergesort(list):
if len(list) < 2:
return list
m = len(list) / 2
return merge(mergesort(list[:int(m)]), mergesort(list[int(m):]))
Add intended error to see flake8 report
|
def merge(a, b):
if len(a) * len(b) == 0:
return a + b
v = (a[0] < b[0] and a or b).pop(0)
return [v]+ merge(a, b)
def mergesort(list):
if len(list) < 2:
return list
m = len(list) / 2
return merge(mergesort(list[:int(m)]), mergesort(list[int(m):]))
|
<commit_before>def merge(a, b):
if len(a) * len(b) == 0:
return a + b
v = (a[0] < b[0] and a or b).pop(0)
return [v] + merge(a, b)
def mergesort(list):
if len(list) < 2:
return list
m = len(list) / 2
return merge(mergesort(list[:int(m)]), mergesort(list[int(m):]))
<commit_msg>Add intended error to see flake8 report<commit_after>
|
def merge(a, b):
if len(a) * len(b) == 0:
return a + b
v = (a[0] < b[0] and a or b).pop(0)
return [v]+ merge(a, b)
def mergesort(list):
if len(list) < 2:
return list
m = len(list) / 2
return merge(mergesort(list[:int(m)]), mergesort(list[int(m):]))
|
def merge(a, b):
if len(a) * len(b) == 0:
return a + b
v = (a[0] < b[0] and a or b).pop(0)
return [v] + merge(a, b)
def mergesort(list):
if len(list) < 2:
return list
m = len(list) / 2
return merge(mergesort(list[:int(m)]), mergesort(list[int(m):]))
Add intended error to see flake8 reportdef merge(a, b):
if len(a) * len(b) == 0:
return a + b
v = (a[0] < b[0] and a or b).pop(0)
return [v]+ merge(a, b)
def mergesort(list):
if len(list) < 2:
return list
m = len(list) / 2
return merge(mergesort(list[:int(m)]), mergesort(list[int(m):]))
|
<commit_before>def merge(a, b):
if len(a) * len(b) == 0:
return a + b
v = (a[0] < b[0] and a or b).pop(0)
return [v] + merge(a, b)
def mergesort(list):
if len(list) < 2:
return list
m = len(list) / 2
return merge(mergesort(list[:int(m)]), mergesort(list[int(m):]))
<commit_msg>Add intended error to see flake8 report<commit_after>def merge(a, b):
if len(a) * len(b) == 0:
return a + b
v = (a[0] < b[0] and a or b).pop(0)
return [v]+ merge(a, b)
def mergesort(list):
if len(list) < 2:
return list
m = len(list) / 2
return merge(mergesort(list[:int(m)]), mergesort(list[int(m):]))
|
12b313ed0be7049335046a00844c378b0bed7064
|
helpernmap.py
|
helpernmap.py
|
import nmap
class HelperNmap:
def __init__(self,args=""):
self.args = args
def process(self):
print "Running Scan"
nm = nmap.PortScanner()
nm.scan(hosts='173.255.243.189', arguments='-sV -p1-5000')
for host in nm.all_hosts():
print('----------------------------------------------------')
print('Host : %s (%s)' % (host, nm[host].hostname()))
print('State : %s' % nm[host].state())
for proto in nm[host].all_protocols():
print('----------')
print('Protocol : %s' % proto)
lport = nm[host][proto].keys()
lport.sort()
for port in lport:
if nm[host][proto][port]['state'] == 'open':
print ('port : %s\tstate : %s %s %s ' % (port, nm[host][proto][port]['product'], nm[host][proto][port]['version'], nm[host][proto][port]['cpe']))
|
import nmap
class HelperNmap:
def __init__(self,args=""):
self.args = args
def process(self):
if self.__setParams():
print "Running Scan"
nm = nmap.PortScanner()
nm.scan(hosts=str(self.args), arguments='-sV -p1-5000')
for host in nm.all_hosts():
print('----------------------------------------------------')
print('Host : %s (%s)' % (host, nm[host].hostname()))
print('State : %s' % nm[host].state())
for proto in nm[host].all_protocols():
print('----------')
print('Protocol : %s' % proto)
lport = nm[host][proto].keys()
lport.sort()
for port in lport:
if nm[host][proto][port]['state'] == 'open':
print ('port : %s\tstate : %s %s %s ' % (port, nm[host][proto][port]['product'], nm[host][proto][port]['version'], nm[host][proto][port]['cpe']))
else:
print "Its not a valid argument"
#private function to set params
def __setParams(self):
target = ""
if self.args.find('net:') != -1:
self.args = self.args.split(":")[1]
return True
else:
return False
|
Add network target as a params
|
Add network target as a params
Signed-off-by: Jacobo Tibaquira <d8d2a0ed36dd5f2e41c721fffbe8af2e7e4fe993@gmail.com>
|
Python
|
apache-2.0
|
JKO/nsearch,JKO/nsearch
|
import nmap
class HelperNmap:
def __init__(self,args=""):
self.args = args
def process(self):
print "Running Scan"
nm = nmap.PortScanner()
nm.scan(hosts='173.255.243.189', arguments='-sV -p1-5000')
for host in nm.all_hosts():
print('----------------------------------------------------')
print('Host : %s (%s)' % (host, nm[host].hostname()))
print('State : %s' % nm[host].state())
for proto in nm[host].all_protocols():
print('----------')
print('Protocol : %s' % proto)
lport = nm[host][proto].keys()
lport.sort()
for port in lport:
if nm[host][proto][port]['state'] == 'open':
print ('port : %s\tstate : %s %s %s ' % (port, nm[host][proto][port]['product'], nm[host][proto][port]['version'], nm[host][proto][port]['cpe']))
Add network target as a params
Signed-off-by: Jacobo Tibaquira <d8d2a0ed36dd5f2e41c721fffbe8af2e7e4fe993@gmail.com>
|
import nmap
class HelperNmap:
def __init__(self,args=""):
self.args = args
def process(self):
if self.__setParams():
print "Running Scan"
nm = nmap.PortScanner()
nm.scan(hosts=str(self.args), arguments='-sV -p1-5000')
for host in nm.all_hosts():
print('----------------------------------------------------')
print('Host : %s (%s)' % (host, nm[host].hostname()))
print('State : %s' % nm[host].state())
for proto in nm[host].all_protocols():
print('----------')
print('Protocol : %s' % proto)
lport = nm[host][proto].keys()
lport.sort()
for port in lport:
if nm[host][proto][port]['state'] == 'open':
print ('port : %s\tstate : %s %s %s ' % (port, nm[host][proto][port]['product'], nm[host][proto][port]['version'], nm[host][proto][port]['cpe']))
else:
print "Its not a valid argument"
#private function to set params
def __setParams(self):
target = ""
if self.args.find('net:') != -1:
self.args = self.args.split(":")[1]
return True
else:
return False
|
<commit_before>import nmap
class HelperNmap:
def __init__(self,args=""):
self.args = args
def process(self):
print "Running Scan"
nm = nmap.PortScanner()
nm.scan(hosts='173.255.243.189', arguments='-sV -p1-5000')
for host in nm.all_hosts():
print('----------------------------------------------------')
print('Host : %s (%s)' % (host, nm[host].hostname()))
print('State : %s' % nm[host].state())
for proto in nm[host].all_protocols():
print('----------')
print('Protocol : %s' % proto)
lport = nm[host][proto].keys()
lport.sort()
for port in lport:
if nm[host][proto][port]['state'] == 'open':
print ('port : %s\tstate : %s %s %s ' % (port, nm[host][proto][port]['product'], nm[host][proto][port]['version'], nm[host][proto][port]['cpe']))
<commit_msg>Add network target as a params
Signed-off-by: Jacobo Tibaquira <d8d2a0ed36dd5f2e41c721fffbe8af2e7e4fe993@gmail.com><commit_after>
|
import nmap
class HelperNmap:
def __init__(self,args=""):
self.args = args
def process(self):
if self.__setParams():
print "Running Scan"
nm = nmap.PortScanner()
nm.scan(hosts=str(self.args), arguments='-sV -p1-5000')
for host in nm.all_hosts():
print('----------------------------------------------------')
print('Host : %s (%s)' % (host, nm[host].hostname()))
print('State : %s' % nm[host].state())
for proto in nm[host].all_protocols():
print('----------')
print('Protocol : %s' % proto)
lport = nm[host][proto].keys()
lport.sort()
for port in lport:
if nm[host][proto][port]['state'] == 'open':
print ('port : %s\tstate : %s %s %s ' % (port, nm[host][proto][port]['product'], nm[host][proto][port]['version'], nm[host][proto][port]['cpe']))
else:
print "Its not a valid argument"
#private function to set params
def __setParams(self):
target = ""
if self.args.find('net:') != -1:
self.args = self.args.split(":")[1]
return True
else:
return False
|
import nmap
class HelperNmap:
def __init__(self,args=""):
self.args = args
def process(self):
print "Running Scan"
nm = nmap.PortScanner()
nm.scan(hosts='173.255.243.189', arguments='-sV -p1-5000')
for host in nm.all_hosts():
print('----------------------------------------------------')
print('Host : %s (%s)' % (host, nm[host].hostname()))
print('State : %s' % nm[host].state())
for proto in nm[host].all_protocols():
print('----------')
print('Protocol : %s' % proto)
lport = nm[host][proto].keys()
lport.sort()
for port in lport:
if nm[host][proto][port]['state'] == 'open':
print ('port : %s\tstate : %s %s %s ' % (port, nm[host][proto][port]['product'], nm[host][proto][port]['version'], nm[host][proto][port]['cpe']))
Add network target as a params
Signed-off-by: Jacobo Tibaquira <d8d2a0ed36dd5f2e41c721fffbe8af2e7e4fe993@gmail.com>import nmap
class HelperNmap:
def __init__(self,args=""):
self.args = args
def process(self):
if self.__setParams():
print "Running Scan"
nm = nmap.PortScanner()
nm.scan(hosts=str(self.args), arguments='-sV -p1-5000')
for host in nm.all_hosts():
print('----------------------------------------------------')
print('Host : %s (%s)' % (host, nm[host].hostname()))
print('State : %s' % nm[host].state())
for proto in nm[host].all_protocols():
print('----------')
print('Protocol : %s' % proto)
lport = nm[host][proto].keys()
lport.sort()
for port in lport:
if nm[host][proto][port]['state'] == 'open':
print ('port : %s\tstate : %s %s %s ' % (port, nm[host][proto][port]['product'], nm[host][proto][port]['version'], nm[host][proto][port]['cpe']))
else:
print "Its not a valid argument"
#private function to set params
def __setParams(self):
target = ""
if self.args.find('net:') != -1:
self.args = self.args.split(":")[1]
return True
else:
return False
|
<commit_before>import nmap
class HelperNmap:
def __init__(self,args=""):
self.args = args
def process(self):
print "Running Scan"
nm = nmap.PortScanner()
nm.scan(hosts='173.255.243.189', arguments='-sV -p1-5000')
for host in nm.all_hosts():
print('----------------------------------------------------')
print('Host : %s (%s)' % (host, nm[host].hostname()))
print('State : %s' % nm[host].state())
for proto in nm[host].all_protocols():
print('----------')
print('Protocol : %s' % proto)
lport = nm[host][proto].keys()
lport.sort()
for port in lport:
if nm[host][proto][port]['state'] == 'open':
print ('port : %s\tstate : %s %s %s ' % (port, nm[host][proto][port]['product'], nm[host][proto][port]['version'], nm[host][proto][port]['cpe']))
<commit_msg>Add network target as a params
Signed-off-by: Jacobo Tibaquira <d8d2a0ed36dd5f2e41c721fffbe8af2e7e4fe993@gmail.com><commit_after>import nmap
class HelperNmap:
def __init__(self,args=""):
self.args = args
def process(self):
if self.__setParams():
print "Running Scan"
nm = nmap.PortScanner()
nm.scan(hosts=str(self.args), arguments='-sV -p1-5000')
for host in nm.all_hosts():
print('----------------------------------------------------')
print('Host : %s (%s)' % (host, nm[host].hostname()))
print('State : %s' % nm[host].state())
for proto in nm[host].all_protocols():
print('----------')
print('Protocol : %s' % proto)
lport = nm[host][proto].keys()
lport.sort()
for port in lport:
if nm[host][proto][port]['state'] == 'open':
print ('port : %s\tstate : %s %s %s ' % (port, nm[host][proto][port]['product'], nm[host][proto][port]['version'], nm[host][proto][port]['cpe']))
else:
print "Its not a valid argument"
#private function to set params
def __setParams(self):
target = ""
if self.args.find('net:') != -1:
self.args = self.args.split(":")[1]
return True
else:
return False
|
2dc031aca46c02449eb85ee5149b75951e26e3b9
|
deferrable/backend/sqs.py
|
deferrable/backend/sqs.py
|
from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
def _create_backend_for_group(self, group):
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time)
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
redrive_queue=error_queue)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
|
from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10, name_suffix=None):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
# SQS makes it impossible to separate your queues by environment, so it can
# be useful to include something to make your names unique. Typically you
# will just pass your environment here.
self.name_suffix = name_suffix
def _create_backend_for_group(self, group):
formatted_name = group
if self.name_suffix:
formatted_name += '_{}'.format(self.name_suffix)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(formatted_name)),
self.visibility_timeout,
self.wait_time)
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(formatted_name),
self.visibility_timeout,
self.wait_time,
redrive_queue=error_queue)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
|
Add naming suffix option for SQS backends
|
Add naming suffix option for SQS backends
|
Python
|
mit
|
gamechanger/deferrable
|
from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
def _create_backend_for_group(self, group):
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time)
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
redrive_queue=error_queue)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
Add naming suffix option for SQS backends
|
from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10, name_suffix=None):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
# SQS makes it impossible to separate your queues by environment, so it can
# be useful to include something to make your names unique. Typically you
# will just pass your environment here.
self.name_suffix = name_suffix
def _create_backend_for_group(self, group):
formatted_name = group
if self.name_suffix:
formatted_name += '_{}'.format(self.name_suffix)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(formatted_name)),
self.visibility_timeout,
self.wait_time)
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(formatted_name),
self.visibility_timeout,
self.wait_time,
redrive_queue=error_queue)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
|
<commit_before>from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
def _create_backend_for_group(self, group):
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time)
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
redrive_queue=error_queue)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
<commit_msg>Add naming suffix option for SQS backends<commit_after>
|
from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10, name_suffix=None):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
# SQS makes it impossible to separate your queues by environment, so it can
# be useful to include something to make your names unique. Typically you
# will just pass your environment here.
self.name_suffix = name_suffix
def _create_backend_for_group(self, group):
formatted_name = group
if self.name_suffix:
formatted_name += '_{}'.format(self.name_suffix)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(formatted_name)),
self.visibility_timeout,
self.wait_time)
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(formatted_name),
self.visibility_timeout,
self.wait_time,
redrive_queue=error_queue)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
|
from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
def _create_backend_for_group(self, group):
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time)
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
redrive_queue=error_queue)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
Add naming suffix option for SQS backendsfrom .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10, name_suffix=None):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
# SQS makes it impossible to separate your queues by environment, so it can
# be useful to include something to make your names unique. Typically you
# will just pass your environment here.
self.name_suffix = name_suffix
def _create_backend_for_group(self, group):
formatted_name = group
if self.name_suffix:
formatted_name += '_{}'.format(self.name_suffix)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(formatted_name)),
self.visibility_timeout,
self.wait_time)
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(formatted_name),
self.visibility_timeout,
self.wait_time,
redrive_queue=error_queue)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
|
<commit_before>from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
def _create_backend_for_group(self, group):
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time)
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
redrive_queue=error_queue)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
<commit_msg>Add naming suffix option for SQS backends<commit_after>from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10, name_suffix=None):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
# SQS makes it impossible to separate your queues by environment, so it can
# be useful to include something to make your names unique. Typically you
# will just pass your environment here.
self.name_suffix = name_suffix
def _create_backend_for_group(self, group):
formatted_name = group
if self.name_suffix:
formatted_name += '_{}'.format(self.name_suffix)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(formatted_name)),
self.visibility_timeout,
self.wait_time)
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(formatted_name),
self.visibility_timeout,
self.wait_time,
redrive_queue=error_queue)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
|
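A minimal usage sketch for the name_suffix change above. The constructor arguments match the commit, but the import path, the boto-style connection call and the 'staging' suffix are assumptions for illustration only; the exact queue names also depend on _queue_name, which is not shown in this diff.

# Sketch only -- names marked hypothetical are not part of the commit.
import boto.sqs

from backends.sqs import SQSBackendFactory  # hypothetical import path

def sqs_connection():
    # Hypothetical region/credentials; the thunk is only called when a
    # queue is first needed, which is the point of passing a closure.
    return boto.sqs.connect_to_region('us-east-1')

factory = SQSBackendFactory(sqs_connection,
                            visibility_timeout=30,
                            wait_time=10,
                            name_suffix='staging')
# With the suffix set, queues for the 'emails' group are built from the
# names 'emails_staging' and 'emails_staging_error' rather than plain 'emails'.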
5da99ceeeec050b2732475ffca89a64d2d10f34e
|
trainTweet.py
|
trainTweet.py
|
#!/usr/bin/env python
import subprocess
import argparse
parser = argparse.ArgumentParser(description="Tweet some Train Statuses!")
parser.add_argument("-s", "--station", dest="station", type=str, help="Station Short Code. Ex: 'SLM'")
parser.add_argument("-t", "--train", dest="train", type=int, help="Train Number. Ex: '500'")
parser.add_argument("-d", "--date", dest="date", type=str, help="Date. MM/DD/YYYY")
parser.set_defaults(station=None, train=None, date=None)
args = parser.parse_args()
def main():
def run_command(cmd_array, shell=False):
p = subprocess.Popen(cmd_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
out, err = p.communicate()
return out
command = ['phantomjs', 'trainStatus.js']
if args.station:
command.append(str(args.station))
if args.train:
command.append(str(args.train))
if args.date:
command.append(str(args.date))
print "Executing {}".format(command)
text = run_command(command)
tweet = text.split("-" * 3)
result = run_command(['node', 'twitter.js', tweet[1]])
print result
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
import subprocess
import argparse
import os
parser = argparse.ArgumentParser(description="Tweet some Train Statuses!")
parser.add_argument("-s", "--station", dest="station", type=str, help="Station Short Code. Ex: 'SLM'")
parser.add_argument("-t", "--train", dest="train", type=int, help="Train Number. Ex: '500'")
parser.add_argument("-d", "--date", dest="date", type=str, help="Date. MM/DD/YYYY")
parser.set_defaults(station=None, train=None, date=None)
args = parser.parse_args()
def main():
def run_command(cmd_array, shell=False):
print "Executing `{}`".format(cmd_array)
p = subprocess.Popen(cmd_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
out, err = p.communicate()
return out
command = ['phantomjs', "{}/trainStatus.js".format(os.getcwd())]
if args.station:
command.append(str(args.station))
if args.train:
command.append(str(args.train))
if args.date:
command.append(str(args.date))
text = run_command(command)
tweet = text.split("-" * 3)
result = run_command(['node', "{}/twitter.js".format(os.getcwd()), tweet[1]])
print result
if __name__ == "__main__":
main()
|
Use full paths & print our current command fully
|
Use full paths & print our current command fully
|
Python
|
mit
|
dmiedema/train-500-status
|
#!/usr/bin/env python
import subprocess
import argparse
parser = argparse.ArgumentParser(description="Tweet some Train Statuses!")
parser.add_argument("-s", "--station", dest="station", type=str, help="Station Short Code. Ex: 'SLM'")
parser.add_argument("-t", "--train", dest="train", type=int, help="Train Number. Ex: '500'")
parser.add_argument("-d", "--date", dest="date", type=str, help="Date. MM/DD/YYYY")
parser.set_defaults(station=None, train=None, date=None)
args = parser.parse_args()
def main():
def run_command(cmd_array, shell=False):
p = subprocess.Popen(cmd_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
out, err = p.communicate()
return out
command = ['phantomjs', 'trainStatus.js']
if args.station:
command.append(str(args.station))
if args.train:
command.append(str(args.train))
if args.date:
command.append(str(args.date))
print "Executing {}".format(command)
text = run_command(command)
tweet = text.split("-" * 3)
result = run_command(['node', 'twitter.js', tweet[1]])
print result
if __name__ == "__main__":
main()
Use full paths & print our current command fully
|
#!/usr/bin/env python
import subprocess
import argparse
import os
parser = argparse.ArgumentParser(description="Tweet some Train Statuses!")
parser.add_argument("-s", "--station", dest="station", type=str, help="Station Short Code. Ex: 'SLM'")
parser.add_argument("-t", "--train", dest="train", type=int, help="Train Number. Ex: '500'")
parser.add_argument("-d", "--date", dest="date", type=str, help="Date. MM/DD/YYYY")
parser.set_defaults(station=None, train=None, date=None)
args = parser.parse_args()
def main():
def run_command(cmd_array, shell=False):
print "Executing `{}`".format(cmd_array)
p = subprocess.Popen(cmd_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
out, err = p.communicate()
return out
command = ['phantomjs', "{}/trainStatus.js".format(os.getcwd())]
if args.station:
command.append(str(args.station))
if args.train:
command.append(str(args.train))
if args.date:
command.append(str(args.date))
text = run_command(command)
tweet = text.split("-" * 3)
result = run_command(['node', "{}/twitter.js".format(os.getcwd()), tweet[1]])
print result
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
import subprocess
import argparse
parser = argparse.ArgumentParser(description="Tweet some Train Statuses!")
parser.add_argument("-s", "--station", dest="station", type=str, help="Station Short Code. Ex: 'SLM'")
parser.add_argument("-t", "--train", dest="train", type=int, help="Train Number. Ex: '500'")
parser.add_argument("-d", "--date", dest="date", type=str, help="Date. MM/DD/YYYY")
parser.set_defaults(station=None, train=None, date=None)
args = parser.parse_args()
def main():
def run_command(cmd_array, shell=False):
p = subprocess.Popen(cmd_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
out, err = p.communicate()
return out
command = ['phantomjs', 'trainStatus.js']
if args.station:
command.append(str(args.station))
if args.train:
command.append(str(args.train))
if args.date:
command.append(str(args.date))
print "Executing {}".format(command)
text = run_command(command)
tweet = text.split("-" * 3)
result = run_command(['node', 'twitter.js', tweet[1]])
print result
if __name__ == "__main__":
main()
<commit_msg>Use full paths & print our current command fully<commit_after>
|
#!/usr/bin/env python
import subprocess
import argparse
import os
parser = argparse.ArgumentParser(description="Tweet some Train Statuses!")
parser.add_argument("-s", "--station", dest="station", type=str, help="Station Short Code. Ex: 'SLM'")
parser.add_argument("-t", "--train", dest="train", type=int, help="Train Number. Ex: '500'")
parser.add_argument("-d", "--date", dest="date", type=str, help="Date. MM/DD/YYYY")
parser.set_defaults(station=None, train=None, date=None)
args = parser.parse_args()
def main():
def run_command(cmd_array, shell=False):
print "Executing `{}`".format(cmd_array)
p = subprocess.Popen(cmd_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
out, err = p.communicate()
return out
command = ['phantomjs', "{}/trainStatus.js".format(os.getcwd())]
if args.station:
command.append(str(args.station))
if args.train:
command.append(str(args.train))
if args.date:
command.append(str(args.date))
text = run_command(command)
tweet = text.split("-" * 3)
result = run_command(['node', "{}/twitter.js".format(os.getcwd()), tweet[1]])
print result
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
import subprocess
import argparse
parser = argparse.ArgumentParser(description="Tweet some Train Statuses!")
parser.add_argument("-s", "--station", dest="station", type=str, help="Station Short Code. Ex: 'SLM'")
parser.add_argument("-t", "--train", dest="train", type=int, help="Train Number. Ex: '500'")
parser.add_argument("-d", "--date", dest="date", type=str, help="Date. MM/DD/YYYY")
parser.set_defaults(station=None, train=None, date=None)
args = parser.parse_args()
def main():
def run_command(cmd_array, shell=False):
p = subprocess.Popen(cmd_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
out, err = p.communicate()
return out
command = ['phantomjs', 'trainStatus.js']
if args.station:
command.append(str(args.station))
if args.train:
command.append(str(args.train))
if args.date:
command.append(str(args.date))
print "Executing {}".format(command)
text = run_command(command)
tweet = text.split("-" * 3)
result = run_command(['node', 'twitter.js', tweet[1]])
print result
if __name__ == "__main__":
main()
Use full paths & print our current command fully#!/usr/bin/env python
import subprocess
import argparse
import os
parser = argparse.ArgumentParser(description="Tweet some Train Statuses!")
parser.add_argument("-s", "--station", dest="station", type=str, help="Station Short Code. Ex: 'SLM'")
parser.add_argument("-t", "--train", dest="train", type=int, help="Train Number. Ex: '500'")
parser.add_argument("-d", "--date", dest="date", type=str, help="Date. MM/DD/YYYY")
parser.set_defaults(station=None, train=None, date=None)
args = parser.parse_args()
def main():
def run_command(cmd_array, shell=False):
print "Executing `{}`".format(cmd_array)
p = subprocess.Popen(cmd_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
out, err = p.communicate()
return out
command = ['phantomjs', "{}/trainStatus.js".format(os.getcwd())]
if args.station:
command.append(str(args.station))
if args.train:
command.append(str(args.train))
if args.date:
command.append(str(args.date))
text = run_command(command)
tweet = text.split("-" * 3)
result = run_command(['node', "{}/twitter.js".format(os.getcwd()), tweet[1]])
print result
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
import subprocess
import argparse
parser = argparse.ArgumentParser(description="Tweet some Train Statuses!")
parser.add_argument("-s", "--station", dest="station", type=str, help="Station Short Code. Ex: 'SLM'")
parser.add_argument("-t", "--train", dest="train", type=int, help="Train Number. Ex: '500'")
parser.add_argument("-d", "--date", dest="date", type=str, help="Date. MM/DD/YYYY")
parser.set_defaults(station=None, train=None, date=None)
args = parser.parse_args()
def main():
def run_command(cmd_array, shell=False):
p = subprocess.Popen(cmd_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
out, err = p.communicate()
return out
command = ['phantomjs', 'trainStatus.js']
if args.station:
command.append(str(args.station))
if args.train:
command.append(str(args.train))
if args.date:
command.append(str(args.date))
print "Executing {}".format(command)
text = run_command(command)
tweet = text.split("-" * 3)
result = run_command(['node', 'twitter.js', tweet[1]])
print result
if __name__ == "__main__":
main()
<commit_msg>Use full paths & print our current command fully<commit_after>#!/usr/bin/env python
import subprocess
import argparse
import os
parser = argparse.ArgumentParser(description="Tweet some Train Statuses!")
parser.add_argument("-s", "--station", dest="station", type=str, help="Station Short Code. Ex: 'SLM'")
parser.add_argument("-t", "--train", dest="train", type=int, help="Train Number. Ex: '500'")
parser.add_argument("-d", "--date", dest="date", type=str, help="Date. MM/DD/YYYY")
parser.set_defaults(station=None, train=None, date=None)
args = parser.parse_args()
def main():
def run_command(cmd_array, shell=False):
print "Executing `{}`".format(cmd_array)
p = subprocess.Popen(cmd_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
out, err = p.communicate()
return out
command = ['phantomjs', "{}/trainStatus.js".format(os.getcwd())]
if args.station:
command.append(str(args.station))
if args.train:
command.append(str(args.train))
if args.date:
command.append(str(args.date))
text = run_command(command)
tweet = text.split("-" * 3)
result = run_command(['node', "{}/twitter.js".format(os.getcwd()), tweet[1]])
print result
if __name__ == "__main__":
main()
|
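One note on the change above: os.getcwd() is the directory the script is launched from, not necessarily the directory trainTweet.py lives in, so the fix only holds when the two coincide. A hedged alternative that pins the helper scripts to the script's own location (a variant for comparison, not what the commit does):

# Alternative sketch: resolve trainStatus.js / twitter.js relative to this
# file instead of the current working directory.
import os

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))

command = ['phantomjs', os.path.join(SCRIPT_DIR, 'trainStatus.js')]
tweet_command = ['node', os.path.join(SCRIPT_DIR, 'twitter.js')]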
605340f9c18f1591846e0a9b5f9c983c940d80c9
|
tests/models/test_repository.py
|
tests/models/test_repository.py
|
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
|
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
def test_if_we_find_only_directories_should_return_none(self):
mocked_users = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_path.walk.return_value = [mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo, None)
|
Test if we look after a non-existing repo
|
Test if we look after a non-existing repo
|
Python
|
bsd-2-clause
|
PressLabs/pyolite,shawkinsl/pyolite
|
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
Test if we look after a non-existing repo
|
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
def test_if_we_find_only_directories_should_return_none(self):
mocked_users = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_path.walk.return_value = [mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo, None)
|
<commit_before>from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
<commit_msg>Test if we look after a non-existing repo<commit_after>
|
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
def test_if_we_find_only_directories_should_return_none(self):
mocked_users = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_path.walk.return_value = [mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo, None)
|
from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
Test if we look after a non-existing repofrom nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
def test_if_we_find_only_directories_should_return_none(self):
mocked_users = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_path.walk.return_value = [mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo, None)
|
<commit_before>from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
<commit_msg>Test if we look after a non-existing repo<commit_after>from nose.tools import eq_
from mock import MagicMock, patch
from pyolite.models.repository import Repository
class TestRepositoryModel(object):
def test_it_should_be_possible_to_retrieve_by_name_a_repo(self):
mocked_users = MagicMock()
mocked_file = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_file.isdir.return_value = False
mocked_file.__str__ = lambda x: 'tests/fixtures/get_repo_by_name.conf'
mocked_path.walk.return_value = [mocked_file, mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo.name, 'new_one')
eq_(repo.path, 'simple_path')
eq_(repo.git, 'git')
eq_(repo.users, mocked_users)
def test_if_we_find_only_directories_should_return_none(self):
mocked_users = MagicMock()
mocked_dir = MagicMock()
mocked_path = MagicMock()
mocked_dir.isdir.return_value = True
mocked_path.walk.return_value = [mocked_dir]
with patch.multiple('pyolite.models.repository',
Path=MagicMock(return_value=mocked_path),
ListUsers=MagicMock(return_value=mocked_users)):
repo = Repository.get_by_name('new_one', 'simple_path', 'git')
eq_(repo, None)
|
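The tests above rely on a mock idiom that is easy to get wrong: a magic method such as __str__ must be replaced with a callable that accepts self, hence the lambda x: ... form. A stand-alone sketch of the same idiom, with throwaway names that are not part of pyolite:

# Illustration only: configure __str__ and isdir on a MagicMock the same
# way the tests above do.
from mock import MagicMock

fake_entry = MagicMock()
fake_entry.isdir.return_value = False
fake_entry.__str__ = lambda self: 'tests/fixtures/example.conf'

assert str(fake_entry) == 'tests/fixtures/example.conf'
assert not fake_entry.isdir()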
09dad7d36c6968fc50cf8f2c475608a66bd6571a
|
plugins/expand.py
|
plugins/expand.py
|
# -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2012 Etienne Millon <etienne.millon@gmail.com>
from BeautifulSoup import BeautifulSoup
import requests
import re
from toflib import Plugin
DOMAINS = [ "t.co/"
]
VIDEO_DOMAINS = [ "youtube"
, "youtu.be"
]
def is_mini(url):
for d in DOMAINS:
if d in url:
return True
return False
def is_video(url):
for d in VIDEO_DOMAINS:
if d in url:
return True
return False
def urlExpand(url):
r = requests.get(url)
return r.url
def getTitle(url):
r = requests.get(url)
c = r.content
s = BeautifulSoup(c)
t = s.html.head.title.string
return ''.join(t.split("\n")).strip()
class PluginExpand(Plugin):
def on_url(self, url):
if is_mini(url):
try:
exp = urlExpand(url)
self.say(exp)
except:
pass
if is_video(url):
try:
t = getTitle(url)
self.say(t)
except:
pass
|
# -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2012 Etienne Millon <etienne.millon@gmail.com>
from BeautifulSoup import BeautifulSoup
import requests
import re
from toflib import Plugin
DOMAINS = [ "t.co/"
, "tinyurl.com/"
]
VIDEO_DOMAINS = [ "youtube"
, "youtu.be"
]
def is_mini(url):
for d in DOMAINS:
if d in url:
return True
return False
def is_video(url):
for d in VIDEO_DOMAINS:
if d in url:
return True
return False
def urlExpand(url):
r = requests.get(url)
return r.url
def getTitle(url):
r = requests.get(url)
c = r.content
s = BeautifulSoup(c)
t = s.html.head.title.string
return ''.join(t.split("\n")).strip()
class PluginExpand(Plugin):
def on_url(self, url):
if is_mini(url):
try:
exp = urlExpand(url)
self.say(exp)
except:
pass
if is_video(url):
try:
t = getTitle(url)
self.say(t)
except:
pass
|
Add tinyurl.com as a minifier
|
Add tinyurl.com as a minifier
|
Python
|
bsd-2-clause
|
p0nce/tofbot,tofbot/tofbot,chmduquesne/tofbot,p0nce/tofbot,soulaklabs/tofbot,tofbot/tofbot,martinkirch/tofbot,martinkirch/tofbot,soulaklabs/tofbot
|
# -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2012 Etienne Millon <etienne.millon@gmail.com>
from BeautifulSoup import BeautifulSoup
import requests
import re
from toflib import Plugin
DOMAINS = [ "t.co/"
]
VIDEO_DOMAINS = [ "youtube"
, "youtu.be"
]
def is_mini(url):
for d in DOMAINS:
if d in url:
return True
return False
def is_video(url):
for d in VIDEO_DOMAINS:
if d in url:
return True
return False
def urlExpand(url):
r = requests.get(url)
return r.url
def getTitle(url):
r = requests.get(url)
c = r.content
s = BeautifulSoup(c)
t = s.html.head.title.string
return ''.join(t.split("\n")).strip()
class PluginExpand(Plugin):
def on_url(self, url):
if is_mini(url):
try:
exp = urlExpand(url)
self.say(exp)
except:
pass
if is_video(url):
try:
t = getTitle(url)
self.say(t)
except:
pass
Add tinyurl.com as a minifier
|
# -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2012 Etienne Millon <etienne.millon@gmail.com>
from BeautifulSoup import BeautifulSoup
import requests
import re
from toflib import Plugin
DOMAINS = [ "t.co/"
, "tinyurl.com/"
]
VIDEO_DOMAINS = [ "youtube"
, "youtu.be"
]
def is_mini(url):
for d in DOMAINS:
if d in url:
return True
return False
def is_video(url):
for d in VIDEO_DOMAINS:
if d in url:
return True
return False
def urlExpand(url):
r = requests.get(url)
return r.url
def getTitle(url):
r = requests.get(url)
c = r.content
s = BeautifulSoup(c)
t = s.html.head.title.string
return ''.join(t.split("\n")).strip()
class PluginExpand(Plugin):
def on_url(self, url):
if is_mini(url):
try:
exp = urlExpand(url)
self.say(exp)
except:
pass
if is_video(url):
try:
t = getTitle(url)
self.say(t)
except:
pass
|
<commit_before># -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2012 Etienne Millon <etienne.millon@gmail.com>
from BeautifulSoup import BeautifulSoup
import requests
import re
from toflib import Plugin
DOMAINS = [ "t.co/"
]
VIDEO_DOMAINS = [ "youtube"
, "youtu.be"
]
def is_mini(url):
for d in DOMAINS:
if d in url:
return True
return False
def is_video(url):
for d in VIDEO_DOMAINS:
if d in url:
return True
return False
def urlExpand(url):
r = requests.get(url)
return r.url
def getTitle(url):
r = requests.get(url)
c = r.content
s = BeautifulSoup(c)
t = s.html.head.title.string
return ''.join(t.split("\n")).strip()
class PluginExpand(Plugin):
def on_url(self, url):
if is_mini(url):
try:
exp = urlExpand(url)
self.say(exp)
except:
pass
if is_video(url):
try:
t = getTitle(url)
self.say(t)
except:
pass
<commit_msg>Add tinyurl.com as a minifier<commit_after>
|
# -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2012 Etienne Millon <etienne.millon@gmail.com>
from BeautifulSoup import BeautifulSoup
import requests
import re
from toflib import Plugin
DOMAINS = [ "t.co/"
, "tinyurl.com/"
]
VIDEO_DOMAINS = [ "youtube"
, "youtu.be"
]
def is_mini(url):
for d in DOMAINS:
if d in url:
return True
return False
def is_video(url):
for d in VIDEO_DOMAINS:
if d in url:
return True
return False
def urlExpand(url):
r = requests.get(url)
return r.url
def getTitle(url):
r = requests.get(url)
c = r.content
s = BeautifulSoup(c)
t = s.html.head.title.string
return ''.join(t.split("\n")).strip()
class PluginExpand(Plugin):
def on_url(self, url):
if is_mini(url):
try:
exp = urlExpand(url)
self.say(exp)
except:
pass
if is_video(url):
try:
t = getTitle(url)
self.say(t)
except:
pass
|
# -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2012 Etienne Millon <etienne.millon@gmail.com>
from BeautifulSoup import BeautifulSoup
import requests
import re
from toflib import Plugin
DOMAINS = [ "t.co/"
]
VIDEO_DOMAINS = [ "youtube"
, "youtu.be"
]
def is_mini(url):
for d in DOMAINS:
if d in url:
return True
return False
def is_video(url):
for d in VIDEO_DOMAINS:
if d in url:
return True
return False
def urlExpand(url):
r = requests.get(url)
return r.url
def getTitle(url):
r = requests.get(url)
c = r.content
s = BeautifulSoup(c)
t = s.html.head.title.string
return ''.join(t.split("\n")).strip()
class PluginExpand(Plugin):
def on_url(self, url):
if is_mini(url):
try:
exp = urlExpand(url)
self.say(exp)
except:
pass
if is_video(url):
try:
t = getTitle(url)
self.say(t)
except:
pass
Add tinyurl.com as a minifier# -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2012 Etienne Millon <etienne.millon@gmail.com>
from BeautifulSoup import BeautifulSoup
import requests
import re
from toflib import Plugin
DOMAINS = [ "t.co/"
, "tinyurl.com/"
]
VIDEO_DOMAINS = [ "youtube"
, "youtu.be"
]
def is_mini(url):
for d in DOMAINS:
if d in url:
return True
return False
def is_video(url):
for d in VIDEO_DOMAINS:
if d in url:
return True
return False
def urlExpand(url):
r = requests.get(url)
return r.url
def getTitle(url):
r = requests.get(url)
c = r.content
s = BeautifulSoup(c)
t = s.html.head.title.string
return ''.join(t.split("\n")).strip()
class PluginExpand(Plugin):
def on_url(self, url):
if is_mini(url):
try:
exp = urlExpand(url)
self.say(exp)
except:
pass
if is_video(url):
try:
t = getTitle(url)
self.say(t)
except:
pass
|
<commit_before># -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2012 Etienne Millon <etienne.millon@gmail.com>
from BeautifulSoup import BeautifulSoup
import requests
import re
from toflib import Plugin
DOMAINS = [ "t.co/"
]
VIDEO_DOMAINS = [ "youtube"
, "youtu.be"
]
def is_mini(url):
for d in DOMAINS:
if d in url:
return True
return False
def is_video(url):
for d in VIDEO_DOMAINS:
if d in url:
return True
return False
def urlExpand(url):
r = requests.get(url)
return r.url
def getTitle(url):
r = requests.get(url)
c = r.content
s = BeautifulSoup(c)
t = s.html.head.title.string
return ''.join(t.split("\n")).strip()
class PluginExpand(Plugin):
def on_url(self, url):
if is_mini(url):
try:
exp = urlExpand(url)
self.say(exp)
except:
pass
if is_video(url):
try:
t = getTitle(url)
self.say(t)
except:
pass
<commit_msg>Add tinyurl.com as a minifier<commit_after># -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2012 Etienne Millon <etienne.millon@gmail.com>
from BeautifulSoup import BeautifulSoup
import requests
import re
from toflib import Plugin
DOMAINS = [ "t.co/"
, "tinyurl.com/"
]
VIDEO_DOMAINS = [ "youtube"
, "youtu.be"
]
def is_mini(url):
for d in DOMAINS:
if d in url:
return True
return False
def is_video(url):
for d in VIDEO_DOMAINS:
if d in url:
return True
return False
def urlExpand(url):
r = requests.get(url)
return r.url
def getTitle(url):
r = requests.get(url)
c = r.content
s = BeautifulSoup(c)
t = s.html.head.title.string
return ''.join(t.split("\n")).strip()
class PluginExpand(Plugin):
def on_url(self, url):
if is_mini(url):
try:
exp = urlExpand(url)
self.say(exp)
except:
pass
if is_video(url):
try:
t = getTitle(url)
self.say(t)
except:
pass
|
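urlExpand above works because requests follows redirects by default and exposes the final address as r.url, at the cost of downloading the target page. A hedged variant using a HEAD request avoids the body transfer; not every shortener honours HEAD, so this is a sketch rather than a drop-in replacement (the example link is made up):

# Sketch: resolve a short URL without fetching the page body.
import requests

def url_expand_head(url):
    response = requests.head(url, allow_redirects=True)
    return response.url

# url_expand_head('http://tinyurl.com/example')  # hypothetical link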
66bc3ca0f9bd0e2b20d772e694c819b937a0c346
|
pogom/pokeller.py
|
pogom/pokeller.py
|
# -*- coding: utf-8 -*-
import logging
import time
from threading import Thread
from .models import Pokemon
log = logging.getLogger(__name__)
log.setLevel(level=10)
class PokePoller(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.name = 'pokemon_poller'
self.notify = lambda x: None
def set_callback(self, notify_func):
self.notify = notify_func
def run(self):
while True:
time.sleep(10)
self.notify(Pokemon.get_active())
|
# -*- coding: utf-8 -*-
import logging
import time
from threading import Thread
from .models import Pokemon
log = logging.getLogger(__name__)
log.setLevel(level=10)
class PokePoller(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.name = 'pokemon_poller'
self.notify = lambda x: None
def set_callback(self, notify_func):
self.notify = notify_func
def run(self):
while True:
time.sleep(10)
try:
self.notify(Pokemon.get_active())
except Exception as e:
log.debug(e)
|
Add a debug message to find an unexpected error
|
Add a debug message to find an unexpected error
|
Python
|
mit
|
falau/pogom,falau/pogom,falau/pogom
|
# -*- coding: utf-8 -*-
import logging
import time
from threading import Thread
from .models import Pokemon
log = logging.getLogger(__name__)
log.setLevel(level=10)
class PokePoller(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.name = 'pokemon_poller'
self.notify = lambda x: None
def set_callback(self, notify_func):
self.notify = notify_func
def run(self):
while True:
time.sleep(10)
self.notify(Pokemon.get_active())
Add a debug message to find an unexpected error
|
# -*- coding: utf-8 -*-
import logging
import time
from threading import Thread
from .models import Pokemon
log = logging.getLogger(__name__)
log.setLevel(level=10)
class PokePoller(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.name = 'pokemon_poller'
self.notify = lambda x: None
def set_callback(self, notify_func):
self.notify = notify_func
def run(self):
while True:
time.sleep(10)
try:
self.notify(Pokemon.get_active())
except Exception as e:
log.debug(e)
|
<commit_before># -*- coding: utf-8 -*-
import logging
import time
from threading import Thread
from .models import Pokemon
log = logging.getLogger(__name__)
log.setLevel(level=10)
class PokePoller(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.name = 'pokemon_poller'
self.notify = lambda x: None
def set_callback(self, notify_func):
self.notify = notify_func
def run(self):
while True:
time.sleep(10)
self.notify(Pokemon.get_active())
<commit_msg>Add a debug message to find an unexpected error<commit_after>
|
# -*- coding: utf-8 -*-
import logging
import time
from threading import Thread
from .models import Pokemon
log = logging.getLogger(__name__)
log.setLevel(level=10)
class PokePoller(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.name = 'pokemon_poller'
self.notify = lambda x: None
def set_callback(self, notify_func):
self.notify = notify_func
def run(self):
while True:
time.sleep(10)
try:
self.notify(Pokemon.get_active())
except Exception as e:
log.debug(e)
|
# -*- coding: utf-8 -*-
import logging
import time
from threading import Thread
from .models import Pokemon
log = logging.getLogger(__name__)
log.setLevel(level=10)
class PokePoller(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.name = 'pokemon_poller'
self.notify = lambda x: None
def set_callback(self, notify_func):
self.notify = notify_func
def run(self):
while True:
time.sleep(10)
self.notify(Pokemon.get_active())
Add a debug message to find an unexpected error# -*- coding: utf-8 -*-
import logging
import time
from threading import Thread
from .models import Pokemon
log = logging.getLogger(__name__)
log.setLevel(level=10)
class PokePoller(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.name = 'pokemon_poller'
self.notify = lambda x: None
def set_callback(self, notify_func):
self.notify = notify_func
def run(self):
while True:
time.sleep(10)
try:
self.notify(Pokemon.get_active())
except Exception as e:
log.debug(e)
|
<commit_before># -*- coding: utf-8 -*-
import logging
import time
from threading import Thread
from .models import Pokemon
log = logging.getLogger(__name__)
log.setLevel(level=10)
class PokePoller(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.name = 'pokemon_poller'
self.notify = lambda x: None
def set_callback(self, notify_func):
self.notify = notify_func
def run(self):
while True:
time.sleep(10)
self.notify(Pokemon.get_active())
<commit_msg>Add a debug message to find an unexpected error<commit_after># -*- coding: utf-8 -*-
import logging
import time
from threading import Thread
from .models import Pokemon
log = logging.getLogger(__name__)
log.setLevel(level=10)
class PokePoller(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.name = 'pokemon_poller'
self.notify = lambda x: None
def set_callback(self, notify_func):
self.notify = notify_func
def run(self):
while True:
time.sleep(10)
try:
self.notify(Pokemon.get_active())
except Exception as e:
log.debug(e)
|
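Since the point of the commit is to find an unexpected error, it is worth noting that log.debug(e) records only the exception message and drops the traceback. A minimal sketch of a traceback-preserving wrapper -- the function and argument names are hypothetical, not part of pogom:

# Sketch: keep the stack trace when the notify callback fails.
import logging

log = logging.getLogger('pokemon_poller')

def safe_notify(notify, fetch_active):
    try:
        notify(fetch_active())
    except Exception:
        # Logger.exception logs at ERROR level and appends the traceback.
        log.exception('notify callback failed')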
34e211c2cc3021be7497eb48e68f557c56a2a1ea
|
django_assets/__init__.py
|
django_assets/__init__.py
|
__version__ = (0, 2)
# Make a couple frequently used things available right here.
from bundle import Bundle
from registry import register
|
__version__ = (0, 3, 'dev')
# Make a couple frequently used things available right here.
from bundle import Bundle
from registry import register
|
Increment to next dev version.
|
Increment to next dev version.
|
Python
|
bsd-2-clause
|
scorphus/webassets,wijerasa/webassets,heynemann/webassets,wijerasa/webassets,heynemann/webassets,florianjacob/webassets,john2x/webassets,JDeuce/webassets,scorphus/webassets,glorpen/webassets,aconrad/webassets,JDeuce/webassets,aconrad/webassets,john2x/webassets,florianjacob/webassets,glorpen/webassets,glorpen/webassets,aconrad/webassets,0x1997/webassets,heynemann/webassets,rs/webassets,0x1997/webassets
|
__version__ = (0, 2)
# Make a couple frequently used things available right here.
from bundle import Bundle
from registry import registerIncrement to next dev version.
|
__version__ = (0, 3, 'dev')
# Make a couple frequently used things available right here.
from bundle import Bundle
from registry import register
|
<commit_before>__version__ = (0, 2)
# Make a couple frequently used things available right here.
from bundle import Bundle
from registry import register<commit_msg>Increment to next dev version.<commit_after>
|
__version__ = (0, 3, 'dev')
# Make a couple frequently used things available right here.
from bundle import Bundle
from registry import register
|
__version__ = (0, 2)
# Make a couple frequently used things available right here.
from bundle import Bundle
from registry import registerIncrement to next dev version.__version__ = (0, 3, 'dev')
# Make a couple frequently used things available right here.
from bundle import Bundle
from registry import register
|
<commit_before>__version__ = (0, 2)
# Make a couple frequently used things available right here.
from bundle import Bundle
from registry import register<commit_msg>Increment to next dev version.<commit_after>__version__ = (0, 3, 'dev')
# Make a couple frequently used things available right here.
from bundle import Bundle
from registry import register
|
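The version is kept as a tuple whose last element may be a 'dev' tag. A common way to render that as a display string -- assumed for illustration, not shown anywhere in this commit:

# Assumed helper, not part of django_assets.
__version__ = (0, 3, 'dev')

def get_version(version=__version__):
    return '.'.join(str(part) for part in version)

# get_version() -> '0.3.dev'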
1c1ca68a41e56cb912a9ec9f81ab974324f9d2f4
|
tests/test_filter_refs_prefs.py
|
tests/test_filter_refs_prefs.py
|
# tests.test_filter_refs_prefs
# coding=utf-8
from __future__ import unicode_literals
import nose.tools as nose
import yvs.filter_refs as yvs
from tests.decorators import use_prefs
@use_prefs({'language': 'en', 'version': 59})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
@use_prefs({'language': 'es'})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
|
# tests.test_filter_refs_prefs
# coding=utf-8
from __future__ import unicode_literals
import os.path
import nose.tools as nose
import yvs.filter_refs as yvs
from tests.decorators import use_prefs
@use_prefs({'language': 'en', 'version': 59})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
@use_prefs({'language': 'es'})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
def test_create_data_dir_silent_fail():
"""should silently fail if Alfred data directory already exists"""
yvs.shared.create_alfred_data_dir()
yvs.shared.create_alfred_data_dir()
nose.assert_true(os.path.exists(yvs.shared.ALFRED_DATA_DIR))
|
Add test for silent fail when creating Alfred data dir
|
Add test for silent fail when creating Alfred data dir
|
Python
|
mit
|
caleb531/youversion-suggest,caleb531/youversion-suggest
|
# tests.test_filter_refs_prefs
# coding=utf-8
from __future__ import unicode_literals
import nose.tools as nose
import yvs.filter_refs as yvs
from tests.decorators import use_prefs
@use_prefs({'language': 'en', 'version': 59})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
@use_prefs({'language': 'es'})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
Add test for silent fail when creating Alfred data dir
|
# tests.test_filter_refs_prefs
# coding=utf-8
from __future__ import unicode_literals
import os.path
import nose.tools as nose
import yvs.filter_refs as yvs
from tests.decorators import use_prefs
@use_prefs({'language': 'en', 'version': 59})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
@use_prefs({'language': 'es'})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
def test_create_data_dir_silent_fail():
"""should silently fail if Alfred data directory already exists"""
yvs.shared.create_alfred_data_dir()
yvs.shared.create_alfred_data_dir()
nose.assert_true(os.path.exists(yvs.shared.ALFRED_DATA_DIR))
|
<commit_before># tests.test_filter_refs_prefs
# coding=utf-8
from __future__ import unicode_literals
import nose.tools as nose
import yvs.filter_refs as yvs
from tests.decorators import use_prefs
@use_prefs({'language': 'en', 'version': 59})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
@use_prefs({'language': 'es'})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
<commit_msg>Add test for silent fail when creating Alfred data dir<commit_after>
|
# tests.test_filter_refs_prefs
# coding=utf-8
from __future__ import unicode_literals
import os.path
import nose.tools as nose
import yvs.filter_refs as yvs
from tests.decorators import use_prefs
@use_prefs({'language': 'en', 'version': 59})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
@use_prefs({'language': 'es'})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
def test_create_data_dir_silent_fail():
"""should silently fail if Alfred data directory already exists"""
yvs.shared.create_alfred_data_dir()
yvs.shared.create_alfred_data_dir()
nose.assert_true(os.path.exists(yvs.shared.ALFRED_DATA_DIR))
|
# tests.test_filter_refs_prefs
# coding=utf-8
from __future__ import unicode_literals
import nose.tools as nose
import yvs.filter_refs as yvs
from tests.decorators import use_prefs
@use_prefs({'language': 'en', 'version': 59})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
@use_prefs({'language': 'es'})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
Add test for silent fail when creating Alfred data dir# tests.test_filter_refs_prefs
# coding=utf-8
from __future__ import unicode_literals
import os.path
import nose.tools as nose
import yvs.filter_refs as yvs
from tests.decorators import use_prefs
@use_prefs({'language': 'en', 'version': 59})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
@use_prefs({'language': 'es'})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
def test_create_data_dir_silent_fail():
"""should silently fail if Alfred data directory already exists"""
yvs.shared.create_alfred_data_dir()
yvs.shared.create_alfred_data_dir()
nose.assert_true(os.path.exists(yvs.shared.ALFRED_DATA_DIR))
|
<commit_before># tests.test_filter_refs_prefs
# coding=utf-8
from __future__ import unicode_literals
import nose.tools as nose
import yvs.filter_refs as yvs
from tests.decorators import use_prefs
@use_prefs({'language': 'en', 'version': 59})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
@use_prefs({'language': 'es'})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
<commit_msg>Add test for silent fail when creating Alfred data dir<commit_after># tests.test_filter_refs_prefs
# coding=utf-8
from __future__ import unicode_literals
import os.path
import nose.tools as nose
import yvs.filter_refs as yvs
from tests.decorators import use_prefs
@use_prefs({'language': 'en', 'version': 59})
def test_version_persistence():
"""should remember version preferences"""
results = yvs.get_result_list('mat 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Matthew 4 (ESV)')
@use_prefs({'language': 'es'})
def test_language_persistence():
"""should remember language preferences"""
results = yvs.get_result_list('gá 4')
nose.assert_equal(len(results), 1)
nose.assert_equal(results[0]['title'], 'Gálatas 4 (NVI)')
def test_create_data_dir_silent_fail():
"""should silently fail if Alfred data directory already exists"""
yvs.shared.create_alfred_data_dir()
yvs.shared.create_alfred_data_dir()
nose.assert_true(os.path.exists(yvs.shared.ALFRED_DATA_DIR))
|
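The new test exercises idempotence only from the outside: it calls create_alfred_data_dir twice and checks that the directory exists afterwards. The implementation itself is not shown here; a typical (assumed) shape for such a silently-failing helper is:

# Assumed sketch of an idempotent directory helper; the real
# yvs.shared.create_alfred_data_dir may differ.
import errno
import os

def create_data_dir(path):
    try:
        os.makedirs(path)
    except OSError as error:
        # Ignore "already exists"; re-raise anything else.
        if error.errno != errno.EEXIST:
            raise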
72bc4597e172a5c276057c543998337038ef15f5
|
projects/views.py
|
projects/views.py
|
# This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404
from django.shortcuts import render
from django.template import loader
from .models import Project
def index(request):
'''
Project listing
'''
projects = Project.objects.filter(status=Project.PUBLIC).order_by('-date')
template = loader.get_template('projects/page-index.html')
return HttpResponse(template.render({'projects': projects}))
def project(request, slug):
'''
Display project details
'''
try:
project = Project.objects.get(slug=slug,status=Project.PUBLIC)
except ObjectDoesNotExist:
raise Http404('Project {} does not exist' % slug)
template = loader.get_template('projects/page-project.html')
return HttpResponse(template.render({'project': project}))
|
# This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404, HttpResponse
from django.shortcuts import render
from django.template import loader
from .models import Project
def index(request):
'''
Project listing
'''
projects = Project.objects.exclude(status=Project.HIDDEN).order_by('-date')
template = loader.get_template('projects/page-index.html')
return HttpResponse(template.render({'projects': projects}))
def project(request, slug):
'''
Display project details
'''
try:
project = Project.objects.exclude(status=Project.HIDDEN).get(slug=slug)
except ObjectDoesNotExist:
raise Http404('Project {} does not exist' % slug)
template = loader.get_template('projects/page-project.html')
return HttpResponse(template.render({'project': project}))
|
Add missing include, and switch to exclude projects with HIDDEN status from public display
|
Add missing include, and switch to exclude projects with HIDDEN status from public display
|
Python
|
agpl-3.0
|
lo-windigo/fragdev,lo-windigo/fragdev
|
# This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404
from django.shortcuts import render
from django.template import loader
from .models import Project
def index(request):
'''
Project listing
'''
projects = Project.objects.filter(status=Project.PUBLIC).order_by('-date')
template = loader.get_template('projects/page-index.html')
return HttpResponse(template.render({'projects': projects}))
def project(request, slug):
'''
Display project details
'''
try:
project = Project.objects.get(slug=slug,status=Project.PUBLIC)
except ObjectDoesNotExist:
raise Http404('Project {} does not exist' % slug)
template = loader.get_template('projects/page-project.html')
return HttpResponse(template.render({'project': project}))
Add missing include, and switch to exclude projects with HIDDEN status from public display
|
# This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404, HttpResponse
from django.shortcuts import render
from django.template import loader
from .models import Project
def index(request):
'''
Project listing
'''
projects = Project.objects.exclude(status=Project.HIDDEN).order_by('-date')
template = loader.get_template('projects/page-index.html')
return HttpResponse(template.render({'projects': projects}))
def project(request, slug):
'''
Display project details
'''
try:
project = Project.objects.exclude(status=Project.HIDDEN).get(slug=slug)
except ObjectDoesNotExist:
raise Http404('Project {} does not exist' % slug)
template = loader.get_template('projects/page-project.html')
return HttpResponse(template.render({'project': project}))
|
<commit_before># This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404
from django.shortcuts import render
from django.template import loader
from .models import Project
def index(request):
'''
Project listing
'''
projects = Project.objects.filter(status=Project.PUBLIC).order_by('-date')
template = loader.get_template('projects/page-index.html')
return HttpResponse(template.render({'projects': projects}))
def project(request, slug):
'''
Display project details
'''
try:
project = Project.objects.get(slug=slug,status=Project.PUBLIC)
except ObjectDoesNotExist:
raise Http404('Project {} does not exist' % slug)
template = loader.get_template('projects/page-project.html')
return HttpResponse(template.render({'project': project}))
<commit_msg>Add missing include, and switch to exclude projects with HIDDEN status from public display<commit_after>
|
# This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404, HttpResponse
from django.shortcuts import render
from django.template import loader
from .models import Project
def index(request):
'''
Project listing
'''
projects = Project.objects.exclude(status=Project.HIDDEN).order_by('-date')
template = loader.get_template('projects/page-index.html')
return HttpResponse(template.render({'projects': projects}))
def project(request, slug):
'''
Display project details
'''
try:
project = Project.objects.exclude(status=Project.HIDDEN).get(slug=slug)
except ObjectDoesNotExist:
raise Http404('Project {} does not exist' % slug)
template = loader.get_template('projects/page-project.html')
return HttpResponse(template.render({'project': project}))
|
# This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404
from django.shortcuts import render
from django.template import loader
from .models import Project
def index(request):
'''
Project listing
'''
projects = Project.objects.filter(status=Project.PUBLIC).order_by('-date')
template = loader.get_template('projects/page-index.html')
return HttpResponse(template.render({'projects': projects}))
def project(request, slug):
'''
Display project details
'''
try:
project = Project.objects.get(slug=slug,status=Project.PUBLIC)
except ObjectDoesNotExist:
raise Http404('Project {} does not exist' % slug)
template = loader.get_template('projects/page-project.html')
return HttpResponse(template.render({'project': project}))
Add missing include, and switch to exclude projects with HIDDEN status from public display# This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404, HttpResponse
from django.shortcuts import render
from django.template import loader
from .models import Project
def index(request):
'''
Project listing
'''
projects = Project.objects.exclude(status=Project.HIDDEN).order_by('-date')
template = loader.get_template('projects/page-index.html')
return HttpResponse(template.render({'projects': projects}))
def project(request, slug):
'''
Display project details
'''
try:
project = Project.objects.exclude(status=Project.HIDDEN).get(slug=slug)
except ObjectDoesNotExist:
raise Http404('Project {} does not exist' % slug)
template = loader.get_template('projects/page-project.html')
return HttpResponse(template.render({'project': project}))
|
<commit_before># This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404
from django.shortcuts import render
from django.template import loader
from .models import Project
def index(request):
'''
Project listing
'''
projects = Project.objects.filter(status=Project.PUBLIC).order_by('-date')
template = loader.get_template('projects/page-index.html')
return HttpResponse(template.render({'projects': projects}))
def project(request, slug):
'''
Display project details
'''
try:
project = Project.objects.get(slug=slug,status=Project.PUBLIC)
except ObjectDoesNotExist:
raise Http404('Project {} does not exist' % slug)
template = loader.get_template('projects/page-project.html')
return HttpResponse(template.render({'project': project}))
<commit_msg>Add missing include, and switch to exclude projects with HIDDEN status from public display<commit_after># This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
from django.http import Http404, HttpResponse
from django.shortcuts import render
from django.template import loader
from .models import Project
def index(request):
'''
Project listing
'''
projects = Project.objects.exclude(status=Project.HIDDEN).order_by('-date')
template = loader.get_template('projects/page-index.html')
return HttpResponse(template.render({'projects': projects}))
def project(request, slug):
'''
Display project details
'''
try:
project = Project.objects.exclude(status=Project.HIDDEN).get(slug=slug)
except ObjectDoesNotExist:
raise Http404('Project {} does not exist' % slug)
template = loader.get_template('projects/page-project.html')
return HttpResponse(template.render({'project': project}))
|
6798df4657730484549fdeaa13578c2d7e36f4eb
|
udiskie/automount.py
|
udiskie/automount.py
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self._mounter = mounter
def device_added(self, udevice):
self._mounter.add_device(udevice)
def media_added(self, udevice):
self._mounter.add_device(udevice)
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self._mounter = mounter
def device_added(self, udevice):
self._mounter.add_device(udevice)
def media_added(self, udevice):
self._mounter.add_device(udevice)
def device_changed(self, old_state, new_state):
"""
Check whether is_external changed, then mount
"""
# fixes usecase: mount luks-cleartext when opened by
# non-udiskie-software problem: in this case the device is not seen as
# external from the beginning and thus not mounted
if not old_state.is_external and new_state.is_external:
self._mounter.add_device(new_state)
|
Add function in AutoMounter to handle mounting of LUKS-devices not opened by udiskie
|
Add function in AutoMounter to handle mounting of LUKS-devices not opened by udiskie
|
Python
|
mit
|
pstray/udiskie,coldfix/udiskie,coldfix/udiskie,mathstuf/udiskie,khardix/udiskie,pstray/udiskie
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self._mounter = mounter
def device_added(self, udevice):
self._mounter.add_device(udevice)
def media_added(self, udevice):
self._mounter.add_device(udevice)
Add function in AutoMounter to handle mounting of LUKS-devices not opened by udiskie
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self._mounter = mounter
def device_added(self, udevice):
self._mounter.add_device(udevice)
def media_added(self, udevice):
self._mounter.add_device(udevice)
def device_changed(self, old_state, new_state):
"""
Check whether is_external changed, then mount
"""
# fixes usecase: mount luks-cleartext when opened by
# non-udiskie-software problem: in this case the device is not seen as
# external from the beginning and thus not mounted
if not old_state.is_external and new_state.is_external:
self._mounter.add_device(new_state)
|
<commit_before>"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self._mounter = mounter
def device_added(self, udevice):
self._mounter.add_device(udevice)
def media_added(self, udevice):
self._mounter.add_device(udevice)
<commit_msg>Add function in AutoMounter to handle mounting of LUKS-devices not opened by udiskie<commit_after>
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self._mounter = mounter
def device_added(self, udevice):
self._mounter.add_device(udevice)
def media_added(self, udevice):
self._mounter.add_device(udevice)
def device_changed(self, old_state, new_state):
"""
Check whether is_external changed, then mount
"""
# fixes usecase: mount luks-cleartext when opened by
# non-udiskie-software problem: in this case the device is not seen as
# external from the beginning and thus not mounted
if not old_state.is_external and new_state.is_external:
self._mounter.add_device(new_state)
|
"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self._mounter = mounter
def device_added(self, udevice):
self._mounter.add_device(udevice)
def media_added(self, udevice):
self._mounter.add_device(udevice)
Add function in AutoMounter to handle mounting of LUKS-devices not opened by udiskie"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self._mounter = mounter
def device_added(self, udevice):
self._mounter.add_device(udevice)
def media_added(self, udevice):
self._mounter.add_device(udevice)
def device_changed(self, old_state, new_state):
"""
Check whether is_external changed, then mount
"""
# fixes usecase: mount luks-cleartext when opened by
# non-udiskie-software problem: in this case the device is not seen as
# external from the beginning and thus not mounted
if not old_state.is_external and new_state.is_external:
self._mounter.add_device(new_state)
|
<commit_before>"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self._mounter = mounter
def device_added(self, udevice):
self._mounter.add_device(udevice)
def media_added(self, udevice):
self._mounter.add_device(udevice)
<commit_msg>Add function in AutoMounter to handle mounting of LUKS-devices not opened by udiskie<commit_after>"""
Udiskie automounter daemon.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automatically mount newly added media.
"""
def __init__(self, mounter):
self._mounter = mounter
def device_added(self, udevice):
self._mounter.add_device(udevice)
def media_added(self, udevice):
self._mounter.add_device(udevice)
def device_changed(self, old_state, new_state):
"""
Check whether is_external changed, then mount
"""
# fixes usecase: mount luks-cleartext when opened by
# non-udiskie-software problem: in this case the device is not seen as
# external from the beginning and thus not mounted
if not old_state.is_external and new_state.is_external:
self._mounter.add_device(new_state)
|
3a4b274d4a7e23911843250c73da2ab59cf6649c
|
tests/alerts/alert_test_case.py
|
tests/alerts/alert_test_case.py
|
import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts"))
from alert_test_suite import AlertTestSuite
class AlertTestCase(object):
def __init__(self, description, events=[], events_type='event', expected_alert=None):
self.description = description
# As a result of defining our test cases as class level variables
# we need to copy each event so that other tests dont
# mess with the same instance in memory
self.events = AlertTestSuite.copy(events)
assert any(isinstance(i, list) for i in self.events) is False, 'Test case events contains a sublist when it should not.'
self.events_type = events_type
self.expected_alert = expected_alert
self.full_events = []
def run(self, alert_filename, alert_classname):
alert_file_module = __import__(alert_filename)
alert_class_attr = getattr(alert_file_module, alert_classname)
alert_task = alert_class_attr()
alert_task.run()
return alert_task
|
import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts"))
from alert_test_suite import AlertTestSuite
class AlertTestCase(object):
def __init__(self, description, events=[], expected_alert=None):
self.description = description
# As a result of defining our test cases as class level variables
# we need to copy each event so that other tests dont
# mess with the same instance in memory
self.events = AlertTestSuite.copy(events)
assert any(isinstance(i, list) for i in self.events) is False, 'Test case events contains a sublist when it should not.'
self.expected_alert = expected_alert
self.full_events = []
def run(self, alert_filename, alert_classname):
alert_file_module = __import__(alert_filename)
alert_class_attr = getattr(alert_file_module, alert_classname)
alert_task = alert_class_attr()
alert_task.run()
return alert_task
|
Remove events_type from alert test case
|
Remove events_type from alert test case
|
Python
|
mpl-2.0
|
mpurzynski/MozDef,jeffbryner/MozDef,mozilla/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,jeffbryner/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,jeffbryner/MozDef,mozilla/MozDef,Phrozyn/MozDef,Phrozyn/MozDef,mozilla/MozDef,mozilla/MozDef
|
import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts"))
from alert_test_suite import AlertTestSuite
class AlertTestCase(object):
def __init__(self, description, events=[], events_type='event', expected_alert=None):
self.description = description
# As a result of defining our test cases as class level variables
# we need to copy each event so that other tests dont
# mess with the same instance in memory
self.events = AlertTestSuite.copy(events)
assert any(isinstance(i, list) for i in self.events) is False, 'Test case events contains a sublist when it should not.'
self.events_type = events_type
self.expected_alert = expected_alert
self.full_events = []
def run(self, alert_filename, alert_classname):
alert_file_module = __import__(alert_filename)
alert_class_attr = getattr(alert_file_module, alert_classname)
alert_task = alert_class_attr()
alert_task.run()
return alert_task
Remove events_type from alert test case
|
import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts"))
from alert_test_suite import AlertTestSuite
class AlertTestCase(object):
def __init__(self, description, events=[], expected_alert=None):
self.description = description
# As a result of defining our test cases as class level variables
# we need to copy each event so that other tests dont
# mess with the same instance in memory
self.events = AlertTestSuite.copy(events)
assert any(isinstance(i, list) for i in self.events) is False, 'Test case events contains a sublist when it should not.'
self.expected_alert = expected_alert
self.full_events = []
def run(self, alert_filename, alert_classname):
alert_file_module = __import__(alert_filename)
alert_class_attr = getattr(alert_file_module, alert_classname)
alert_task = alert_class_attr()
alert_task.run()
return alert_task
|
<commit_before>import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts"))
from alert_test_suite import AlertTestSuite
class AlertTestCase(object):
def __init__(self, description, events=[], events_type='event', expected_alert=None):
self.description = description
# As a result of defining our test cases as class level variables
# we need to copy each event so that other tests dont
# mess with the same instance in memory
self.events = AlertTestSuite.copy(events)
assert any(isinstance(i, list) for i in self.events) is False, 'Test case events contains a sublist when it should not.'
self.events_type = events_type
self.expected_alert = expected_alert
self.full_events = []
def run(self, alert_filename, alert_classname):
alert_file_module = __import__(alert_filename)
alert_class_attr = getattr(alert_file_module, alert_classname)
alert_task = alert_class_attr()
alert_task.run()
return alert_task
<commit_msg>Remove events_type from alert test case<commit_after>
|
import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts"))
from alert_test_suite import AlertTestSuite
class AlertTestCase(object):
def __init__(self, description, events=[], expected_alert=None):
self.description = description
# As a result of defining our test cases as class level variables
# we need to copy each event so that other tests dont
# mess with the same instance in memory
self.events = AlertTestSuite.copy(events)
assert any(isinstance(i, list) for i in self.events) is False, 'Test case events contains a sublist when it should not.'
self.expected_alert = expected_alert
self.full_events = []
def run(self, alert_filename, alert_classname):
alert_file_module = __import__(alert_filename)
alert_class_attr = getattr(alert_file_module, alert_classname)
alert_task = alert_class_attr()
alert_task.run()
return alert_task
|
import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts"))
from alert_test_suite import AlertTestSuite
class AlertTestCase(object):
def __init__(self, description, events=[], events_type='event', expected_alert=None):
self.description = description
# As a result of defining our test cases as class level variables
# we need to copy each event so that other tests dont
# mess with the same instance in memory
self.events = AlertTestSuite.copy(events)
assert any(isinstance(i, list) for i in self.events) is False, 'Test case events contains a sublist when it should not.'
self.events_type = events_type
self.expected_alert = expected_alert
self.full_events = []
def run(self, alert_filename, alert_classname):
alert_file_module = __import__(alert_filename)
alert_class_attr = getattr(alert_file_module, alert_classname)
alert_task = alert_class_attr()
alert_task.run()
return alert_task
Remove events_type from alert test caseimport os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts"))
from alert_test_suite import AlertTestSuite
class AlertTestCase(object):
def __init__(self, description, events=[], expected_alert=None):
self.description = description
# As a result of defining our test cases as class level variables
# we need to copy each event so that other tests dont
# mess with the same instance in memory
self.events = AlertTestSuite.copy(events)
assert any(isinstance(i, list) for i in self.events) is False, 'Test case events contains a sublist when it should not.'
self.expected_alert = expected_alert
self.full_events = []
def run(self, alert_filename, alert_classname):
alert_file_module = __import__(alert_filename)
alert_class_attr = getattr(alert_file_module, alert_classname)
alert_task = alert_class_attr()
alert_task.run()
return alert_task
|
<commit_before>import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts"))
from alert_test_suite import AlertTestSuite
class AlertTestCase(object):
def __init__(self, description, events=[], events_type='event', expected_alert=None):
self.description = description
# As a result of defining our test cases as class level variables
# we need to copy each event so that other tests dont
# mess with the same instance in memory
self.events = AlertTestSuite.copy(events)
assert any(isinstance(i, list) for i in self.events) is False, 'Test case events contains a sublist when it should not.'
self.events_type = events_type
self.expected_alert = expected_alert
self.full_events = []
def run(self, alert_filename, alert_classname):
alert_file_module = __import__(alert_filename)
alert_class_attr = getattr(alert_file_module, alert_classname)
alert_task = alert_class_attr()
alert_task.run()
return alert_task
<commit_msg>Remove events_type from alert test case<commit_after>import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../alerts"))
from alert_test_suite import AlertTestSuite
class AlertTestCase(object):
def __init__(self, description, events=[], expected_alert=None):
self.description = description
# As a result of defining our test cases as class level variables
# we need to copy each event so that other tests dont
# mess with the same instance in memory
self.events = AlertTestSuite.copy(events)
assert any(isinstance(i, list) for i in self.events) is False, 'Test case events contains a sublist when it should not.'
self.expected_alert = expected_alert
self.full_events = []
def run(self, alert_filename, alert_classname):
alert_file_module = __import__(alert_filename)
alert_class_attr = getattr(alert_file_module, alert_classname)
alert_task = alert_class_attr()
alert_task.run()
return alert_task
|
ef56181a5c42ab6dc9283822a9a6214f0a97475f
|
yolk/__init__.py
|
yolk/__init__.py
|
"""yolk
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.5.2'
|
"""yolk
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.6'
|
Increment minor version to 0.6
|
Increment minor version to 0.6
|
Python
|
bsd-3-clause
|
myint/yolk,myint/yolk
|
"""yolk
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.5.2'
Increment minor version to 0.6
|
"""yolk
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.6'
|
<commit_before>"""yolk
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.5.2'
<commit_msg>Increment minor version to 0.6<commit_after>
|
"""yolk
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.6'
|
"""yolk
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.5.2'
Increment minor version to 0.6"""yolk
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.6'
|
<commit_before>"""yolk
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.5.2'
<commit_msg>Increment minor version to 0.6<commit_after>"""yolk
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.6'
|
4a75d2b12fdaea392e3cf74eb1335b93c8aacdbd
|
accounts/tests/test_models.py
|
accounts/tests/test_models.py
|
"""accounts app unittests for models
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
TEST_EMAIL = 'newvisitor@example.com'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = get_user_model()(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = get_user_model()()
self.assertTrue(user.is_authenticated())
|
"""accounts app unittests for models
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
USER = get_user_model()
TEST_EMAIL = 'newvisitor@example.com'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated())
|
Tidy up UserModel unit test
|
Tidy up UserModel unit test
|
Python
|
mit
|
randomic/aniauth-tdd,randomic/aniauth-tdd
|
"""accounts app unittests for models
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
TEST_EMAIL = 'newvisitor@example.com'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = get_user_model()(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = get_user_model()()
self.assertTrue(user.is_authenticated())
Tidy up UserModel unit test
|
"""accounts app unittests for models
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
USER = get_user_model()
TEST_EMAIL = 'newvisitor@example.com'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated())
|
<commit_before>"""accounts app unittests for models
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
TEST_EMAIL = 'newvisitor@example.com'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = get_user_model()(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = get_user_model()()
self.assertTrue(user.is_authenticated())
<commit_msg>Tidy up UserModel unit test<commit_after>
|
"""accounts app unittests for models
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
USER = get_user_model()
TEST_EMAIL = 'newvisitor@example.com'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated())
|
"""accounts app unittests for models
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
TEST_EMAIL = 'newvisitor@example.com'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = get_user_model()(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = get_user_model()()
self.assertTrue(user.is_authenticated())
Tidy up UserModel unit test"""accounts app unittests for models
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
USER = get_user_model()
TEST_EMAIL = 'newvisitor@example.com'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated())
|
<commit_before>"""accounts app unittests for models
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
TEST_EMAIL = 'newvisitor@example.com'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = get_user_model()(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = get_user_model()()
self.assertTrue(user.is_authenticated())
<commit_msg>Tidy up UserModel unit test<commit_after>"""accounts app unittests for models
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
USER = get_user_model()
TEST_EMAIL = 'newvisitor@example.com'
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = USER(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = USER()
self.assertTrue(user.is_authenticated())
|
207871f4f057d88f67bad0c371f880664dcee062
|
pydirections/route_requester.py
|
pydirections/route_requester.py
|
"""
This class holds all the necessary information required for a proposed route
"""
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, mode="driving", **kwargs):
self.mode = mode
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
|
"""
This class holds all the necessary information required for a proposed route
"""
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, **kwargs):
self.mode = "driving"
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
|
Build custom route requester class
|
Build custom route requester class
|
Python
|
apache-2.0
|
apranav19/pydirections
|
"""
This class holds all the necessary information required for a proposed route
"""
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, mode="driving", **kwargs):
self.mode = mode
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
Build custom route requester class
|
"""
This class holds all the necessary information required for a proposed route
"""
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, **kwargs):
self.mode = "driving"
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
|
<commit_before>"""
This class holds all the necessary information required for a proposed route
"""
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, mode="driving", **kwargs):
self.mode = mode
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
<commit_msg>Build custom route requester class<commit_after>
|
"""
This class holds all the necessary information required for a proposed route
"""
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, **kwargs):
self.mode = "driving"
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
|
"""
This class holds all the necessary information required for a proposed route
"""
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, mode="driving", **kwargs):
self.mode = mode
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
Build custom route requester class"""
This class holds all the necessary information required for a proposed route
"""
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, **kwargs):
self.mode = "driving"
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
|
<commit_before>"""
This class holds all the necessary information required for a proposed route
"""
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, mode="driving", **kwargs):
self.mode = mode
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
<commit_msg>Build custom route requester class<commit_after>"""
This class holds all the necessary information required for a proposed route
"""
ACCEPTABLE_MODES = set(["driving", "walking", "bicycling", "transit"])
class DirectionsRequest(object):
def __init__(self, **kwargs):
self.mode = "driving"
self.origin = kwargs['origin']
self.destination = kwargs['destination']
def set_api_key(self, key):
self.api_key = key
return self
|
9c12e2d6890a32d93ea6b2a9ae6f000d46182377
|
ga/image/store.py
|
ga/image/store.py
|
# -*- coding: utf-8 -*-
import StringIO
import shortuuid
from boto.s3.connection import S3Connection
from ga import settings
conn = S3Connection(settings.AWS_KEY, settings.AWS_SECRET)
bucket = conn.get_bucket(settings.AWS_BUCKET)
def upload_image_from_pil_image(image):
output = StringIO.StringIO()
image.save(output, 'JPEG')
output.name = 'file'
return upload_image(output)
def upload_image(stream):
uuid = shortuuid.uuid()
key = bucket.new_key(uuid + '.jpg')
key.set_contents_from_string(stream.getvalue())
key.set_metadata('Content-Type', 'image/jpeg')
key.set_acl('public-read')
return key.generate_url(expires_in=0, query_auth=False, force_http=True)
|
# -*- coding: utf-8 -*-
import StringIO
import shortuuid
from boto.s3.connection import S3Connection
from ga import settings
def upload_image_from_pil_image(image):
output = StringIO.StringIO()
image.save(output, 'JPEG')
output.name = 'file'
return upload_image(output)
def upload_image(stream):
conn = S3Connection(settings.AWS_KEY, settings.AWS_SECRET)
bucket = conn.get_bucket(settings.AWS_BUCKET)
key = bucket.new_key(shortuuid.uuid() + '.jpg')
key.set_contents_from_string(stream.getvalue())
key.set_metadata('Content-Type', 'image/jpeg')
key.set_acl('public-read')
return key.generate_url(expires_in=0, query_auth=False, force_http=True)
|
Move S3 bucket connection into `upload_image`
|
Move S3 bucket connection into `upload_image`
|
Python
|
mit
|
alexmic/great-again,alexmic/great-again
|
# -*- coding: utf-8 -*-
import StringIO
import shortuuid
from boto.s3.connection import S3Connection
from ga import settings
conn = S3Connection(settings.AWS_KEY, settings.AWS_SECRET)
bucket = conn.get_bucket(settings.AWS_BUCKET)
def upload_image_from_pil_image(image):
output = StringIO.StringIO()
image.save(output, 'JPEG')
output.name = 'file'
return upload_image(output)
def upload_image(stream):
uuid = shortuuid.uuid()
key = bucket.new_key(uuid + '.jpg')
key.set_contents_from_string(stream.getvalue())
key.set_metadata('Content-Type', 'image/jpeg')
key.set_acl('public-read')
return key.generate_url(expires_in=0, query_auth=False, force_http=True)
Move S3 bucket connection into `upload_image`
|
# -*- coding: utf-8 -*-
import StringIO
import shortuuid
from boto.s3.connection import S3Connection
from ga import settings
def upload_image_from_pil_image(image):
output = StringIO.StringIO()
image.save(output, 'JPEG')
output.name = 'file'
return upload_image(output)
def upload_image(stream):
conn = S3Connection(settings.AWS_KEY, settings.AWS_SECRET)
bucket = conn.get_bucket(settings.AWS_BUCKET)
key = bucket.new_key(shortuuid.uuid() + '.jpg')
key.set_contents_from_string(stream.getvalue())
key.set_metadata('Content-Type', 'image/jpeg')
key.set_acl('public-read')
return key.generate_url(expires_in=0, query_auth=False, force_http=True)
|
<commit_before># -*- coding: utf-8 -*-
import StringIO
import shortuuid
from boto.s3.connection import S3Connection
from ga import settings
conn = S3Connection(settings.AWS_KEY, settings.AWS_SECRET)
bucket = conn.get_bucket(settings.AWS_BUCKET)
def upload_image_from_pil_image(image):
output = StringIO.StringIO()
image.save(output, 'JPEG')
output.name = 'file'
return upload_image(output)
def upload_image(stream):
uuid = shortuuid.uuid()
key = bucket.new_key(uuid + '.jpg')
key.set_contents_from_string(stream.getvalue())
key.set_metadata('Content-Type', 'image/jpeg')
key.set_acl('public-read')
return key.generate_url(expires_in=0, query_auth=False, force_http=True)
<commit_msg>Move S3 bucket connection into `upload_image`<commit_after>
|
# -*- coding: utf-8 -*-
import StringIO
import shortuuid
from boto.s3.connection import S3Connection
from ga import settings
def upload_image_from_pil_image(image):
output = StringIO.StringIO()
image.save(output, 'JPEG')
output.name = 'file'
return upload_image(output)
def upload_image(stream):
conn = S3Connection(settings.AWS_KEY, settings.AWS_SECRET)
bucket = conn.get_bucket(settings.AWS_BUCKET)
key = bucket.new_key(shortuuid.uuid() + '.jpg')
key.set_contents_from_string(stream.getvalue())
key.set_metadata('Content-Type', 'image/jpeg')
key.set_acl('public-read')
return key.generate_url(expires_in=0, query_auth=False, force_http=True)
|
# -*- coding: utf-8 -*-
import StringIO
import shortuuid
from boto.s3.connection import S3Connection
from ga import settings
conn = S3Connection(settings.AWS_KEY, settings.AWS_SECRET)
bucket = conn.get_bucket(settings.AWS_BUCKET)
def upload_image_from_pil_image(image):
output = StringIO.StringIO()
image.save(output, 'JPEG')
output.name = 'file'
return upload_image(output)
def upload_image(stream):
uuid = shortuuid.uuid()
key = bucket.new_key(uuid + '.jpg')
key.set_contents_from_string(stream.getvalue())
key.set_metadata('Content-Type', 'image/jpeg')
key.set_acl('public-read')
return key.generate_url(expires_in=0, query_auth=False, force_http=True)
Move S3 bucket connection into `upload_image`# -*- coding: utf-8 -*-
import StringIO
import shortuuid
from boto.s3.connection import S3Connection
from ga import settings
def upload_image_from_pil_image(image):
output = StringIO.StringIO()
image.save(output, 'JPEG')
output.name = 'file'
return upload_image(output)
def upload_image(stream):
conn = S3Connection(settings.AWS_KEY, settings.AWS_SECRET)
bucket = conn.get_bucket(settings.AWS_BUCKET)
key = bucket.new_key(shortuuid.uuid() + '.jpg')
key.set_contents_from_string(stream.getvalue())
key.set_metadata('Content-Type', 'image/jpeg')
key.set_acl('public-read')
return key.generate_url(expires_in=0, query_auth=False, force_http=True)
|
<commit_before># -*- coding: utf-8 -*-
import StringIO
import shortuuid
from boto.s3.connection import S3Connection
from ga import settings
conn = S3Connection(settings.AWS_KEY, settings.AWS_SECRET)
bucket = conn.get_bucket(settings.AWS_BUCKET)
def upload_image_from_pil_image(image):
output = StringIO.StringIO()
image.save(output, 'JPEG')
output.name = 'file'
return upload_image(output)
def upload_image(stream):
uuid = shortuuid.uuid()
key = bucket.new_key(uuid + '.jpg')
key.set_contents_from_string(stream.getvalue())
key.set_metadata('Content-Type', 'image/jpeg')
key.set_acl('public-read')
return key.generate_url(expires_in=0, query_auth=False, force_http=True)
<commit_msg>Move S3 bucket connection into `upload_image`<commit_after># -*- coding: utf-8 -*-
import StringIO
import shortuuid
from boto.s3.connection import S3Connection
from ga import settings
def upload_image_from_pil_image(image):
output = StringIO.StringIO()
image.save(output, 'JPEG')
output.name = 'file'
return upload_image(output)
def upload_image(stream):
conn = S3Connection(settings.AWS_KEY, settings.AWS_SECRET)
bucket = conn.get_bucket(settings.AWS_BUCKET)
key = bucket.new_key(shortuuid.uuid() + '.jpg')
key.set_contents_from_string(stream.getvalue())
key.set_metadata('Content-Type', 'image/jpeg')
key.set_acl('public-read')
return key.generate_url(expires_in=0, query_auth=False, force_http=True)
|
2293539766043db129dc32634cedced7377eb9fe
|
Lib/test/test_wave.py
|
Lib/test/test_wave.py
|
from test_support import TestFailed
import os, tempfile
import wave
def check(t, msg=None):
if not t:
raise TestFailed, msg
nchannels = 2
sampwidth = 2
framerate = 8000
nframes = 100
testfile = tempfile.mktemp()
f = wave.open(testfile, 'w')
f.setnchannels(nchannels)
f.setsampwidth(sampwidth)
f.setframerate(framerate)
f.setnframes(nframes)
output = '\0' * nframes * nchannels * sampwidth
f.writeframes(output)
f.close()
f = wave.open(testfile, 'r')
check(nchannels == f.getnchannels(), "nchannels")
check(sampwidth == f.getsampwidth(), "sampwidth")
check(framerate == f.getframerate(), "framerate")
check(nframes == f.getnframes(), "nframes")
input = f.readframes(nframes)
check(input == output, "data")
f.close()
os.remove(testfile)
|
from test_support import TestFailed
import os, tempfile
import wave
def check(t, msg=None):
if not t:
raise TestFailed, msg
nchannels = 2
sampwidth = 2
framerate = 8000
nframes = 100
testfile = tempfile.mktemp()
f = wave.open(testfile, 'wb')
f.setnchannels(nchannels)
f.setsampwidth(sampwidth)
f.setframerate(framerate)
f.setnframes(nframes)
output = '\0' * nframes * nchannels * sampwidth
f.writeframes(output)
f.close()
f = wave.open(testfile, 'rb')
check(nchannels == f.getnchannels(), "nchannels")
check(sampwidth == f.getsampwidth(), "sampwidth")
check(framerate == f.getframerate(), "framerate")
check(nframes == f.getnframes(), "nframes")
input = f.readframes(nframes)
check(input == output, "data")
f.close()
os.remove(testfile)
|
Use binary mode to open "wave" files.
|
Use binary mode to open "wave" files.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
from test_support import TestFailed
import os, tempfile
import wave
def check(t, msg=None):
if not t:
raise TestFailed, msg
nchannels = 2
sampwidth = 2
framerate = 8000
nframes = 100
testfile = tempfile.mktemp()
f = wave.open(testfile, 'w')
f.setnchannels(nchannels)
f.setsampwidth(sampwidth)
f.setframerate(framerate)
f.setnframes(nframes)
output = '\0' * nframes * nchannels * sampwidth
f.writeframes(output)
f.close()
f = wave.open(testfile, 'r')
check(nchannels == f.getnchannels(), "nchannels")
check(sampwidth == f.getsampwidth(), "sampwidth")
check(framerate == f.getframerate(), "framerate")
check(nframes == f.getnframes(), "nframes")
input = f.readframes(nframes)
check(input == output, "data")
f.close()
os.remove(testfile)
Use binary mode to open "wave" files.
|
from test_support import TestFailed
import os, tempfile
import wave
def check(t, msg=None):
if not t:
raise TestFailed, msg
nchannels = 2
sampwidth = 2
framerate = 8000
nframes = 100
testfile = tempfile.mktemp()
f = wave.open(testfile, 'wb')
f.setnchannels(nchannels)
f.setsampwidth(sampwidth)
f.setframerate(framerate)
f.setnframes(nframes)
output = '\0' * nframes * nchannels * sampwidth
f.writeframes(output)
f.close()
f = wave.open(testfile, 'rb')
check(nchannels == f.getnchannels(), "nchannels")
check(sampwidth == f.getsampwidth(), "sampwidth")
check(framerate == f.getframerate(), "framerate")
check(nframes == f.getnframes(), "nframes")
input = f.readframes(nframes)
check(input == output, "data")
f.close()
os.remove(testfile)
|
<commit_before>from test_support import TestFailed
import os, tempfile
import wave
def check(t, msg=None):
if not t:
raise TestFailed, msg
nchannels = 2
sampwidth = 2
framerate = 8000
nframes = 100
testfile = tempfile.mktemp()
f = wave.open(testfile, 'w')
f.setnchannels(nchannels)
f.setsampwidth(sampwidth)
f.setframerate(framerate)
f.setnframes(nframes)
output = '\0' * nframes * nchannels * sampwidth
f.writeframes(output)
f.close()
f = wave.open(testfile, 'r')
check(nchannels == f.getnchannels(), "nchannels")
check(sampwidth == f.getsampwidth(), "sampwidth")
check(framerate == f.getframerate(), "framerate")
check(nframes == f.getnframes(), "nframes")
input = f.readframes(nframes)
check(input == output, "data")
f.close()
os.remove(testfile)
<commit_msg>Use binary mode to open "wave" files.<commit_after>
|
from test_support import TestFailed
import os, tempfile
import wave
def check(t, msg=None):
if not t:
raise TestFailed, msg
nchannels = 2
sampwidth = 2
framerate = 8000
nframes = 100
testfile = tempfile.mktemp()
f = wave.open(testfile, 'wb')
f.setnchannels(nchannels)
f.setsampwidth(sampwidth)
f.setframerate(framerate)
f.setnframes(nframes)
output = '\0' * nframes * nchannels * sampwidth
f.writeframes(output)
f.close()
f = wave.open(testfile, 'rb')
check(nchannels == f.getnchannels(), "nchannels")
check(sampwidth == f.getsampwidth(), "sampwidth")
check(framerate == f.getframerate(), "framerate")
check(nframes == f.getnframes(), "nframes")
input = f.readframes(nframes)
check(input == output, "data")
f.close()
os.remove(testfile)
|
from test_support import TestFailed
import os, tempfile
import wave
def check(t, msg=None):
if not t:
raise TestFailed, msg
nchannels = 2
sampwidth = 2
framerate = 8000
nframes = 100
testfile = tempfile.mktemp()
f = wave.open(testfile, 'w')
f.setnchannels(nchannels)
f.setsampwidth(sampwidth)
f.setframerate(framerate)
f.setnframes(nframes)
output = '\0' * nframes * nchannels * sampwidth
f.writeframes(output)
f.close()
f = wave.open(testfile, 'r')
check(nchannels == f.getnchannels(), "nchannels")
check(sampwidth == f.getsampwidth(), "sampwidth")
check(framerate == f.getframerate(), "framerate")
check(nframes == f.getnframes(), "nframes")
input = f.readframes(nframes)
check(input == output, "data")
f.close()
os.remove(testfile)
Use binary mode to open "wave" files.from test_support import TestFailed
import os, tempfile
import wave
def check(t, msg=None):
if not t:
raise TestFailed, msg
nchannels = 2
sampwidth = 2
framerate = 8000
nframes = 100
testfile = tempfile.mktemp()
f = wave.open(testfile, 'wb')
f.setnchannels(nchannels)
f.setsampwidth(sampwidth)
f.setframerate(framerate)
f.setnframes(nframes)
output = '\0' * nframes * nchannels * sampwidth
f.writeframes(output)
f.close()
f = wave.open(testfile, 'rb')
check(nchannels == f.getnchannels(), "nchannels")
check(sampwidth == f.getsampwidth(), "sampwidth")
check(framerate == f.getframerate(), "framerate")
check(nframes == f.getnframes(), "nframes")
input = f.readframes(nframes)
check(input == output, "data")
f.close()
os.remove(testfile)
|
<commit_before>from test_support import TestFailed
import os, tempfile
import wave
def check(t, msg=None):
if not t:
raise TestFailed, msg
nchannels = 2
sampwidth = 2
framerate = 8000
nframes = 100
testfile = tempfile.mktemp()
f = wave.open(testfile, 'w')
f.setnchannels(nchannels)
f.setsampwidth(sampwidth)
f.setframerate(framerate)
f.setnframes(nframes)
output = '\0' * nframes * nchannels * sampwidth
f.writeframes(output)
f.close()
f = wave.open(testfile, 'r')
check(nchannels == f.getnchannels(), "nchannels")
check(sampwidth == f.getsampwidth(), "sampwidth")
check(framerate == f.getframerate(), "framerate")
check(nframes == f.getnframes(), "nframes")
input = f.readframes(nframes)
check(input == output, "data")
f.close()
os.remove(testfile)
<commit_msg>Use binary mode to open "wave" files.<commit_after>from test_support import TestFailed
import os, tempfile
import wave
def check(t, msg=None):
if not t:
raise TestFailed, msg
nchannels = 2
sampwidth = 2
framerate = 8000
nframes = 100
testfile = tempfile.mktemp()
f = wave.open(testfile, 'wb')
f.setnchannels(nchannels)
f.setsampwidth(sampwidth)
f.setframerate(framerate)
f.setnframes(nframes)
output = '\0' * nframes * nchannels * sampwidth
f.writeframes(output)
f.close()
f = wave.open(testfile, 'rb')
check(nchannels == f.getnchannels(), "nchannels")
check(sampwidth == f.getsampwidth(), "sampwidth")
check(framerate == f.getframerate(), "framerate")
check(nframes == f.getnframes(), "nframes")
input = f.readframes(nframes)
check(input == output, "data")
f.close()
os.remove(testfile)
|
1666f883e3f6a497971b484c9ba875df2f6693a2
|
test/testall.py
|
test/testall.py
|
#!/usr/bin/env python
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
import re
import sys
from _common import unittest
pkgpath = os.path.dirname(__file__) or '.'
sys.path.append(pkgpath)
os.chdir(pkgpath)
def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
for fname in os.listdir(pkgpath):
match = re.match(r'(test_\S+)\.py$', fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
#!/usr/bin/env python
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
import re
import sys
from _common import unittest
pkgpath = os.path.dirname(__file__) or '.'
sys.path.append(pkgpath)
os.chdir(pkgpath)
# Make sure we use local version of beetsplug and not system namespaced version
# for tests
try:
del sys.modules["beetsplug"]
except KeyError:
pass
def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
for fname in os.listdir(pkgpath):
match = re.match(r'(test_\S+)\.py$', fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Fix python namespaces for test runs
|
Fix python namespaces for test runs
We need to make sure we don't use namespaced versions that are already installed
on the system but rather use local version from current sources
|
Python
|
mit
|
SusannaMaria/beets,mathstuf/beets,mathstuf/beets,YetAnotherNerd/beets,lengtche/beets,LordSputnik/beets,shamangeorge/beets,ibmibmibm/beets,m-urban/beets,krig/beets,lightwang1/beets,shamangeorge/beets,MyTunesFreeMusic/privacy-policy,jcoady9/beets,SusannaMaria/beets,beetbox/beets,Andypsamp/CODfinalJUNIT,Andypsamp/CODfinalJUNIT,jcoady9/beets,pkess/beets,PierreRust/beets,tima/beets,mried/beets,pkess/beets,Freso/beets,bj-yinyan/beets,beetbox/beets,dfc/beets,YetAnotherNerd/beets,tima/beets,ruippeixotog/beets,diego-plan9/beets,drm00/beets,ruippeixotog/beets,marcuskrahl/beets,kareemallen/beets,arabenjamin/beets,drm00/beets,parapente/beets,Dishwishy/beets,madmouser1/beets,imsparsh/beets,Freso/beets,mathstuf/beets,andremiller/beets,LordSputnik/beets,moodboom/beets,YetAnotherNerd/beets,mosesfistos1/beetbox,multikatt/beets,jackwilsdon/beets,jmwatte/beets,jayme-github/beets,asteven/beets,xsteadfastx/beets,m-urban/beets,bj-yinyan/beets,YetAnotherNerd/beets,LordSputnik/beets,Dishwishy/beets,Kraymer/beets,mosesfistos1/beetbox,ruippeixotog/beets,jcoady9/beets,randybias/beets,untitaker/beets,PierreRust/beets,beetbox/beets,mried/beets,artemutin/beets,shanemikel/beets,Freso/beets,Andypsamp/CODfinalJUNIT,lightwang1/beets,shanemikel/beets,kelvinhammond/beets,mried/beets,gabrielaraujof/beets,ttsda/beets,randybias/beets,krig/beets,sadatay/beets,sampsyo/beets,parapente/beets,kareemallen/beets,ttsda/beets,swt30/beets,PierreRust/beets,imsparsh/beets,sampsyo/beets,madmouser1/beets,gabrielaraujof/beets,pkess/beets,jackwilsdon/beets,m-urban/beets,arabenjamin/beets,drm00/beets,Andypsamp/CODfinalJUNIT,beetbox/beets,sadatay/beets,ibmibmibm/beets,untitaker/beets,moodboom/beets,SusannaMaria/beets,sampsyo/beets,tima/beets,ttsda/beets,jmwatte/beets,kelvinhammond/beets,drm00/beets,jayme-github/beets,xsteadfastx/beets,kareemallen/beets,jackwilsdon/beets,MyTunesFreeMusic/privacy-policy,xsteadfastx/beets,jbaiter/beets,m-urban/beets,parapente/beets,lengtche/beets,randybias/beets,mosesfistos1/beetbox,tima/beets,andremiller/beets,PierreRust/beets,kelvinhammond/beets,artemutin/beets,marcuskrahl/beets,diego-plan9/beets,xsteadfastx/beets,gabrielaraujof/beets,arabenjamin/beets,Wen777/beets,imsparsh/beets,swt30/beets,arabenjamin/beets,diego-plan9/beets,asteven/beets,MyTunesFreeMusic/privacy-policy,lengtche/beets,dfc/beets,sampsyo/beets,bj-yinyan/beets,Kraymer/beets,moodboom/beets,shanemikel/beets,swt30/beets,madmouser1/beets,asteven/beets,Freso/beets,ttsda/beets,Kraymer/beets,randybias/beets,Andypsamp/CODjunit,parapente/beets,Wen777/beets,jcoady9/beets,swt30/beets,multikatt/beets,bj-yinyan/beets,kareemallen/beets,ruippeixotog/beets,Andypsamp/CODjunit,shamangeorge/beets,lengtche/beets,MyTunesFreeMusic/privacy-policy,lightwang1/beets,lightwang1/beets,LordSputnik/beets,artemutin/beets,Wen777/beets,untitaker/beets,multikatt/beets,Andypsamp/CODfinalJUNIT,marcuskrahl/beets,shamangeorge/beets,andremiller/beets,mried/beets,jackwilsdon/beets,dfc/beets,gabrielaraujof/beets,mosesfistos1/beetbox,SusannaMaria/beets,marcuskrahl/beets,asteven/beets,Andypsamp/CODjunit,moodboom/beets,madmouser1/beets,ibmibmibm/beets,dfc/beets,artemutin/beets,diego-plan9/beets,sadatay/beets,Andypsamp/CODjunit,sadatay/beets,Dishwishy/beets,ibmibmibm/beets,Dishwishy/beets,mathstuf/beets,Kraymer/beets,pkess/beets,imsparsh/beets,Andypsamp/CODjunit,krig/beets,jbaiter/beets,jmwatte/beets,multikatt/beets,jmwatte/beets,shanemikel/beets,kelvinhammond/beets,untitaker/beets
|
#!/usr/bin/env python
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
import re
import sys
from _common import unittest
pkgpath = os.path.dirname(__file__) or '.'
sys.path.append(pkgpath)
os.chdir(pkgpath)
def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
for fname in os.listdir(pkgpath):
match = re.match(r'(test_\S+)\.py$', fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
if __name__ == '__main__':
unittest.main(defaultTest='suite')
Fix python namespaces for test runs
We need to make sure we don't use namespaced versions that are already installed
on the system but rather use local version from current sources
|
#!/usr/bin/env python
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
import re
import sys
from _common import unittest
pkgpath = os.path.dirname(__file__) or '.'
sys.path.append(pkgpath)
os.chdir(pkgpath)
# Make sure we use local version of beetsplug and not system namespaced version
# for tests
try:
del sys.modules["beetsplug"]
except KeyError:
pass
def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
for fname in os.listdir(pkgpath):
match = re.match(r'(test_\S+)\.py$', fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
<commit_before>#!/usr/bin/env python
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
import re
import sys
from _common import unittest
pkgpath = os.path.dirname(__file__) or '.'
sys.path.append(pkgpath)
os.chdir(pkgpath)
def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
for fname in os.listdir(pkgpath):
match = re.match(r'(test_\S+)\.py$', fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
if __name__ == '__main__':
unittest.main(defaultTest='suite')
<commit_msg>Fix python namespaces for test runs
We need to make sure we don't use namespaced versions that are already installed
on the system but rather use local version from current sources<commit_after>
|
#!/usr/bin/env python
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
import re
import sys
from _common import unittest
pkgpath = os.path.dirname(__file__) or '.'
sys.path.append(pkgpath)
os.chdir(pkgpath)
# Make sure we use local version of beetsplug and not system namespaced version
# for tests
try:
del sys.modules["beetsplug"]
except KeyError:
pass
def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
for fname in os.listdir(pkgpath):
match = re.match(r'(test_\S+)\.py$', fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
#!/usr/bin/env python
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
import re
import sys
from _common import unittest
pkgpath = os.path.dirname(__file__) or '.'
sys.path.append(pkgpath)
os.chdir(pkgpath)
def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
for fname in os.listdir(pkgpath):
match = re.match(r'(test_\S+)\.py$', fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
if __name__ == '__main__':
unittest.main(defaultTest='suite')
Fix python namespaces for test runs
We need to make sure we don't use namespaced versions that are already installed
on the system but rather use local version from current sources#!/usr/bin/env python
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
import re
import sys
from _common import unittest
pkgpath = os.path.dirname(__file__) or '.'
sys.path.append(pkgpath)
os.chdir(pkgpath)
# Make sure we use local version of beetsplug and not system namespaced version
# for tests
try:
del sys.modules["beetsplug"]
except KeyError:
pass
def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
for fname in os.listdir(pkgpath):
match = re.match(r'(test_\S+)\.py$', fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
<commit_before>#!/usr/bin/env python
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
import re
import sys
from _common import unittest
pkgpath = os.path.dirname(__file__) or '.'
sys.path.append(pkgpath)
os.chdir(pkgpath)
def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
for fname in os.listdir(pkgpath):
match = re.match(r'(test_\S+)\.py$', fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
if __name__ == '__main__':
unittest.main(defaultTest='suite')
<commit_msg>Fix python namespaces for test runs
We need to make sure we don't use namespaced versions that are already installed
on the system but rather use local version from current sources<commit_after>#!/usr/bin/env python
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os
import re
import sys
from _common import unittest
pkgpath = os.path.dirname(__file__) or '.'
sys.path.append(pkgpath)
os.chdir(pkgpath)
# Make sure we use local version of beetsplug and not system namespaced version
# for tests
try:
del sys.modules["beetsplug"]
except KeyError:
pass
def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
for fname in os.listdir(pkgpath):
match = re.match(r'(test_\S+)\.py$', fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
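The fix in the record above works because beetsplug is a namespace package: a copy already imported from a system-wide install stays cached in sys.modules and shadows the checkout that the test runner has just put on sys.path, so the cached entry has to be discarded before the local sources can win. A minimal sketch of that pattern, with a placeholder package name and path rather than anything from the beets code base:

import sys

sys.path.insert(0, '/path/to/local/checkout')  # placeholder for the local source tree
sys.modules.pop('somepackage', None)           # drop any copy cached from a system-wide install
import somepackage                             # re-imported, now resolved against the local path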
aa4a0b1640dab90a4867614f0d00cca99601e342
|
south/models.py
|
south/models.py
|
from django.db import models
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
class Meta:
unique_together = (('app_name', 'migration'),)
@classmethod
def for_migration(cls, migration):
try:
return cls.objects.get(app_name=migration.app_label(),
migration=migration.name())
except cls.DoesNotExist:
return cls(app_name=migration.app_label(),
migration=migration.name())
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
|
from django.db import models
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration):
try:
return cls.objects.get(app_name=migration.app_label(),
migration=migration.name())
except cls.DoesNotExist:
return cls(app_name=migration.app_label(),
migration=migration.name())
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
|
Remove unique_together on the model; the key length was too long on wide-character MySQL installs.
|
Remove unique_together on the model; the key length was too long on wide-character MySQL installs.
|
Python
|
apache-2.0
|
theatlantic/django-south,theatlantic/django-south
|
from django.db import models
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
class Meta:
unique_together = (('app_name', 'migration'),)
@classmethod
def for_migration(cls, migration):
try:
return cls.objects.get(app_name=migration.app_label(),
migration=migration.name())
except cls.DoesNotExist:
return cls(app_name=migration.app_label(),
migration=migration.name())
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
Remove unique_together on the model; the key length was too long on wide-character MySQL installs.
|
from django.db import models
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration):
try:
return cls.objects.get(app_name=migration.app_label(),
migration=migration.name())
except cls.DoesNotExist:
return cls(app_name=migration.app_label(),
migration=migration.name())
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
|
<commit_before>from django.db import models
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
class Meta:
unique_together = (('app_name', 'migration'),)
@classmethod
def for_migration(cls, migration):
try:
return cls.objects.get(app_name=migration.app_label(),
migration=migration.name())
except cls.DoesNotExist:
return cls(app_name=migration.app_label(),
migration=migration.name())
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
<commit_msg>Remove unique_together on the model; the key length was too long on wide-character MySQL installs.<commit_after>
|
from django.db import models
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration):
try:
return cls.objects.get(app_name=migration.app_label(),
migration=migration.name())
except cls.DoesNotExist:
return cls(app_name=migration.app_label(),
migration=migration.name())
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
|
from django.db import models
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
class Meta:
unique_together = (('app_name', 'migration'),)
@classmethod
def for_migration(cls, migration):
try:
return cls.objects.get(app_name=migration.app_label(),
migration=migration.name())
except cls.DoesNotExist:
return cls(app_name=migration.app_label(),
migration=migration.name())
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
Remove unique_together on the model; the key length was too long on wide-character MySQL installs.from django.db import models
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration):
try:
return cls.objects.get(app_name=migration.app_label(),
migration=migration.name())
except cls.DoesNotExist:
return cls(app_name=migration.app_label(),
migration=migration.name())
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
|
<commit_before>from django.db import models
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
class Meta:
unique_together = (('app_name', 'migration'),)
@classmethod
def for_migration(cls, migration):
try:
return cls.objects.get(app_name=migration.app_label(),
migration=migration.name())
except cls.DoesNotExist:
return cls(app_name=migration.app_label(),
migration=migration.name())
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
<commit_msg>Remove unique_together on the model; the key length was too long on wide-character MySQL installs.<commit_after>from django.db import models
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration):
try:
return cls.objects.get(app_name=migration.app_label(),
migration=migration.name())
except cls.DoesNotExist:
return cls(app_name=migration.app_label(),
migration=migration.name())
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
|
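The key-length problem mentioned in the commit message above is a MySQL index-size limit: with a multi-byte character set each character of a VARCHAR column is budgeted at 3 bytes (utf8) or 4 bytes (utf8mb4) in the index key, and MySQL historically caps an InnoDB key part at 767 bytes and a MyISAM key at 1000 bytes in total. A rough back-of-the-envelope check of why the composite unique index stops fitting on such installs:

    255 characters x 4 bytes (utf8mb4) = 1020 bytes  > 767-byte InnoDB key-part limit
    2 columns x 255 characters x 3 bytes (utf8) = 1530 bytes > 1000-byte MyISAM total key limit

These figures are the old defaults and can differ with newer row formats, but they explain why dropping unique_together was the pragmatic fix.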
cfa77bed245d80d453f7b463ea35473bbc29cc50
|
toolkits/rdk.py
|
toolkits/rdk.py
|
import numpy as np
from cinfony import rdk
from cinfony.rdk import *
class Fingerprint(rdk.Fingerprint):
@property
def raw(self):
return np.array(self.fp, dtype=bool)
rdk.Fingerprint = Fingerprint
|
import numpy as np
from cinfony import rdk
from cinfony.rdk import *
class Fingerprint(rdk.Fingerprint):
@property
def raw(self):
return np.array(self.fp, dtype=bool)
rdk.Fingerprint = Fingerprint
# Patch reader not to return None as molecules
def _readfile(format, filename):
for mol in rdk.readfile(format, filename):
if mol is not None:
yield mol
rdk.readfile = _readfile
|
Patch readfile not to return mols
|
Patch readfile not to return mols
|
Python
|
bsd-3-clause
|
mwojcikowski/opendrugdiscovery
|
import numpy as np
from cinfony import rdk
from cinfony.rdk import *
class Fingerprint(rdk.Fingerprint):
@property
def raw(self):
return np.array(self.fp, dtype=bool)
rdk.Fingerprint = Fingerprint
Patch readfile not to return mols
|
import numpy as np
from cinfony import rdk
from cinfony.rdk import *
class Fingerprint(rdk.Fingerprint):
@property
def raw(self):
return np.array(self.fp, dtype=bool)
rdk.Fingerprint = Fingerprint
# Patch reader not to return None as molecules
def _readfile(format, filename):
for mol in rdk.readfile(format, filename):
if mol is not None:
yield mol
rdk.readfile = _readfile
|
<commit_before>import numpy as np
from cinfony import rdk
from cinfony.rdk import *
class Fingerprint(rdk.Fingerprint):
@property
def raw(self):
return np.array(self.fp, dtype=bool)
rdk.Fingerprint = Fingerprint
<commit_msg>Patch readfile not to return mols<commit_after>
|
import numpy as np
from cinfony import rdk
from cinfony.rdk import *
class Fingerprint(rdk.Fingerprint):
@property
def raw(self):
return np.array(self.fp, dtype=bool)
rdk.Fingerprint = Fingerprint
# Patch reader not to return None as molecules
def _readfile(format, filename):
for mol in rdk.readfile(format, filename):
if mol is not None:
yield mol
rdk.readfile = _readfile
|
import numpy as np
from cinfony import rdk
from cinfony.rdk import *
class Fingerprint(rdk.Fingerprint):
@property
def raw(self):
return np.array(self.fp, dtype=bool)
rdk.Fingerprint = Fingerprint
Patch readfile not to return molsimport numpy as np
from cinfony import rdk
from cinfony.rdk import *
class Fingerprint(rdk.Fingerprint):
@property
def raw(self):
return np.array(self.fp, dtype=bool)
rdk.Fingerprint = Fingerprint
# Patch reader not to return None as molecules
def _readfile(format, filename):
for mol in rdk.readfile(format, filename):
if mol is not None:
yield mol
rdk.readfile = _readfile
|
<commit_before>import numpy as np
from cinfony import rdk
from cinfony.rdk import *
class Fingerprint(rdk.Fingerprint):
@property
def raw(self):
return np.array(self.fp, dtype=bool)
rdk.Fingerprint = Fingerprint
<commit_msg>Patch readfile not to return mols<commit_after>import numpy as np
from cinfony import rdk
from cinfony.rdk import *
class Fingerprint(rdk.Fingerprint):
@property
def raw(self):
return np.array(self.fp, dtype=bool)
rdk.Fingerprint = Fingerprint
# Patch reader not to return None as molecules
def _readfile(format, filename):
for mol in rdk.readfile(format, filename):
if mol is not None:
yield mol
rdk.readfile = _readfile
|
e6c774c13a01f2cbe09947c39c1dac7b4989bebc
|
jason2/project.py
|
jason2/project.py
|
class Project(object):
"""Holds project configuration parameters, such as data directory."""
def __init__(self, data_directory):
self.data_directory = data_directory
|
import ConfigParser
class Project(object):
"""Holds project configuration parameters, such as data directory."""
@classmethod
def from_config(cls, filename):
config = ConfigParser.RawConfigParser()
config.read(filename)
return cls(config.get("data", "directory"))
def __init__(self, data_directory):
self.data_directory = data_directory
|
Add from_configfile method to Project
|
Add from_configfile method to Project
|
Python
|
mit
|
gadomski/jason2
|
class Project(object):
"""Holds project configuration parameters, such as data directory."""
def __init__(self, data_directory):
self.data_directory = data_directory
Add from_configfile method to Project
|
import ConfigParser
class Project(object):
"""Holds project configuration parameters, such as data directory."""
@classmethod
def from_config(cls, filename):
config = ConfigParser.RawConfigParser()
config.read(filename)
return cls(config.get("data", "directory"))
def __init__(self, data_directory):
self.data_directory = data_directory
|
<commit_before>class Project(object):
"""Holds project configuration parameters, such as data directory."""
def __init__(self, data_directory):
self.data_directory = data_directory
<commit_msg>Add from_configfile method to Project<commit_after>
|
import ConfigParser
class Project(object):
"""Holds project configuration parameters, such as data directory."""
@classmethod
def from_config(cls, filename):
config = ConfigParser.RawConfigParser()
config.read(filename)
return cls(config.get("data", "directory"))
def __init__(self, data_directory):
self.data_directory = data_directory
|
class Project(object):
"""Holds project configuration parameters, such as data directory."""
def __init__(self, data_directory):
self.data_directory = data_directory
Add from_configfile method to Projectimport ConfigParser
class Project(object):
"""Holds project configuration parameters, such as data directory."""
@classmethod
def from_config(cls, filename):
config = ConfigParser.RawConfigParser()
config.read(filename)
return cls(config.get("data", "directory"))
def __init__(self, data_directory):
self.data_directory = data_directory
|
<commit_before>class Project(object):
"""Holds project configuration parameters, such as data directory."""
def __init__(self, data_directory):
self.data_directory = data_directory
<commit_msg>Add from_configfile method to Project<commit_after>import ConfigParser
class Project(object):
"""Holds project configuration parameters, such as data directory."""
@classmethod
def from_config(cls, filename):
config = ConfigParser.RawConfigParser()
config.read(filename)
return cls(config.get("data", "directory"))
def __init__(self, data_directory):
self.data_directory = data_directory
|
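Since RawConfigParser parses INI-style files, the from_config classmethod added above expects a file with a [data] section containing a directory option. A small hypothetical example — the file name and the import path are guesses based on the file location, not taken from the project:

from jason2.project import Project  # import path inferred from jason2/project.py

# contents of a hypothetical jason2.cfg:
#
#   [data]
#   directory = /path/to/jason2/data

project = Project.from_config('jason2.cfg')
print(project.data_directory)  # -> /path/to/jason2/data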
1a90e2c7b9155f35ced7e8f51707a2d86ad3ca93
|
scripts/Driver.py
|
scripts/Driver.py
|
'''
Created on Feb 20, 2013
@author: crisr
'''
import xml.etree.ElementTree as ET
import os
from Simulation import Simulation
debug = True
if __name__ == '__main__':
#open the XML
try:
inputFile = 'test.xml' #sys.argv[1]
except:
raise IOError ('input file not provided')
workingDir = os.getcwd()
if not os.path.isabs(inputFile):
inputFile = os.path.join(workingDir,inputFile)
if not os.path.exists(inputFile):
print('file not found '+inputFile)
try:
tree = ET.parse(inputFile)
if debug: print('opened file '+inputFile)
except:
tree = ET.parse(inputFile)
raise IOError ('not able to parse ' + inputFile)
root = tree.getroot()
#generate all the components of the simulation
simulation = Simulation(inputFile)
simulation.XMLread(root)
simulation.run()
|
'''
Created on Feb 20, 2013
@author: crisr
'''
import xml.etree.ElementTree as ET
import os
from Simulation import Simulation
import sys
debug = True
if __name__ == '__main__':
#open the XML
try:
if len(sys.argv) == 1:
inputFile = 'test.xml'
else:
inputFile = sys.argv[1]
except:
raise IOError ('input file not provided')
workingDir = os.getcwd()
if not os.path.isabs(inputFile):
inputFile = os.path.join(workingDir,inputFile)
if not os.path.exists(inputFile):
print('file not found '+inputFile)
try:
tree = ET.parse(inputFile)
if debug: print('opened file '+inputFile)
except:
tree = ET.parse(inputFile)
raise IOError ('not able to parse ' + inputFile)
root = tree.getroot()
#generate all the components of the simulation
simulation = Simulation(inputFile)
simulation.XMLread(root)
simulation.run()
|
Allow different input files to be specified.
|
Allow different input files to be specified.
r18478
|
Python
|
apache-2.0
|
joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,idaholab/raven,joshua-cogliati-inl/raven,idaholab/raven,idaholab/raven,idaholab/raven,idaholab/raven,idaholab/raven,idaholab/raven,joshua-cogliati-inl/raven
|
'''
Created on Feb 20, 2013
@author: crisr
'''
import xml.etree.ElementTree as ET
import os
from Simulation import Simulation
debug = True
if __name__ == '__main__':
#open the XML
try:
inputFile = 'test.xml' #sys.argv[1]
except:
raise IOError ('input file not provided')
workingDir = os.getcwd()
if not os.path.isabs(inputFile):
inputFile = os.path.join(workingDir,inputFile)
if not os.path.exists(inputFile):
print('file not found '+inputFile)
try:
tree = ET.parse(inputFile)
if debug: print('opened file '+inputFile)
except:
tree = ET.parse(inputFile)
raise IOError ('not able to parse ' + inputFile)
root = tree.getroot()
#generate all the components of the simulation
simulation = Simulation(inputFile)
simulation.XMLread(root)
simulation.run()Allow different input files to be specified.
r18478
|
'''
Created on Feb 20, 2013
@author: crisr
'''
import xml.etree.ElementTree as ET
import os
from Simulation import Simulation
import sys
debug = True
if __name__ == '__main__':
#open the XML
try:
if len(sys.argv) == 1:
inputFile = 'test.xml'
else:
inputFile = sys.argv[1]
except:
raise IOError ('input file not provided')
workingDir = os.getcwd()
if not os.path.isabs(inputFile):
inputFile = os.path.join(workingDir,inputFile)
if not os.path.exists(inputFile):
print('file not found '+inputFile)
try:
tree = ET.parse(inputFile)
if debug: print('opened file '+inputFile)
except:
tree = ET.parse(inputFile)
raise IOError ('not able to parse ' + inputFile)
root = tree.getroot()
#generate all the components of the simulation
simulation = Simulation(inputFile)
simulation.XMLread(root)
simulation.run()
|
<commit_before>'''
Created on Feb 20, 2013
@author: crisr
'''
import xml.etree.ElementTree as ET
import os
from Simulation import Simulation
debug = True
if __name__ == '__main__':
#open the XML
try:
inputFile = 'test.xml' #sys.argv[1]
except:
raise IOError ('input file not provided')
workingDir = os.getcwd()
if not os.path.isabs(inputFile):
inputFile = os.path.join(workingDir,inputFile)
if not os.path.exists(inputFile):
print('file not found '+inputFile)
try:
tree = ET.parse(inputFile)
if debug: print('opened file '+inputFile)
except:
tree = ET.parse(inputFile)
raise IOError ('not able to parse ' + inputFile)
root = tree.getroot()
#generate all the components of the simulation
simulation = Simulation(inputFile)
simulation.XMLread(root)
simulation.run()<commit_msg>Allow different input files to be specified.
r18478<commit_after>
|
'''
Created on Feb 20, 2013
@author: crisr
'''
import xml.etree.ElementTree as ET
import os
from Simulation import Simulation
import sys
debug = True
if __name__ == '__main__':
#open the XML
try:
if len(sys.argv) == 1:
inputFile = 'test.xml'
else:
inputFile = sys.argv[1]
except:
raise IOError ('input file not provided')
workingDir = os.getcwd()
if not os.path.isabs(inputFile):
inputFile = os.path.join(workingDir,inputFile)
if not os.path.exists(inputFile):
print('file not found '+inputFile)
try:
tree = ET.parse(inputFile)
if debug: print('opened file '+inputFile)
except:
tree = ET.parse(inputFile)
raise IOError ('not able to parse ' + inputFile)
root = tree.getroot()
#generate all the components of the simulation
simulation = Simulation(inputFile)
simulation.XMLread(root)
simulation.run()
|
'''
Created on Feb 20, 2013
@author: crisr
'''
import xml.etree.ElementTree as ET
import os
from Simulation import Simulation
debug = True
if __name__ == '__main__':
#open the XML
try:
inputFile = 'test.xml' #sys.argv[1]
except:
raise IOError ('input file not provided')
workingDir = os.getcwd()
if not os.path.isabs(inputFile):
inputFile = os.path.join(workingDir,inputFile)
if not os.path.exists(inputFile):
print('file not found '+inputFile)
try:
tree = ET.parse(inputFile)
if debug: print('opened file '+inputFile)
except:
tree = ET.parse(inputFile)
raise IOError ('not able to parse ' + inputFile)
root = tree.getroot()
#generate all the components of the simulation
simulation = Simulation(inputFile)
simulation.XMLread(root)
simulation.run()Allow different input files to be specified.
r18478'''
Created on Feb 20, 2013
@author: crisr
'''
import xml.etree.ElementTree as ET
import os
from Simulation import Simulation
import sys
debug = True
if __name__ == '__main__':
#open the XML
try:
if len(sys.argv) == 1:
inputFile = 'test.xml'
else:
inputFile = sys.argv[1]
except:
raise IOError ('input file not provided')
workingDir = os.getcwd()
if not os.path.isabs(inputFile):
inputFile = os.path.join(workingDir,inputFile)
if not os.path.exists(inputFile):
print('file not found '+inputFile)
try:
tree = ET.parse(inputFile)
if debug: print('opened file '+inputFile)
except:
tree = ET.parse(inputFile)
raise IOError ('not able to parse ' + inputFile)
root = tree.getroot()
#generate all the components of the simulation
simulation = Simulation(inputFile)
simulation.XMLread(root)
simulation.run()
|
<commit_before>'''
Created on Feb 20, 2013
@author: crisr
'''
import xml.etree.ElementTree as ET
import os
from Simulation import Simulation
debug = True
if __name__ == '__main__':
#open the XML
try:
inputFile = 'test.xml' #sys.argv[1]
except:
raise IOError ('input file not provided')
workingDir = os.getcwd()
if not os.path.isabs(inputFile):
inputFile = os.path.join(workingDir,inputFile)
if not os.path.exists(inputFile):
print('file not found '+inputFile)
try:
tree = ET.parse(inputFile)
if debug: print('opened file '+inputFile)
except:
tree = ET.parse(inputFile)
raise IOError ('not able to parse ' + inputFile)
root = tree.getroot()
#generate all the components of the simulation
simulation = Simulation(inputFile)
simulation.XMLread(root)
simulation.run()<commit_msg>Allow different input files to be specified.
r18478<commit_after>'''
Created on Feb 20, 2013
@author: crisr
'''
import xml.etree.ElementTree as ET
import os
from Simulation import Simulation
import sys
debug = True
if __name__ == '__main__':
#open the XML
try:
if len(sys.argv) == 1:
inputFile = 'test.xml'
else:
inputFile = sys.argv[1]
except:
raise IOError ('input file not provided')
workingDir = os.getcwd()
if not os.path.isabs(inputFile):
inputFile = os.path.join(workingDir,inputFile)
if not os.path.exists(inputFile):
print('file not found '+inputFile)
try:
tree = ET.parse(inputFile)
if debug: print('opened file '+inputFile)
except:
tree = ET.parse(inputFile)
raise IOError ('not able to parse ' + inputFile)
root = tree.getroot()
#generate all the components of the simulation
simulation = Simulation(inputFile)
simulation.XMLread(root)
simulation.run()
|
686406781af00d93e4d70049499068037d72be74
|
geotrek/core/tests/test_forms.py
|
geotrek/core/tests/test_forms.py
|
from django.conf import settings
from django.test import TestCase
from unittest import skipIf
from geotrek.core.factories import TrailFactory
from geotrek.authent.factories import UserFactory
from geotrek.core.forms import TrailForm
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class TopologyFormTest(TestCase):
def test_save_form_when_topology_has_not_changed(self):
user = UserFactory()
topo = TrailFactory()
form = TrailForm(instance=topo, user=user)
self.assertEqual(topo, form.instance)
form.cleaned_data = {'topology': topo}
form.save()
self.assertEqual(topo, form.instance)
|
from django.conf import settings
from django.test import TestCase
from unittest import skipIf
from geotrek.core.factories import TrailFactory, PathFactory
from geotrek.authent.factories import UserFactory
from geotrek.core.forms import TrailForm, PathForm
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class TopologyFormTest(TestCase):
def test_save_form_when_topology_has_not_changed(self):
user = UserFactory()
topo = TrailFactory()
form = TrailForm(instance=topo, user=user)
self.assertEqual(topo, form.instance)
form.cleaned_data = {'topology': topo}
form.save()
self.assertEqual(topo, form.instance)
class PathFormTest(TestCase):
def test_overlapping_path(self):
user = UserFactory()
PathFactory.create(geom='SRID=4326;LINESTRING(3 45, 3 46)')
# Just intersecting
form1 = PathForm(
user=user,
data={'geom': '{"geom": "LINESTRING(2.5 45.5, 3.5 45.5)", "snap": [null, null]}'}
)
self.assertTrue(form1.is_valid(), str(form1.errors))
# Overlapping
form2 = PathForm(
user=user,
data={'geom': '{"geom": "LINESTRING(3 45.5, 3 46.5)", "snap": [null, null]}'}
)
self.assertFalse(form2.is_valid(), str(form2.errors))
|
Add tests for path overlapping check
|
Add tests for path overlapping check
|
Python
|
bsd-2-clause
|
GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
|
from django.conf import settings
from django.test import TestCase
from unittest import skipIf
from geotrek.core.factories import TrailFactory
from geotrek.authent.factories import UserFactory
from geotrek.core.forms import TrailForm
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class TopologyFormTest(TestCase):
def test_save_form_when_topology_has_not_changed(self):
user = UserFactory()
topo = TrailFactory()
form = TrailForm(instance=topo, user=user)
self.assertEqual(topo, form.instance)
form.cleaned_data = {'topology': topo}
form.save()
self.assertEqual(topo, form.instance)
Add tests for path overlapping check
|
from django.conf import settings
from django.test import TestCase
from unittest import skipIf
from geotrek.core.factories import TrailFactory, PathFactory
from geotrek.authent.factories import UserFactory
from geotrek.core.forms import TrailForm, PathForm
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class TopologyFormTest(TestCase):
def test_save_form_when_topology_has_not_changed(self):
user = UserFactory()
topo = TrailFactory()
form = TrailForm(instance=topo, user=user)
self.assertEqual(topo, form.instance)
form.cleaned_data = {'topology': topo}
form.save()
self.assertEqual(topo, form.instance)
class PathFormTest(TestCase):
def test_overlapping_path(self):
user = UserFactory()
PathFactory.create(geom='SRID=4326;LINESTRING(3 45, 3 46)')
# Just intersecting
form1 = PathForm(
user=user,
data={'geom': '{"geom": "LINESTRING(2.5 45.5, 3.5 45.5)", "snap": [null, null]}'}
)
self.assertTrue(form1.is_valid(), str(form1.errors))
# Overlapping
form2 = PathForm(
user=user,
data={'geom': '{"geom": "LINESTRING(3 45.5, 3 46.5)", "snap": [null, null]}'}
)
self.assertFalse(form2.is_valid(), str(form2.errors))
|
<commit_before>from django.conf import settings
from django.test import TestCase
from unittest import skipIf
from geotrek.core.factories import TrailFactory
from geotrek.authent.factories import UserFactory
from geotrek.core.forms import TrailForm
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class TopologyFormTest(TestCase):
def test_save_form_when_topology_has_not_changed(self):
user = UserFactory()
topo = TrailFactory()
form = TrailForm(instance=topo, user=user)
self.assertEqual(topo, form.instance)
form.cleaned_data = {'topology': topo}
form.save()
self.assertEqual(topo, form.instance)
<commit_msg>Add tests for path overlapping check<commit_after>
|
from django.conf import settings
from django.test import TestCase
from unittest import skipIf
from geotrek.core.factories import TrailFactory, PathFactory
from geotrek.authent.factories import UserFactory
from geotrek.core.forms import TrailForm, PathForm
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class TopologyFormTest(TestCase):
def test_save_form_when_topology_has_not_changed(self):
user = UserFactory()
topo = TrailFactory()
form = TrailForm(instance=topo, user=user)
self.assertEqual(topo, form.instance)
form.cleaned_data = {'topology': topo}
form.save()
self.assertEqual(topo, form.instance)
class PathFormTest(TestCase):
def test_overlapping_path(self):
user = UserFactory()
PathFactory.create(geom='SRID=4326;LINESTRING(3 45, 3 46)')
# Just intersecting
form1 = PathForm(
user=user,
data={'geom': '{"geom": "LINESTRING(2.5 45.5, 3.5 45.5)", "snap": [null, null]}'}
)
self.assertTrue(form1.is_valid(), str(form1.errors))
# Overlapping
form2 = PathForm(
user=user,
data={'geom': '{"geom": "LINESTRING(3 45.5, 3 46.5)", "snap": [null, null]}'}
)
self.assertFalse(form2.is_valid(), str(form2.errors))
|
from django.conf import settings
from django.test import TestCase
from unittest import skipIf
from geotrek.core.factories import TrailFactory
from geotrek.authent.factories import UserFactory
from geotrek.core.forms import TrailForm
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class TopologyFormTest(TestCase):
def test_save_form_when_topology_has_not_changed(self):
user = UserFactory()
topo = TrailFactory()
form = TrailForm(instance=topo, user=user)
self.assertEqual(topo, form.instance)
form.cleaned_data = {'topology': topo}
form.save()
self.assertEqual(topo, form.instance)
Add tests for path overlapping checkfrom django.conf import settings
from django.test import TestCase
from unittest import skipIf
from geotrek.core.factories import TrailFactory, PathFactory
from geotrek.authent.factories import UserFactory
from geotrek.core.forms import TrailForm, PathForm
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class TopologyFormTest(TestCase):
def test_save_form_when_topology_has_not_changed(self):
user = UserFactory()
topo = TrailFactory()
form = TrailForm(instance=topo, user=user)
self.assertEqual(topo, form.instance)
form.cleaned_data = {'topology': topo}
form.save()
self.assertEqual(topo, form.instance)
class PathFormTest(TestCase):
def test_overlapping_path(self):
user = UserFactory()
PathFactory.create(geom='SRID=4326;LINESTRING(3 45, 3 46)')
# Just intersecting
form1 = PathForm(
user=user,
data={'geom': '{"geom": "LINESTRING(2.5 45.5, 3.5 45.5)", "snap": [null, null]}'}
)
self.assertTrue(form1.is_valid(), str(form1.errors))
# Overlapping
form2 = PathForm(
user=user,
data={'geom': '{"geom": "LINESTRING(3 45.5, 3 46.5)", "snap": [null, null]}'}
)
self.assertFalse(form2.is_valid(), str(form2.errors))
|
<commit_before>from django.conf import settings
from django.test import TestCase
from unittest import skipIf
from geotrek.core.factories import TrailFactory
from geotrek.authent.factories import UserFactory
from geotrek.core.forms import TrailForm
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class TopologyFormTest(TestCase):
def test_save_form_when_topology_has_not_changed(self):
user = UserFactory()
topo = TrailFactory()
form = TrailForm(instance=topo, user=user)
self.assertEqual(topo, form.instance)
form.cleaned_data = {'topology': topo}
form.save()
self.assertEqual(topo, form.instance)
<commit_msg>Add tests for path overlapping check<commit_after>from django.conf import settings
from django.test import TestCase
from unittest import skipIf
from geotrek.core.factories import TrailFactory, PathFactory
from geotrek.authent.factories import UserFactory
from geotrek.core.forms import TrailForm, PathForm
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
class TopologyFormTest(TestCase):
def test_save_form_when_topology_has_not_changed(self):
user = UserFactory()
topo = TrailFactory()
form = TrailForm(instance=topo, user=user)
self.assertEqual(topo, form.instance)
form.cleaned_data = {'topology': topo}
form.save()
self.assertEqual(topo, form.instance)
class PathFormTest(TestCase):
def test_overlapping_path(self):
user = UserFactory()
PathFactory.create(geom='SRID=4326;LINESTRING(3 45, 3 46)')
# Just intersecting
form1 = PathForm(
user=user,
data={'geom': '{"geom": "LINESTRING(2.5 45.5, 3.5 45.5)", "snap": [null, null]}'}
)
self.assertTrue(form1.is_valid(), str(form1.errors))
# Overlapping
form2 = PathForm(
user=user,
data={'geom': '{"geom": "LINESTRING(3 45.5, 3 46.5)", "snap": [null, null]}'}
)
self.assertFalse(form2.is_valid(), str(form2.errors))
|
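The coordinates in the two test cases above make the intended distinction concrete: the existing path LINESTRING(3 45, 3 46) is a vertical segment, the first candidate LINESTRING(2.5 45.5, 3.5 45.5) only touches it at the single point (3 45.5), while the second candidate LINESTRING(3 45.5, 3 46.5) shares the whole stretch from (3 45.5) to (3 46), a one-dimensional overlap. The same distinction can be reproduced with Shapely, used here purely as an illustration and not as the mechanism the form validation itself relies on:

from shapely.geometry import LineString

existing = LineString([(3, 45), (3, 46)])
crossing = LineString([(2.5, 45.5), (3.5, 45.5)])  # meets the path at a single point -> form accepted
overlapping = LineString([(3, 45.5), (3, 46.5)])   # shares a whole segment -> form rejected

print(existing.crosses(crossing))       # True: intersection is a point
print(existing.overlaps(crossing))      # False
print(existing.overlaps(overlapping))   # True: intersection is a line segment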
4b26066a6f3b666ec107621334ddbcceec6a819a
|
micro/read_code.py
|
micro/read_code.py
|
import fileinput
def read_code():
return ''.join([line for line in fileinput.input()])
if __name__ == '__main__':
code = read_code()
print(code)
|
import fileinput
def read_code(filename='-'):
return ''.join([line for line in fileinput.input(filename)])
if __name__ == '__main__':
import sys
filename = sys.argv[1] if len(sys.argv) > 1 else '-'
code = read_code(filename)
print(code)
|
Correct a reading of a code
|
Correct a reading of a code
|
Python
|
mit
|
thewizardplusplus/micro,thewizardplusplus/micro,thewizardplusplus/micro
|
import fileinput
def read_code():
return ''.join([line for line in fileinput.input()])
if __name__ == '__main__':
code = read_code()
print(code)
Correct a reading of a code
|
import fileinput
def read_code(filename='-'):
return ''.join([line for line in fileinput.input(filename)])
if __name__ == '__main__':
import sys
filename = sys.argv[1] if len(sys.argv) > 1 else '-'
code = read_code(filename)
print(code)
|
<commit_before>import fileinput
def read_code():
return ''.join([line for line in fileinput.input()])
if __name__ == '__main__':
code = read_code()
print(code)
<commit_msg>Correct a reading of a code<commit_after>
|
import fileinput
def read_code(filename='-'):
return ''.join([line for line in fileinput.input(filename)])
if __name__ == '__main__':
import sys
filename = sys.argv[1] if len(sys.argv) > 1 else '-'
code = read_code(filename)
print(code)
|
import fileinput
def read_code():
return ''.join([line for line in fileinput.input()])
if __name__ == '__main__':
code = read_code()
print(code)
Correct a reading of a codeimport fileinput
def read_code(filename='-'):
return ''.join([line for line in fileinput.input(filename)])
if __name__ == '__main__':
import sys
filename = sys.argv[1] if len(sys.argv) > 1 else '-'
code = read_code(filename)
print(code)
|
<commit_before>import fileinput
def read_code():
return ''.join([line for line in fileinput.input()])
if __name__ == '__main__':
code = read_code()
print(code)
<commit_msg>Correct a reading of a code<commit_after>import fileinput
def read_code(filename='-'):
return ''.join([line for line in fileinput.input(filename)])
if __name__ == '__main__':
import sys
filename = sys.argv[1] if len(sys.argv) > 1 else '-'
code = read_code(filename)
print(code)
|
9be37b96450780b41f5a5443568ca41a18e06d22
|
lcapy/sequence.py
|
lcapy/sequence.py
|
"""This module handles sequences.
Copyright 2020 Michael Hayes, UCECE
"""
from .expr import ExprList
class Sequence(ExprList):
def __init__(self, seq, n=None):
super (Sequence, self).__init__(seq)
# Save the indexes. Ideally, should annotate which item
# in sequence corresponds to n = 0.
self.n = n
def latex(self):
items = []
for v1, n1 in zip(self.n, self):
s = v.latex()
if n1 == 0:
s = r'\underline{%s}' % v1
items.append(s)
return '\left{%s\right\}' % ', '.join(items)
|
"""This module handles sequences.
Copyright 2020 Michael Hayes, UCECE
"""
from .expr import ExprList
class Sequence(ExprList):
def __init__(self, seq, n=None):
super (Sequence, self).__init__(seq)
# Save the indexes. Ideally, should annotate which item
# in sequence corresponds to n = 0.
self.n = n
def latex(self):
items = []
for v1, n1 in zip(self, self.n):
try:
s = v1.latex()
except:
s = str(v1)
if n1 == 0:
s = r'\underline{%s}' % v1
items.append(s)
return r'\left\{%s\right\}' % ', '.join(items)
def pretty(self):
items = []
for v1, n1 in zip(self, self.n):
try:
s = v1.pretty()
except:
s = str(v1)
if n1 == 0:
s = '_%s_' % v1
items.append(s)
return r'{%s}' % ', '.join(items)
|
Add pretty and latex for Sequence
|
Add pretty and latex for Sequence
|
Python
|
lgpl-2.1
|
mph-/lcapy
|
"""This module handles sequences.
Copyright 2020 Michael Hayes, UCECE
"""
from .expr import ExprList
class Sequence(ExprList):
def __init__(self, seq, n=None):
super (Sequence, self).__init__(seq)
# Save the indexes. Ideally, should annotate which item
# in sequence corresponds to n = 0.
self.n = n
def latex(self):
items = []
for v1, n1 in zip(self.n, self):
s = v.latex()
if n1 == 0:
s = r'\underline{%s}' % v1
items.append(s)
return '\left{%s\right\}' % ', '.join(items)
Add pretty and latex for Sequence
|
"""This module handles sequences.
Copyright 2020 Michael Hayes, UCECE
"""
from .expr import ExprList
class Sequence(ExprList):
def __init__(self, seq, n=None):
super (Sequence, self).__init__(seq)
# Save the indexes. Ideally, should annotate which item
# in sequence corresponds to n = 0.
self.n = n
def latex(self):
items = []
for v1, n1 in zip(self, self.n):
try:
s = v1.latex()
except:
s = str(v1)
if n1 == 0:
s = r'\underline{%s}' % v1
items.append(s)
return r'\left\{%s\right\}' % ', '.join(items)
def pretty(self):
items = []
for v1, n1 in zip(self, self.n):
try:
s = v1.pretty()
except:
s = str(v1)
if n1 == 0:
s = '_%s_' % v1
items.append(s)
return r'{%s}' % ', '.join(items)
|
<commit_before>"""This module handles sequences.
Copyright 2020 Michael Hayes, UCECE
"""
from .expr import ExprList
class Sequence(ExprList):
def __init__(self, seq, n=None):
super (Sequence, self).__init__(seq)
# Save the indexes. Ideally, should annotate which item
# in sequence corresponds to n = 0.
self.n = n
def latex(self):
items = []
for v1, n1 in zip(self.n, self):
s = v.latex()
if n1 == 0:
s = r'\underline{%s}' % v1
items.append(s)
return '\left{%s\right\}' % ', '.join(items)
<commit_msg>Add pretty and latex for Sequence<commit_after>
|
"""This module handles sequences.
Copyright 2020 Michael Hayes, UCECE
"""
from .expr import ExprList
class Sequence(ExprList):
def __init__(self, seq, n=None):
super (Sequence, self).__init__(seq)
# Save the indexes. Ideally, should annotate which item
# in sequence corresponds to n = 0.
self.n = n
def latex(self):
items = []
for v1, n1 in zip(self, self.n):
try:
s = v1.latex()
except:
s = str(v1)
if n1 == 0:
s = r'\underline{%s}' % v1
items.append(s)
return r'\left\{%s\right\}' % ', '.join(items)
def pretty(self):
items = []
for v1, n1 in zip(self, self.n):
try:
s = v1.pretty()
except:
s = str(v1)
if n1 == 0:
s = '_%s_' % v1
items.append(s)
return r'{%s}' % ', '.join(items)
|
"""This module handles sequences.
Copyright 2020 Michael Hayes, UCECE
"""
from .expr import ExprList
class Sequence(ExprList):
def __init__(self, seq, n=None):
super (Sequence, self).__init__(seq)
# Save the indexes. Ideally, should annotate which item
# in sequence corresponds to n = 0.
self.n = n
def latex(self):
items = []
for v1, n1 in zip(self.n, self):
s = v.latex()
if n1 == 0:
s = r'\underline{%s}' % v1
items.append(s)
return '\left{%s\right\}' % ', '.join(items)
Add pretty and latex for Sequence"""This module handles sequences.
Copyright 2020 Michael Hayes, UCECE
"""
from .expr import ExprList
class Sequence(ExprList):
def __init__(self, seq, n=None):
super (Sequence, self).__init__(seq)
# Save the indexes. Ideally, should annotate which item
# in sequence corresponds to n = 0.
self.n = n
def latex(self):
items = []
for v1, n1 in zip(self, self.n):
try:
s = v1.latex()
except:
s = str(v1)
if n1 == 0:
s = r'\underline{%s}' % v1
items.append(s)
return r'\left\{%s\right\}' % ', '.join(items)
def pretty(self):
items = []
for v1, n1 in zip(self, self.n):
try:
s = v1.pretty()
except:
s = str(v1)
if n1 == 0:
s = '_%s_' % v1
items.append(s)
return r'{%s}' % ', '.join(items)
|
<commit_before>"""This module handles sequences.
Copyright 2020 Michael Hayes, UCECE
"""
from .expr import ExprList
class Sequence(ExprList):
def __init__(self, seq, n=None):
super (Sequence, self).__init__(seq)
# Save the indexes. Ideally, should annotate which item
# in sequence corresponds to n = 0.
self.n = n
def latex(self):
items = []
for v1, n1 in zip(self.n, self):
s = v.latex()
if n1 == 0:
s = r'\underline{%s}' % v1
items.append(s)
return '\left{%s\right\}' % ', '.join(items)
<commit_msg>Add pretty and latex for Sequence<commit_after>"""This module handles sequences.
Copyright 2020 Michael Hayes, UCECE
"""
from .expr import ExprList
class Sequence(ExprList):
def __init__(self, seq, n=None):
super (Sequence, self).__init__(seq)
# Save the indexes. Ideally, should annotate which item
# in sequence corresponds to n = 0.
self.n = n
def latex(self):
items = []
for v1, n1 in zip(self, self.n):
try:
s = v1.latex()
except:
s = str(v1)
if n1 == 0:
s = r'\underline{%s}' % v1
items.append(s)
return r'\left\{%s\right\}' % ', '.join(items)
def pretty(self):
items = []
for v1, n1 in zip(self, self.n):
try:
s = v1.pretty()
except:
s = str(v1)
if n1 == 0:
s = '_%s_' % v1
items.append(s)
return r'{%s}' % ', '.join(items)
|
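A rough usage sketch of the two methods fixed above, with plain integers as elements; the constructor call and the import path are inferred from the code and file name shown, so treat them as assumptions rather than documented lcapy API:

from lcapy.sequence import Sequence  # path inferred from lcapy/sequence.py

seq = Sequence([1, 2, 3], n=[-1, 0, 1])  # n gives the index of each element
print(seq.pretty())  # -> {1, _2_, 3}      (the n = 0 item is marked)
print(seq.latex())   # -> \left\{1, \underline{2}, 3\right\}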
4559e01646010e1ed260d77e612774778a0c1359
|
lib/rfk/site/forms/login.py
|
lib/rfk/site/forms/login.py
|
from wtforms import Form, SubmitField, BooleanField, TextField, SelectField, \
PasswordField, IntegerField, FieldList, FormField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
remember = BooleanField('Remember me')
def login_form(rform):
return LoginForm(rform)
class RegisterForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(),
validators.Length(min=5, message='Password too short.'),
validators.EqualTo('password_retype', message='Passwords must match.')])
password_retype = PasswordField('Password (verification)', [validators.Required()])
email = TextField('E-Mail', [validators.Optional(), validators.Email()])
def register_form(rform):
return RegisterForm(rform)
|
from wtforms import Form, SubmitField, BooleanField, TextField, SelectField, \
PasswordField, IntegerField, FieldList, FormField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
remember = BooleanField('Remember me')
def login_form(rform):
return LoginForm(rform)
class RegisterForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(),
validators.Length(min=5, message='Password too short.'),
validators.EqualTo('password_retype', message='Passwords must match.')])
password_retype = PasswordField('Password (verification)', [validators.Required()])
email = TextField('E-Mail (optional)', [validators.Optional(), validators.Email()])
def register_form(rform):
return RegisterForm(rform)
|
Make it clear that the E-Mail address is optional.
|
Make it clear that the E-Mail address is optional.
|
Python
|
bsd-3-clause
|
buckket/weltklang,buckket/weltklang,krautradio/PyRfK,krautradio/PyRfK,krautradio/PyRfK,buckket/weltklang,buckket/weltklang,krautradio/PyRfK
|
from wtforms import Form, SubmitField, BooleanField, TextField, SelectField, \
PasswordField, IntegerField, FieldList, FormField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
remember = BooleanField('Remember me')
def login_form(rform):
return LoginForm(rform)
class RegisterForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(),
validators.Length(min=5, message='Password too short.'),
validators.EqualTo('password_retype', message='Passwords must match.')])
password_retype = PasswordField('Password (verification)', [validators.Required()])
email = TextField('E-Mail', [validators.Optional(), validators.Email()])
def register_form(rform):
return RegisterForm(rform)
Make it clear that the E-Mail address is optional.
|
from wtforms import Form, SubmitField, BooleanField, TextField, SelectField, \
PasswordField, IntegerField, FieldList, FormField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
remember = BooleanField('Remember me')
def login_form(rform):
return LoginForm(rform)
class RegisterForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(),
validators.Length(min=5, message='Password too short.'),
validators.EqualTo('password_retype', message='Passwords must match.')])
password_retype = PasswordField('Password (verification)', [validators.Required()])
email = TextField('E-Mail (optional)', [validators.Optional(), validators.Email()])
def register_form(rform):
return RegisterForm(rform)
|
<commit_before>from wtforms import Form, SubmitField, BooleanField, TextField, SelectField, \
PasswordField, IntegerField, FieldList, FormField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
remember = BooleanField('Remember me')
def login_form(rform):
return LoginForm(rform)
class RegisterForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(),
validators.Length(min=5, message='Password too short.'),
validators.EqualTo('password_retype', message='Passwords must match.')])
password_retype = PasswordField('Password (verification)', [validators.Required()])
email = TextField('E-Mail', [validators.Optional(), validators.Email()])
def register_form(rform):
return RegisterForm(rform)
<commit_msg>Make it clear that the E-Mail address is optional.<commit_after>
|
from wtforms import Form, SubmitField, BooleanField, TextField, SelectField, \
PasswordField, IntegerField, FieldList, FormField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
remember = BooleanField('Remember me')
def login_form(rform):
return LoginForm(rform)
class RegisterForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(),
validators.Length(min=5, message='Password too short.'),
validators.EqualTo('password_retype', message='Passwords must match.')])
password_retype = PasswordField('Password (verification)', [validators.Required()])
email = TextField('E-Mail (optional)', [validators.Optional(), validators.Email()])
def register_form(rform):
return RegisterForm(rform)
|
from wtforms import Form, SubmitField, BooleanField, TextField, SelectField, \
PasswordField, IntegerField, FieldList, FormField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
remember = BooleanField('Remember me')
def login_form(rform):
return LoginForm(rform)
class RegisterForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(),
validators.Length(min=5, message='Password too short.'),
validators.EqualTo('password_retype', message='Passwords must match.')])
password_retype = PasswordField('Password (verification)', [validators.Required()])
email = TextField('E-Mail', [validators.Optional(), validators.Email()])
def register_form(rform):
return RegisterForm(rform)
Make it clear that the E-Mail address is optional.from wtforms import Form, SubmitField, BooleanField, TextField, SelectField, \
PasswordField, IntegerField, FieldList, FormField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
remember = BooleanField('Remember me')
def login_form(rform):
return LoginForm(rform)
class RegisterForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(),
validators.Length(min=5, message='Password too short.'),
validators.EqualTo('password_retype', message='Passwords must match.')])
password_retype = PasswordField('Password (verification)', [validators.Required()])
email = TextField('E-Mail (optional)', [validators.Optional(), validators.Email()])
def register_form(rform):
return RegisterForm(rform)
|
<commit_before>from wtforms import Form, SubmitField, BooleanField, TextField, SelectField, \
PasswordField, IntegerField, FieldList, FormField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
remember = BooleanField('Remember me')
def login_form(rform):
return LoginForm(rform)
class RegisterForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(),
validators.Length(min=5, message='Password too short.'),
validators.EqualTo('password_retype', message='Passwords must match.')])
password_retype = PasswordField('Password (verification)', [validators.Required()])
email = TextField('E-Mail', [validators.Optional(), validators.Email()])
def register_form(rform):
return RegisterForm(rform)
<commit_msg>Make it clear that the E-Mail address is optional.<commit_after>from wtforms import Form, SubmitField, BooleanField, TextField, SelectField, \
PasswordField, IntegerField, FieldList, FormField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
remember = BooleanField('Remember me')
def login_form(rform):
return LoginForm(rform)
class RegisterForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(),
validators.Length(min=5, message='Password too short.'),
validators.EqualTo('password_retype', message='Passwords must match.')])
password_retype = PasswordField('Password (verification)', [validators.Required()])
email = TextField('E-Mail (optional)', [validators.Optional(), validators.Email()])
def register_form(rform):
return RegisterForm(rform)
|
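A hedged sketch of how the relabelled RegisterForm behaves (not part of the commit). It assumes werkzeug is available to supply the MultiDict that WTForms expects as form data; the point is that registration still validates with the e-mail field left blank, and the label now says so.

# Hypothetical illustration: the e-mail field is optional and labelled as such.
from werkzeug.datastructures import MultiDict

form = RegisterForm(MultiDict({'username': 'alice',
                               'password': 's3cret',
                               'password_retype': 's3cret'}))
print(form.validate())        # True: no e-mail address supplied, validation still passes
print(form.email.label.text)  # E-Mail (optional)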
3d9bf8afd912ccb0d1df72353a9c306c59773007
|
swf/exceptions.py
|
swf/exceptions.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
self.type_ = self.kind.lower().strip().replace(' ', '_') if self.kind else None
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
|
Update SWFError with a formatted type
|
Update SWFError with a formatted type
|
Python
|
mit
|
botify-labs/python-simple-workflow,botify-labs/python-simple-workflow
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
Update SWFError with a formatted type
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
self.type_ = self.kind.lower().strip().replace(' ', '_') if self.kind else None
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
<commit_msg>Update SWFError with a formatted type<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
self.type_ = self.kind.lower().strip().replace(' ', '_') if self.kind else None
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
Update SWFError with a formatted type# -*- coding: utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
self.type_ = self.kind.lower().strip().replace(' ', '_') if self.kind else None
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
<commit_msg>Update SWFError with a formatted type<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
class SWFError(Exception):
def __init__(self, message, raw_error, *args):
Exception.__init__(self, message, *args)
self.kind, self.details = raw_error.split(':')
self.type_ = self.kind.lower().strip().replace(' ', '_') if self.kind else None
def __repr__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
def __str__(self):
msg = self.message
if self.kind and self.details:
msg += '\nReason: {}, {}'.format(self.kind, self.details)
return msg
class PollTimeout(SWFError):
pass
class InvalidCredentialsError(SWFError):
pass
class ResponseError(SWFError):
pass
class DoesNotExistError(SWFError):
pass
class AlreadyExistsError(SWFError):
pass
class InvalidKeywordArgumentError(SWFError):
pass
|
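A quick sketch (not from the repository) of what the new type_ attribute yields for a raw error string of the usual "Kind: details" shape, following the kind.lower().strip().replace(' ', '_') normalisation added above.

# Hypothetical illustration of the formatted error type.
err = SWFError('workflow lookup failed', 'Unknown Resource: domain does not exist')
print(err.kind)     # Unknown Resource
print(err.details)  #  domain does not exist   (leading space kept by split(':'))
print(err.type_)    # unknown_resource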
458688aa3a1ce901b2ffcf64497965484cea4e53
|
temperature_db.py
|
temperature_db.py
|
#!/usr/bin/env python
"""Temperature into database"""
import glob
from time import sleep
import urllib2
import urllib
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
try:
while True:
lines = open(device_file, 'r').readlines()
string = lines[1][-6:].replace('=', '')
t = int(string)
temp_c = t / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
data = {}
data['temperature'] = str(temp_f)
data['room'] = '1'
url_values = urllib.urlencode(data)
url = 'http://192.168.1.6/addtemperature'
full_url = url + '?' + url_values
data = urllib2.urlopen(full_url)
print data.read()
sleep(4)
except KeyboardInterrupt:
pass
|
#!/usr/bin/env python
"""Temperature into database"""
import glob
from time import sleep
import urllib2
import urllib
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
try:
while True:
lines = open(device_file, 'r').readlines()
string = lines[1][-6:].replace('=', '')
t = int(string)
temp_c = t / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
data = {}
data['temperature'] = str(temp_f)
data['room'] = '1'
url_values = urllib.urlencode(data)
url = 'http://192.168.1.4/addtemperature'
full_url = url + '?' + url_values
data = urllib2.urlopen(full_url)
print data.read()
sleep(60)
except KeyboardInterrupt:
pass
|
Update the DB-inserting script for wifi.
|
Update the DB-inserting script for wifi.
|
Python
|
mit
|
zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie
|
#!/usr/bin/env python
"""Temperature into database"""
import glob
from time import sleep
import urllib2
import urllib
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
try:
while True:
lines = open(device_file, 'r').readlines()
string = lines[1][-6:].replace('=', '')
t = int(string)
temp_c = t / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
data = {}
data['temperature'] = str(temp_f)
data['room'] = '1'
url_values = urllib.urlencode(data)
url = 'http://192.168.1.6/addtemperature'
full_url = url + '?' + url_values
data = urllib2.urlopen(full_url)
print data.read()
sleep(4)
except KeyboardInterrupt:
pass
Update the DB-inserting script for wifi.
|
#!/usr/bin/env python
"""Temperature into database"""
import glob
from time import sleep
import urllib2
import urllib
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
try:
while True:
lines = open(device_file, 'r').readlines()
string = lines[1][-6:].replace('=', '')
t = int(string)
temp_c = t / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
data = {}
data['temperature'] = str(temp_f)
data['room'] = '1'
url_values = urllib.urlencode(data)
url = 'http://192.168.1.4/addtemperature'
full_url = url + '?' + url_values
data = urllib2.urlopen(full_url)
print data.read()
sleep(60)
except KeyboardInterrupt:
pass
|
<commit_before>#!/usr/bin/env python
"""Temperature into database"""
import glob
from time import sleep
import urllib2
import urllib
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
try:
while True:
lines = open(device_file, 'r').readlines()
string = lines[1][-6:].replace('=', '')
t = int(string)
temp_c = t / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
data = {}
data['temperature'] = str(temp_f)
data['room'] = '1'
url_values = urllib.urlencode(data)
url = 'http://192.168.1.6/addtemperature'
full_url = url + '?' + url_values
data = urllib2.urlopen(full_url)
print data.read()
sleep(4)
except KeyboardInterrupt:
pass
<commit_msg>Update the DB-inserting script for wifi.<commit_after>
|
#!/usr/bin/env python
"""Temperature into database"""
import glob
from time import sleep
import urllib2
import urllib
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
try:
while True:
lines = open(device_file, 'r').readlines()
string = lines[1][-6:].replace('=', '')
t = int(string)
temp_c = t / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
data = {}
data['temperature'] = str(temp_f)
data['room'] = '1'
url_values = urllib.urlencode(data)
url = 'http://192.168.1.4/addtemperature'
full_url = url + '?' + url_values
data = urllib2.urlopen(full_url)
print data.read()
sleep(60)
except KeyboardInterrupt:
pass
|
#!/usr/bin/env python
"""Temperature into database"""
import glob
from time import sleep
import urllib2
import urllib
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
try:
while True:
lines = open(device_file, 'r').readlines()
string = lines[1][-6:].replace('=', '')
t = int(string)
temp_c = t / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
data = {}
data['temperature'] = str(temp_f)
data['room'] = '1'
url_values = urllib.urlencode(data)
url = 'http://192.168.1.6/addtemperature'
full_url = url + '?' + url_values
data = urllib2.urlopen(full_url)
print data.read()
sleep(4)
except KeyboardInterrupt:
pass
Update the DB-inserting script for wifi.#!/usr/bin/env python
"""Temperature into database"""
import glob
from time import sleep
import urllib2
import urllib
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
try:
while True:
lines = open(device_file, 'r').readlines()
string = lines[1][-6:].replace('=', '')
t = int(string)
temp_c = t / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
data = {}
data['temperature'] = str(temp_f)
data['room'] = '1'
url_values = urllib.urlencode(data)
url = 'http://192.168.1.4/addtemperature'
full_url = url + '?' + url_values
data = urllib2.urlopen(full_url)
print data.read()
sleep(60)
except KeyboardInterrupt:
pass
|
<commit_before>#!/usr/bin/env python
"""Temperature into database"""
import glob
from time import sleep
import urllib2
import urllib
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
try:
while True:
lines = open(device_file, 'r').readlines()
string = lines[1][-6:].replace('=', '')
t = int(string)
temp_c = t / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
data = {}
data['temperature'] = str(temp_f)
data['room'] = '1'
url_values = urllib.urlencode(data)
url = 'http://192.168.1.6/addtemperature'
full_url = url + '?' + url_values
data = urllib2.urlopen(full_url)
print data.read()
sleep(4)
except KeyboardInterrupt:
pass
<commit_msg>Update the DB-inserting script for wifi.<commit_after>#!/usr/bin/env python
"""Temperature into database"""
import glob
from time import sleep
import urllib2
import urllib
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
try:
while True:
lines = open(device_file, 'r').readlines()
string = lines[1][-6:].replace('=', '')
t = int(string)
temp_c = t / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
data = {}
data['temperature'] = str(temp_f)
data['room'] = '1'
url_values = urllib.urlencode(data)
url = 'http://192.168.1.4/addtemperature'
full_url = url + '?' + url_values
data = urllib2.urlopen(full_url)
print data.read()
sleep(60)
except KeyboardInterrupt:
pass
|
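A worked illustration, with made-up sensor output rather than anything from the repository, of the parsing the loop performs on the second line of w1_slave before posting the reading. The last six characters hold the millidegree value; replace('=', '') is what keeps shorter four-digit readings working.

# Hypothetical w1_slave contents (Python 2, matching the script above).
lines = ['72 01 4b 46 7f ff 0e 10 57 : crc=57 YES\n',
         '72 01 4b 46 7f ff 0e 10 57 t=23187\n']
string = lines[1][-6:].replace('=', '')
temp_c = int(string) / 1000.0          # 23.187 degrees C
temp_f = temp_c * 9.0 / 5.0 + 32.0     # roughly 73.74 degrees F
print(temp_f)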