commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d633d3c13a958b279b93d09142a772e59c798f6f
|
peas-demo/plugins/pythonhello/pythonhello.py
|
peas-demo/plugins/pythonhello/pythonhello.py
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
|
Fix a typo in the python plugin.
|
[PeasDemo] Fix a typo in the python plugin.
It was indicating "do_activate" in the console when actually
deactivating the plugin.
|
Python
|
lgpl-2.1
|
GNOME/libpeas,gregier/libpeas,Distrotech/libpeas,chergert/libpeas,GNOME/libpeas,chergert/libpeas,gregier/libpeas,chergert/libpeas,gregier/libpeas,gregier/libpeas,Distrotech/libpeas,Distrotech/libpeas
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
[PeasDemo] Fix a typo in the python plugin.
It was indicating "do_activate" in the console when actually
deactivating the plugin.
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
|
<commit_before># -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
<commit_msg>[PeasDemo] Fix a typo in the python plugin.
It was indicating "do_activate" in the console when actually
deactivating the plugin.<commit_after>
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
[PeasDemo] Fix a typo in the python plugin.
It was indicating "do_activate" in the console when actually
deactivating the plugin.# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
|
<commit_before># -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
<commit_msg>[PeasDemo] Fix a typo in the python plugin.
It was indicating "do_activate" in the console when actually
deactivating the plugin.<commit_after># -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
|
83d6c1695a147649972f36cbee1bc68fd0786765
|
molly/rest.py
|
molly/rest.py
|
import os
from flask import Flask
from molly.config import ConfigLoader
from molly.apps.homepage import App as Homepage
flask_app = Flask(__name__)
with open(os.environ.get('MOLLY_CONFIG', 'conf/default.conf')) as fd:
config_loader = ConfigLoader()
apps, services = config_loader.load_from_config(fd)
for service in services.values():
service.init_app(flask_app)
for app in apps:
flask_app.register_blueprint(app.blueprint, url_prefix='/' + app.instance_name)
flask_app.register_blueprint(Homepage(apps).blueprint)
def start_debug(address=None):
flask_app.debug = True
flask_app.run(debug=True, host=address, port=8000)
|
import os
from flask import Flask
from molly.config import ConfigLoader
from molly.apps.homepage import App as Homepage
flask_app = Flask(__name__)
with open(os.environ.get('MOLLY_CONFIG', 'conf/default.conf')) as fd:
config_loader = ConfigLoader()
config, apps, services = config_loader.load_from_config(fd)
flask_app.config.update(config)
for service in services.values():
service.init_app(flask_app)
for app in apps:
flask_app.register_blueprint(app.blueprint, url_prefix='/' + app.instance_name)
flask_app.register_blueprint(Homepage(apps).blueprint)
def start_debug(address=None):
flask_app.debug = True
flask_app.run(debug=True, host=address, port=8000)
|
Configure Flask from global config
|
Configure Flask from global config
|
Python
|
apache-2.0
|
ManchesterIO/mollyproject-next,ManchesterIO/mollyproject-next,ManchesterIO/mollyproject-next
|
import os
from flask import Flask
from molly.config import ConfigLoader
from molly.apps.homepage import App as Homepage
flask_app = Flask(__name__)
with open(os.environ.get('MOLLY_CONFIG', 'conf/default.conf')) as fd:
config_loader = ConfigLoader()
apps, services = config_loader.load_from_config(fd)
for service in services.values():
service.init_app(flask_app)
for app in apps:
flask_app.register_blueprint(app.blueprint, url_prefix='/' + app.instance_name)
flask_app.register_blueprint(Homepage(apps).blueprint)
def start_debug(address=None):
flask_app.debug = True
flask_app.run(debug=True, host=address, port=8000)
Configure Flask from global config
|
import os
from flask import Flask
from molly.config import ConfigLoader
from molly.apps.homepage import App as Homepage
flask_app = Flask(__name__)
with open(os.environ.get('MOLLY_CONFIG', 'conf/default.conf')) as fd:
config_loader = ConfigLoader()
config, apps, services = config_loader.load_from_config(fd)
flask_app.config.update(config)
for service in services.values():
service.init_app(flask_app)
for app in apps:
flask_app.register_blueprint(app.blueprint, url_prefix='/' + app.instance_name)
flask_app.register_blueprint(Homepage(apps).blueprint)
def start_debug(address=None):
flask_app.debug = True
flask_app.run(debug=True, host=address, port=8000)
|
<commit_before>import os
from flask import Flask
from molly.config import ConfigLoader
from molly.apps.homepage import App as Homepage
flask_app = Flask(__name__)
with open(os.environ.get('MOLLY_CONFIG', 'conf/default.conf')) as fd:
config_loader = ConfigLoader()
apps, services = config_loader.load_from_config(fd)
for service in services.values():
service.init_app(flask_app)
for app in apps:
flask_app.register_blueprint(app.blueprint, url_prefix='/' + app.instance_name)
flask_app.register_blueprint(Homepage(apps).blueprint)
def start_debug(address=None):
flask_app.debug = True
flask_app.run(debug=True, host=address, port=8000)
<commit_msg>Configure Flask from global config<commit_after>
|
import os
from flask import Flask
from molly.config import ConfigLoader
from molly.apps.homepage import App as Homepage
flask_app = Flask(__name__)
with open(os.environ.get('MOLLY_CONFIG', 'conf/default.conf')) as fd:
config_loader = ConfigLoader()
config, apps, services = config_loader.load_from_config(fd)
flask_app.config.update(config)
for service in services.values():
service.init_app(flask_app)
for app in apps:
flask_app.register_blueprint(app.blueprint, url_prefix='/' + app.instance_name)
flask_app.register_blueprint(Homepage(apps).blueprint)
def start_debug(address=None):
flask_app.debug = True
flask_app.run(debug=True, host=address, port=8000)
|
import os
from flask import Flask
from molly.config import ConfigLoader
from molly.apps.homepage import App as Homepage
flask_app = Flask(__name__)
with open(os.environ.get('MOLLY_CONFIG', 'conf/default.conf')) as fd:
config_loader = ConfigLoader()
apps, services = config_loader.load_from_config(fd)
for service in services.values():
service.init_app(flask_app)
for app in apps:
flask_app.register_blueprint(app.blueprint, url_prefix='/' + app.instance_name)
flask_app.register_blueprint(Homepage(apps).blueprint)
def start_debug(address=None):
flask_app.debug = True
flask_app.run(debug=True, host=address, port=8000)
Configure Flask from global configimport os
from flask import Flask
from molly.config import ConfigLoader
from molly.apps.homepage import App as Homepage
flask_app = Flask(__name__)
with open(os.environ.get('MOLLY_CONFIG', 'conf/default.conf')) as fd:
config_loader = ConfigLoader()
config, apps, services = config_loader.load_from_config(fd)
flask_app.config.update(config)
for service in services.values():
service.init_app(flask_app)
for app in apps:
flask_app.register_blueprint(app.blueprint, url_prefix='/' + app.instance_name)
flask_app.register_blueprint(Homepage(apps).blueprint)
def start_debug(address=None):
flask_app.debug = True
flask_app.run(debug=True, host=address, port=8000)
|
<commit_before>import os
from flask import Flask
from molly.config import ConfigLoader
from molly.apps.homepage import App as Homepage
flask_app = Flask(__name__)
with open(os.environ.get('MOLLY_CONFIG', 'conf/default.conf')) as fd:
config_loader = ConfigLoader()
apps, services = config_loader.load_from_config(fd)
for service in services.values():
service.init_app(flask_app)
for app in apps:
flask_app.register_blueprint(app.blueprint, url_prefix='/' + app.instance_name)
flask_app.register_blueprint(Homepage(apps).blueprint)
def start_debug(address=None):
flask_app.debug = True
flask_app.run(debug=True, host=address, port=8000)
<commit_msg>Configure Flask from global config<commit_after>import os
from flask import Flask
from molly.config import ConfigLoader
from molly.apps.homepage import App as Homepage
flask_app = Flask(__name__)
with open(os.environ.get('MOLLY_CONFIG', 'conf/default.conf')) as fd:
config_loader = ConfigLoader()
config, apps, services = config_loader.load_from_config(fd)
flask_app.config.update(config)
for service in services.values():
service.init_app(flask_app)
for app in apps:
flask_app.register_blueprint(app.blueprint, url_prefix='/' + app.instance_name)
flask_app.register_blueprint(Homepage(apps).blueprint)
def start_debug(address=None):
flask_app.debug = True
flask_app.run(debug=True, host=address, port=8000)
|
851fff051a194e061cdd110e32d0f88fe2d60587
|
nodular/db.py
|
nodular/db.py
|
# -*- coding: utf-8 -*-
"""
Nodular provides a Flask-SQLAlchemy database object that all models in
your app must use. Typical usage::
from nodular import db
from coaster.sqlalchemy import BaseMixin
class MyModel(BaseMixin, db.Model):
pass
To initialize with an app::
from flask import Flask
app = Flask(__name__)
db.init_app(app)
If you have only one app per Python process (which is typical), add
this line to your init sequence::
db.app = app
This makes your app the default app for this database object and removes
the need to use ``app.test_request_context()`` when querying the database
outside a request context.
"""
from flask.ext.sqlalchemy import SQLAlchemy
__all__ = ['db']
db = SQLAlchemy()
|
# -*- coding: utf-8 -*-
"""
Nodular provides a Flask-SQLAlchemy database object that all models in
your app must use. Typical usage::
from nodular import db
from coaster.sqlalchemy import BaseMixin
class MyModel(BaseMixin, db.Model):
pass
To initialize with an app::
from flask import Flask
app = Flask(__name__)
db.init_app(app)
If you have only one app per Python process (which is typical), add
this line to your init sequence::
db.app = app
This makes your app the default app for this database object and removes
the need to use ``app.test_request_context()`` when querying the database
outside a request context.
"""
from flask.ext.sqlalchemy import SQLAlchemy
__all__ = ['db']
db = SQLAlchemy()
# To enable foreign key support in SQLite3
from sqlalchemy import event
from sqlalchemy.engine import Engine
from sqlite3 import Connection as SQLite3Connection
@event.listens_for(Engine, "connect")
def _set_sqlite_pragma(dbapi_connection, connection_record):
if isinstance(dbapi_connection, SQLite3Connection):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON;")
cursor.close()
|
Fix for SQLite3 foreign keys.
|
Fix for SQLite3 foreign keys.
|
Python
|
bsd-2-clause
|
hasgeek/nodular,hasgeek/nodular
|
# -*- coding: utf-8 -*-
"""
Nodular provides a Flask-SQLAlchemy database object that all models in
your app must use. Typical usage::
from nodular import db
from coaster.sqlalchemy import BaseMixin
class MyModel(BaseMixin, db.Model):
pass
To initialize with an app::
from flask import Flask
app = Flask(__name__)
db.init_app(app)
If you have only one app per Python process (which is typical), add
this line to your init sequence::
db.app = app
This makes your app the default app for this database object and removes
the need to use ``app.test_request_context()`` when querying the database
outside a request context.
"""
from flask.ext.sqlalchemy import SQLAlchemy
__all__ = ['db']
db = SQLAlchemy()
Fix for SQLite3 foreign keys.
|
# -*- coding: utf-8 -*-
"""
Nodular provides a Flask-SQLAlchemy database object that all models in
your app must use. Typical usage::
from nodular import db
from coaster.sqlalchemy import BaseMixin
class MyModel(BaseMixin, db.Model):
pass
To initialize with an app::
from flask import Flask
app = Flask(__name__)
db.init_app(app)
If you have only one app per Python process (which is typical), add
this line to your init sequence::
db.app = app
This makes your app the default app for this database object and removes
the need to use ``app.test_request_context()`` when querying the database
outside a request context.
"""
from flask.ext.sqlalchemy import SQLAlchemy
__all__ = ['db']
db = SQLAlchemy()
# To enable foreign key support in SQLite3
from sqlalchemy import event
from sqlalchemy.engine import Engine
from sqlite3 import Connection as SQLite3Connection
@event.listens_for(Engine, "connect")
def _set_sqlite_pragma(dbapi_connection, connection_record):
if isinstance(dbapi_connection, SQLite3Connection):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON;")
cursor.close()
|
<commit_before># -*- coding: utf-8 -*-
"""
Nodular provides a Flask-SQLAlchemy database object that all models in
your app must use. Typical usage::
from nodular import db
from coaster.sqlalchemy import BaseMixin
class MyModel(BaseMixin, db.Model):
pass
To initialize with an app::
from flask import Flask
app = Flask(__name__)
db.init_app(app)
If you have only one app per Python process (which is typical), add
this line to your init sequence::
db.app = app
This makes your app the default app for this database object and removes
the need to use ``app.test_request_context()`` when querying the database
outside a request context.
"""
from flask.ext.sqlalchemy import SQLAlchemy
__all__ = ['db']
db = SQLAlchemy()
<commit_msg>Fix for SQLite3 foreign keys.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Nodular provides a Flask-SQLAlchemy database object that all models in
your app must use. Typical usage::
from nodular import db
from coaster.sqlalchemy import BaseMixin
class MyModel(BaseMixin, db.Model):
pass
To initialize with an app::
from flask import Flask
app = Flask(__name__)
db.init_app(app)
If you have only one app per Python process (which is typical), add
this line to your init sequence::
db.app = app
This makes your app the default app for this database object and removes
the need to use ``app.test_request_context()`` when querying the database
outside a request context.
"""
from flask.ext.sqlalchemy import SQLAlchemy
__all__ = ['db']
db = SQLAlchemy()
# To enable foreign key support in SQLite3
from sqlalchemy import event
from sqlalchemy.engine import Engine
from sqlite3 import Connection as SQLite3Connection
@event.listens_for(Engine, "connect")
def _set_sqlite_pragma(dbapi_connection, connection_record):
if isinstance(dbapi_connection, SQLite3Connection):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON;")
cursor.close()
|
# -*- coding: utf-8 -*-
"""
Nodular provides a Flask-SQLAlchemy database object that all models in
your app must use. Typical usage::
from nodular import db
from coaster.sqlalchemy import BaseMixin
class MyModel(BaseMixin, db.Model):
pass
To initialize with an app::
from flask import Flask
app = Flask(__name__)
db.init_app(app)
If you have only one app per Python process (which is typical), add
this line to your init sequence::
db.app = app
This makes your app the default app for this database object and removes
the need to use ``app.test_request_context()`` when querying the database
outside a request context.
"""
from flask.ext.sqlalchemy import SQLAlchemy
__all__ = ['db']
db = SQLAlchemy()
Fix for SQLite3 foreign keys.# -*- coding: utf-8 -*-
"""
Nodular provides a Flask-SQLAlchemy database object that all models in
your app must use. Typical usage::
from nodular import db
from coaster.sqlalchemy import BaseMixin
class MyModel(BaseMixin, db.Model):
pass
To initialize with an app::
from flask import Flask
app = Flask(__name__)
db.init_app(app)
If you have only one app per Python process (which is typical), add
this line to your init sequence::
db.app = app
This makes your app the default app for this database object and removes
the need to use ``app.test_request_context()`` when querying the database
outside a request context.
"""
from flask.ext.sqlalchemy import SQLAlchemy
__all__ = ['db']
db = SQLAlchemy()
# To enable foreign key support in SQLite3
from sqlalchemy import event
from sqlalchemy.engine import Engine
from sqlite3 import Connection as SQLite3Connection
@event.listens_for(Engine, "connect")
def _set_sqlite_pragma(dbapi_connection, connection_record):
if isinstance(dbapi_connection, SQLite3Connection):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON;")
cursor.close()
|
<commit_before># -*- coding: utf-8 -*-
"""
Nodular provides a Flask-SQLAlchemy database object that all models in
your app must use. Typical usage::
from nodular import db
from coaster.sqlalchemy import BaseMixin
class MyModel(BaseMixin, db.Model):
pass
To initialize with an app::
from flask import Flask
app = Flask(__name__)
db.init_app(app)
If you have only one app per Python process (which is typical), add
this line to your init sequence::
db.app = app
This makes your app the default app for this database object and removes
the need to use ``app.test_request_context()`` when querying the database
outside a request context.
"""
from flask.ext.sqlalchemy import SQLAlchemy
__all__ = ['db']
db = SQLAlchemy()
<commit_msg>Fix for SQLite3 foreign keys.<commit_after># -*- coding: utf-8 -*-
"""
Nodular provides a Flask-SQLAlchemy database object that all models in
your app must use. Typical usage::
from nodular import db
from coaster.sqlalchemy import BaseMixin
class MyModel(BaseMixin, db.Model):
pass
To initialize with an app::
from flask import Flask
app = Flask(__name__)
db.init_app(app)
If you have only one app per Python process (which is typical), add
this line to your init sequence::
db.app = app
This makes your app the default app for this database object and removes
the need to use ``app.test_request_context()`` when querying the database
outside a request context.
"""
from flask.ext.sqlalchemy import SQLAlchemy
__all__ = ['db']
db = SQLAlchemy()
# To enable foreign key support in SQLite3
from sqlalchemy import event
from sqlalchemy.engine import Engine
from sqlite3 import Connection as SQLite3Connection
@event.listens_for(Engine, "connect")
def _set_sqlite_pragma(dbapi_connection, connection_record):
if isinstance(dbapi_connection, SQLite3Connection):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON;")
cursor.close()
|
09309cbfb321dbf8d5e5c4e4754259b2cb1619ac
|
startup.py
|
startup.py
|
import tables
import tables as tb
import fipy
import fipy as fp
import numpy
import numpy as np
import scipy
import scipy as sp
import pylab
import pylab as pl
|
import tables
import tables as tb
import fipy
import fipy as fp
import numpy
import numpy as np
import scipy
import scipy as spy
import matplotlib.pylot as plt
|
Use canonical matplotlib.pyplot as plt.
|
Use canonical matplotlib.pyplot as plt.
|
Python
|
mit
|
wd15/env,wd15/env,wd15/env
|
import tables
import tables as tb
import fipy
import fipy as fp
import numpy
import numpy as np
import scipy
import scipy as sp
import pylab
import pylab as pl
Use canonical matplotlib.pyplot as plt.
|
import tables
import tables as tb
import fipy
import fipy as fp
import numpy
import numpy as np
import scipy
import scipy as spy
import matplotlib.pylot as plt
|
<commit_before>import tables
import tables as tb
import fipy
import fipy as fp
import numpy
import numpy as np
import scipy
import scipy as sp
import pylab
import pylab as pl
<commit_msg>Use canonical matplotlib.pyplot as plt.<commit_after>
|
import tables
import tables as tb
import fipy
import fipy as fp
import numpy
import numpy as np
import scipy
import scipy as spy
import matplotlib.pylot as plt
|
import tables
import tables as tb
import fipy
import fipy as fp
import numpy
import numpy as np
import scipy
import scipy as sp
import pylab
import pylab as pl
Use canonical matplotlib.pyplot as plt.import tables
import tables as tb
import fipy
import fipy as fp
import numpy
import numpy as np
import scipy
import scipy as spy
import matplotlib.pylot as plt
|
<commit_before>import tables
import tables as tb
import fipy
import fipy as fp
import numpy
import numpy as np
import scipy
import scipy as sp
import pylab
import pylab as pl
<commit_msg>Use canonical matplotlib.pyplot as plt.<commit_after>import tables
import tables as tb
import fipy
import fipy as fp
import numpy
import numpy as np
import scipy
import scipy as spy
import matplotlib.pylot as plt
|
c5b1908e0351ef558d127802dd1f114ee0e42f77
|
dist/docker/redhat/docker-entrypoint.py
|
dist/docker/redhat/docker-entrypoint.py
|
#!/usr/bin/env python3
import os
import scyllasetup
import logging
import commandlineparser
logging.basicConfig(filename="/var/log/scylla/docker-entrypoint.log", level=logging.DEBUG, format="%(message)s")
try:
arguments = commandlineparser.parse()
setup = scyllasetup.ScyllaSetup(arguments)
setup.developerMode()
setup.cpuSet()
setup.io()
setup.scyllaYAML()
setup.cqlshrc()
setup.arguments()
os.system("/usr/bin/supervisord -c /etc/supervisord.conf")
except:
logging.exception('failed!')
|
#!/usr/bin/env python3
import os
import sys
import scyllasetup
import logging
import commandlineparser
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, format="%(message)s")
try:
arguments = commandlineparser.parse()
setup = scyllasetup.ScyllaSetup(arguments)
setup.developerMode()
setup.cpuSet()
setup.io()
setup.scyllaYAML()
setup.cqlshrc()
setup.arguments()
os.system("/usr/bin/supervisord -c /etc/supervisord.conf")
except:
logging.exception('failed!')
|
Use stdout as logging output
|
dist/docker: Use stdout as logging output
If early startup fails in docker-entrypoint.py, the container does not
start. It's therefore not very helpful to log to a file _within_ the
container...
Message-Id: <f797be0a54dde0f854601eff980092b84c77ada6@scylladb.com>
|
Python
|
agpl-3.0
|
avikivity/scylla,avikivity/scylla,scylladb/scylla,duarten/scylla,scylladb/scylla,duarten/scylla,duarten/scylla,scylladb/scylla,avikivity/scylla,scylladb/scylla
|
#!/usr/bin/env python3
import os
import scyllasetup
import logging
import commandlineparser
logging.basicConfig(filename="/var/log/scylla/docker-entrypoint.log", level=logging.DEBUG, format="%(message)s")
try:
arguments = commandlineparser.parse()
setup = scyllasetup.ScyllaSetup(arguments)
setup.developerMode()
setup.cpuSet()
setup.io()
setup.scyllaYAML()
setup.cqlshrc()
setup.arguments()
os.system("/usr/bin/supervisord -c /etc/supervisord.conf")
except:
logging.exception('failed!')
dist/docker: Use stdout as logging output
If early startup fails in docker-entrypoint.py, the container does not
start. It's therefore not very helpful to log to a file _within_ the
container...
Message-Id: <f797be0a54dde0f854601eff980092b84c77ada6@scylladb.com>
|
#!/usr/bin/env python3
import os
import sys
import scyllasetup
import logging
import commandlineparser
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, format="%(message)s")
try:
arguments = commandlineparser.parse()
setup = scyllasetup.ScyllaSetup(arguments)
setup.developerMode()
setup.cpuSet()
setup.io()
setup.scyllaYAML()
setup.cqlshrc()
setup.arguments()
os.system("/usr/bin/supervisord -c /etc/supervisord.conf")
except:
logging.exception('failed!')
|
<commit_before>#!/usr/bin/env python3
import os
import scyllasetup
import logging
import commandlineparser
logging.basicConfig(filename="/var/log/scylla/docker-entrypoint.log", level=logging.DEBUG, format="%(message)s")
try:
arguments = commandlineparser.parse()
setup = scyllasetup.ScyllaSetup(arguments)
setup.developerMode()
setup.cpuSet()
setup.io()
setup.scyllaYAML()
setup.cqlshrc()
setup.arguments()
os.system("/usr/bin/supervisord -c /etc/supervisord.conf")
except:
logging.exception('failed!')
<commit_msg>dist/docker: Use stdout as logging output
If early startup fails in docker-entrypoint.py, the container does not
start. It's therefore not very helpful to log to a file _within_ the
container...
Message-Id: <f797be0a54dde0f854601eff980092b84c77ada6@scylladb.com><commit_after>
|
#!/usr/bin/env python3
import os
import sys
import scyllasetup
import logging
import commandlineparser
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, format="%(message)s")
try:
arguments = commandlineparser.parse()
setup = scyllasetup.ScyllaSetup(arguments)
setup.developerMode()
setup.cpuSet()
setup.io()
setup.scyllaYAML()
setup.cqlshrc()
setup.arguments()
os.system("/usr/bin/supervisord -c /etc/supervisord.conf")
except:
logging.exception('failed!')
|
#!/usr/bin/env python3
import os
import scyllasetup
import logging
import commandlineparser
logging.basicConfig(filename="/var/log/scylla/docker-entrypoint.log", level=logging.DEBUG, format="%(message)s")
try:
arguments = commandlineparser.parse()
setup = scyllasetup.ScyllaSetup(arguments)
setup.developerMode()
setup.cpuSet()
setup.io()
setup.scyllaYAML()
setup.cqlshrc()
setup.arguments()
os.system("/usr/bin/supervisord -c /etc/supervisord.conf")
except:
logging.exception('failed!')
dist/docker: Use stdout as logging output
If early startup fails in docker-entrypoint.py, the container does not
start. It's therefore not very helpful to log to a file _within_ the
container...
Message-Id: <f797be0a54dde0f854601eff980092b84c77ada6@scylladb.com>#!/usr/bin/env python3
import os
import sys
import scyllasetup
import logging
import commandlineparser
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, format="%(message)s")
try:
arguments = commandlineparser.parse()
setup = scyllasetup.ScyllaSetup(arguments)
setup.developerMode()
setup.cpuSet()
setup.io()
setup.scyllaYAML()
setup.cqlshrc()
setup.arguments()
os.system("/usr/bin/supervisord -c /etc/supervisord.conf")
except:
logging.exception('failed!')
|
<commit_before>#!/usr/bin/env python3
import os
import scyllasetup
import logging
import commandlineparser
logging.basicConfig(filename="/var/log/scylla/docker-entrypoint.log", level=logging.DEBUG, format="%(message)s")
try:
arguments = commandlineparser.parse()
setup = scyllasetup.ScyllaSetup(arguments)
setup.developerMode()
setup.cpuSet()
setup.io()
setup.scyllaYAML()
setup.cqlshrc()
setup.arguments()
os.system("/usr/bin/supervisord -c /etc/supervisord.conf")
except:
logging.exception('failed!')
<commit_msg>dist/docker: Use stdout as logging output
If early startup fails in docker-entrypoint.py, the container does not
start. It's therefore not very helpful to log to a file _within_ the
container...
Message-Id: <f797be0a54dde0f854601eff980092b84c77ada6@scylladb.com><commit_after>#!/usr/bin/env python3
import os
import sys
import scyllasetup
import logging
import commandlineparser
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, format="%(message)s")
try:
arguments = commandlineparser.parse()
setup = scyllasetup.ScyllaSetup(arguments)
setup.developerMode()
setup.cpuSet()
setup.io()
setup.scyllaYAML()
setup.cqlshrc()
setup.arguments()
os.system("/usr/bin/supervisord -c /etc/supervisord.conf")
except:
logging.exception('failed!')
|
f2cbd4493cdfaed87f45cffdbc7fe471a4f6e06c
|
tools/perf/page_sets/oopif_basic_page_set.py
|
tools/perf/page_sets/oopif_basic_page_set.py
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
class OopifBasicPageSet(story.StorySet):
""" Basic set of pages used to measure performance of out-of-process
iframes.
"""
def __init__(self):
super(OopifBasicPageSet, self).__init__(
archive_data_file='data/oopif_basic.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
urls = [
'http://www.cnn.com',
'http://www.ebay.com',
'http://booking.com',
'http://www.rei.com/',
'http://www.fifa.com/',
# Disabled because it is flaky on Windows and Android
#'http://arstechnica.com/',
'http://www.nationalgeographic.com/',
# Cross-site heavy! Enable them once they render without crashing.
#'http://www.nba.com/',
#'http://www.phonearena.com/',
#'http://slickdeals.net/',
#'http://www.163.com/',
]
for url in urls:
self.AddStory(page.Page(url, self))
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
class OopifBasicPageSet(story.StorySet):
""" Basic set of pages used to measure performance of out-of-process
iframes.
"""
def __init__(self):
super(OopifBasicPageSet, self).__init__(
archive_data_file='data/oopif_basic.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
urls = [
'http://www.cnn.com',
'http://www.ebay.com',
'http://booking.com',
# Disabled because it causes flaky runs https://crbug.com/522870
#'http://www.rei.com/',
'http://www.fifa.com/',
# Disabled because it is flaky on Windows and Android
#'http://arstechnica.com/',
'http://www.nationalgeographic.com/',
# Cross-site heavy! Enable them once they render without crashing.
#'http://www.nba.com/',
#'http://www.phonearena.com/',
#'http://slickdeals.net/',
#'http://www.163.com/',
]
for url in urls:
self.AddStory(page.Page(url, self))
|
Disable rei.com on oopif benchmarks.
|
Disable rei.com on oopif benchmarks.
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect
BUG=522870
Review URL: https://codereview.chromium.org/1334733002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#348251}
|
Python
|
bsd-3-clause
|
ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
class OopifBasicPageSet(story.StorySet):
""" Basic set of pages used to measure performance of out-of-process
iframes.
"""
def __init__(self):
super(OopifBasicPageSet, self).__init__(
archive_data_file='data/oopif_basic.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
urls = [
'http://www.cnn.com',
'http://www.ebay.com',
'http://booking.com',
'http://www.rei.com/',
'http://www.fifa.com/',
# Disabled because it is flaky on Windows and Android
#'http://arstechnica.com/',
'http://www.nationalgeographic.com/',
# Cross-site heavy! Enable them once they render without crashing.
#'http://www.nba.com/',
#'http://www.phonearena.com/',
#'http://slickdeals.net/',
#'http://www.163.com/',
]
for url in urls:
self.AddStory(page.Page(url, self))
Disable rei.com on oopif benchmarks.
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect
BUG=522870
Review URL: https://codereview.chromium.org/1334733002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#348251}
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
class OopifBasicPageSet(story.StorySet):
""" Basic set of pages used to measure performance of out-of-process
iframes.
"""
def __init__(self):
super(OopifBasicPageSet, self).__init__(
archive_data_file='data/oopif_basic.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
urls = [
'http://www.cnn.com',
'http://www.ebay.com',
'http://booking.com',
# Disabled because it causes flaky runs https://crbug.com/522870
#'http://www.rei.com/',
'http://www.fifa.com/',
# Disabled because it is flaky on Windows and Android
#'http://arstechnica.com/',
'http://www.nationalgeographic.com/',
# Cross-site heavy! Enable them once they render without crashing.
#'http://www.nba.com/',
#'http://www.phonearena.com/',
#'http://slickdeals.net/',
#'http://www.163.com/',
]
for url in urls:
self.AddStory(page.Page(url, self))
|
<commit_before># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
class OopifBasicPageSet(story.StorySet):
""" Basic set of pages used to measure performance of out-of-process
iframes.
"""
def __init__(self):
super(OopifBasicPageSet, self).__init__(
archive_data_file='data/oopif_basic.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
urls = [
'http://www.cnn.com',
'http://www.ebay.com',
'http://booking.com',
'http://www.rei.com/',
'http://www.fifa.com/',
# Disabled because it is flaky on Windows and Android
#'http://arstechnica.com/',
'http://www.nationalgeographic.com/',
# Cross-site heavy! Enable them once they render without crashing.
#'http://www.nba.com/',
#'http://www.phonearena.com/',
#'http://slickdeals.net/',
#'http://www.163.com/',
]
for url in urls:
self.AddStory(page.Page(url, self))
<commit_msg>Disable rei.com on oopif benchmarks.
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect
BUG=522870
Review URL: https://codereview.chromium.org/1334733002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#348251}<commit_after>
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
class OopifBasicPageSet(story.StorySet):
""" Basic set of pages used to measure performance of out-of-process
iframes.
"""
def __init__(self):
super(OopifBasicPageSet, self).__init__(
archive_data_file='data/oopif_basic.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
urls = [
'http://www.cnn.com',
'http://www.ebay.com',
'http://booking.com',
# Disabled because it causes flaky runs https://crbug.com/522870
#'http://www.rei.com/',
'http://www.fifa.com/',
# Disabled because it is flaky on Windows and Android
#'http://arstechnica.com/',
'http://www.nationalgeographic.com/',
# Cross-site heavy! Enable them once they render without crashing.
#'http://www.nba.com/',
#'http://www.phonearena.com/',
#'http://slickdeals.net/',
#'http://www.163.com/',
]
for url in urls:
self.AddStory(page.Page(url, self))
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
class OopifBasicPageSet(story.StorySet):
""" Basic set of pages used to measure performance of out-of-process
iframes.
"""
def __init__(self):
super(OopifBasicPageSet, self).__init__(
archive_data_file='data/oopif_basic.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
urls = [
'http://www.cnn.com',
'http://www.ebay.com',
'http://booking.com',
'http://www.rei.com/',
'http://www.fifa.com/',
# Disabled because it is flaky on Windows and Android
#'http://arstechnica.com/',
'http://www.nationalgeographic.com/',
# Cross-site heavy! Enable them once they render without crashing.
#'http://www.nba.com/',
#'http://www.phonearena.com/',
#'http://slickdeals.net/',
#'http://www.163.com/',
]
for url in urls:
self.AddStory(page.Page(url, self))
Disable rei.com on oopif benchmarks.
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect
BUG=522870
Review URL: https://codereview.chromium.org/1334733002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#348251}# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
class OopifBasicPageSet(story.StorySet):
""" Basic set of pages used to measure performance of out-of-process
iframes.
"""
def __init__(self):
super(OopifBasicPageSet, self).__init__(
archive_data_file='data/oopif_basic.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
urls = [
'http://www.cnn.com',
'http://www.ebay.com',
'http://booking.com',
# Disabled because it causes flaky runs https://crbug.com/522870
#'http://www.rei.com/',
'http://www.fifa.com/',
# Disabled because it is flaky on Windows and Android
#'http://arstechnica.com/',
'http://www.nationalgeographic.com/',
# Cross-site heavy! Enable them once they render without crashing.
#'http://www.nba.com/',
#'http://www.phonearena.com/',
#'http://slickdeals.net/',
#'http://www.163.com/',
]
for url in urls:
self.AddStory(page.Page(url, self))
|
<commit_before># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
class OopifBasicPageSet(story.StorySet):
""" Basic set of pages used to measure performance of out-of-process
iframes.
"""
def __init__(self):
super(OopifBasicPageSet, self).__init__(
archive_data_file='data/oopif_basic.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
urls = [
'http://www.cnn.com',
'http://www.ebay.com',
'http://booking.com',
'http://www.rei.com/',
'http://www.fifa.com/',
# Disabled because it is flaky on Windows and Android
#'http://arstechnica.com/',
'http://www.nationalgeographic.com/',
# Cross-site heavy! Enable them once they render without crashing.
#'http://www.nba.com/',
#'http://www.phonearena.com/',
#'http://slickdeals.net/',
#'http://www.163.com/',
]
for url in urls:
self.AddStory(page.Page(url, self))
<commit_msg>Disable rei.com on oopif benchmarks.
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect
BUG=522870
Review URL: https://codereview.chromium.org/1334733002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#348251}<commit_after># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page
from telemetry import story
class OopifBasicPageSet(story.StorySet):
""" Basic set of pages used to measure performance of out-of-process
iframes.
"""
def __init__(self):
super(OopifBasicPageSet, self).__init__(
archive_data_file='data/oopif_basic.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
urls = [
'http://www.cnn.com',
'http://www.ebay.com',
'http://booking.com',
# Disabled because it causes flaky runs https://crbug.com/522870
#'http://www.rei.com/',
'http://www.fifa.com/',
# Disabled because it is flaky on Windows and Android
#'http://arstechnica.com/',
'http://www.nationalgeographic.com/',
# Cross-site heavy! Enable them once they render without crashing.
#'http://www.nba.com/',
#'http://www.phonearena.com/',
#'http://slickdeals.net/',
#'http://www.163.com/',
]
for url in urls:
self.AddStory(page.Page(url, self))
|
b3d066e9ff5bc0508eec4fc9f317b7df112e2218
|
test/test_url_subcommand.py
|
test/test_url_subcommand.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import path
import pytest
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from sqliteschema import SqliteSchemaExtractor
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
extractor = SqliteSchemaExtractor(db_path)
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
extractor = SqliteSchemaExtractor(db_path)
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
|
Remove imports that no longer used
|
Remove imports that no longer used
|
Python
|
mit
|
thombashi/sqlitebiter,thombashi/sqlitebiter
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import path
import pytest
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from sqliteschema import SqliteSchemaExtractor
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
extractor = SqliteSchemaExtractor(db_path)
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
Remove imports that no longer used
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
extractor = SqliteSchemaExtractor(db_path)
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
|
<commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import path
import pytest
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from sqliteschema import SqliteSchemaExtractor
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
extractor = SqliteSchemaExtractor(db_path)
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
<commit_msg>Remove imports that no longer used<commit_after>
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
extractor = SqliteSchemaExtractor(db_path)
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import path
import pytest
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from sqliteschema import SqliteSchemaExtractor
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
extractor = SqliteSchemaExtractor(db_path)
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
Remove imports that no longer used# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
extractor = SqliteSchemaExtractor(db_path)
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
|
<commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import path
import pytest
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from sqliteschema import SqliteSchemaExtractor
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
extractor = SqliteSchemaExtractor(db_path)
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
<commit_msg>Remove imports that no longer used<commit_after># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
import responses
import simplesqlite
from click.testing import CliRunner
from sqlitebiter._enum import ExitCode
from sqlitebiter.sqlitebiter import cmd
from .common import print_traceback
from .dataset import complex_json
class Test_TableUrlLoader(object):
@responses.activate
def test_normal(self):
url = "https://example.com/complex_jeson.json"
responses.add(
responses.GET,
url,
body=complex_json,
content_type='text/plain; charset=utf-8',
status=200)
runner = CliRunner()
db_path = "test_complex_json.sqlite"
with runner.isolated_filesystem():
result = runner.invoke(cmd, ["url", url, "-o", db_path])
print_traceback(result)
assert result.exit_code == ExitCode.SUCCESS
extractor = SqliteSchemaExtractor(db_path)
con = simplesqlite.SimpleSQLite(db_path, "r")
expected = set([
'ratings', 'screenshots_4', 'screenshots_3', 'screenshots_5', 'screenshots_1',
'screenshots_2', 'tags', 'versions', 'root'])
assert set(con.get_table_name_list()) == expected
|
0c7c19a9edffea9474b0aa6379bafef283425483
|
reboot_router_claro3G.py
|
reboot_router_claro3G.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2 as http
# URL with GET to reboot router
url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1'
# Handling HTTP Cookie - Session Cookie Router
cookieprocessor = http.HTTPCookieProcessor()
# Customize it Opener with CookieProcessor
opener = http.build_opener(cookieprocessor)
# Using here Opener + CookieProcessor
http.install_opener(opener)
# Open URL with Opener above
payload_router = http.urlopen(url_get_reboot)
# Print payload Request URL
print "Payload %s" % payload_router.read()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2 as http
# URL with GET to reboot router or status main page to tests
#url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1'
url_get_status = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Fstatus.htm&rd2=%2Fuir%2Fwanst.htm&Nrd=1'
url_root = url_get_status
# Handling HTTP Cookie - Session Cookie Router
cookieprocessor = http.HTTPCookieProcessor()
# Customize it Opener with CookieProcessor
opener = http.build_opener(cookieprocessor)
# Using here Opener + CookieProcessor
http.install_opener(opener)
# Open URL with Opener above
payload_router = http.urlopen(url_root)
# Print payload Request URL
print "Payload %s" % payload_router.read()
|
Add variavel url_root to url of the reboot router and status mainpage router
|
Add variavel url_root to url of the reboot router and status mainpage router
|
Python
|
apache-2.0
|
cleitonbueno/reboot_router
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2 as http
# URL with GET to reboot router
url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1'
# Handling HTTP Cookie - Session Cookie Router
cookieprocessor = http.HTTPCookieProcessor()
# Customize it Opener with CookieProcessor
opener = http.build_opener(cookieprocessor)
# Using here Opener + CookieProcessor
http.install_opener(opener)
# Open URL with Opener above
payload_router = http.urlopen(url_get_reboot)
# Print payload Request URL
print "Payload %s" % payload_router.read()
Add variavel url_root to url of the reboot router and status mainpage router
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2 as http
# URL with GET to reboot router or status main page to tests
#url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1'
url_get_status = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Fstatus.htm&rd2=%2Fuir%2Fwanst.htm&Nrd=1'
url_root = url_get_status
# Handling HTTP Cookie - Session Cookie Router
cookieprocessor = http.HTTPCookieProcessor()
# Customize it Opener with CookieProcessor
opener = http.build_opener(cookieprocessor)
# Using here Opener + CookieProcessor
http.install_opener(opener)
# Open URL with Opener above
payload_router = http.urlopen(url_root)
# Print payload Request URL
print "Payload %s" % payload_router.read()
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2 as http
# URL with GET to reboot router
url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1'
# Handling HTTP Cookie - Session Cookie Router
cookieprocessor = http.HTTPCookieProcessor()
# Customize it Opener with CookieProcessor
opener = http.build_opener(cookieprocessor)
# Using here Opener + CookieProcessor
http.install_opener(opener)
# Open URL with Opener above
payload_router = http.urlopen(url_get_reboot)
# Print payload Request URL
print "Payload %s" % payload_router.read()
<commit_msg>Add variavel url_root to url of the reboot router and status mainpage router<commit_after>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2 as http
# URL with GET to reboot router or status main page to tests
#url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1'
url_get_status = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Fstatus.htm&rd2=%2Fuir%2Fwanst.htm&Nrd=1'
url_root = url_get_status
# Handling HTTP Cookie - Session Cookie Router
cookieprocessor = http.HTTPCookieProcessor()
# Customize it Opener with CookieProcessor
opener = http.build_opener(cookieprocessor)
# Using here Opener + CookieProcessor
http.install_opener(opener)
# Open URL with Opener above
payload_router = http.urlopen(url_root)
# Print payload Request URL
print "Payload %s" % payload_router.read()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2 as http
# URL with GET to reboot router
url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1'
# Handling HTTP Cookie - Session Cookie Router
cookieprocessor = http.HTTPCookieProcessor()
# Customize it Opener with CookieProcessor
opener = http.build_opener(cookieprocessor)
# Using here Opener + CookieProcessor
http.install_opener(opener)
# Open URL with Opener above
payload_router = http.urlopen(url_get_reboot)
# Print payload Request URL
print "Payload %s" % payload_router.read()
Add variavel url_root to url of the reboot router and status mainpage router#! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2 as http
# URL with GET to reboot router or status main page to tests
#url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1'
url_get_status = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Fstatus.htm&rd2=%2Fuir%2Fwanst.htm&Nrd=1'
url_root = url_get_status
# Handling HTTP Cookie - Session Cookie Router
cookieprocessor = http.HTTPCookieProcessor()
# Customize it Opener with CookieProcessor
opener = http.build_opener(cookieprocessor)
# Using here Opener + CookieProcessor
http.install_opener(opener)
# Open URL with Opener above
payload_router = http.urlopen(url_root)
# Print payload Request URL
print "Payload %s" % payload_router.read()
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2 as http
# URL with GET to reboot router
url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1'
# Handling HTTP Cookie - Session Cookie Router
cookieprocessor = http.HTTPCookieProcessor()
# Customize it Opener with CookieProcessor
opener = http.build_opener(cookieprocessor)
# Using here Opener + CookieProcessor
http.install_opener(opener)
# Open URL with Opener above
payload_router = http.urlopen(url_get_reboot)
# Print payload Request URL
print "Payload %s" % payload_router.read()
<commit_msg>Add variavel url_root to url of the reboot router and status mainpage router<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2 as http
# URL with GET to reboot router or status main page to tests
#url_get_reboot = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Frebo.htm?rc=&Nrd=0&Nsm=1'
url_get_status = 'http://10.11.12.254/log/in?un=admin&pw=admin12&rd=%2Fuir%2Fstatus.htm&rd2=%2Fuir%2Fwanst.htm&Nrd=1'
url_root = url_get_status
# Handling HTTP Cookie - Session Cookie Router
cookieprocessor = http.HTTPCookieProcessor()
# Customize it Opener with CookieProcessor
opener = http.build_opener(cookieprocessor)
# Using here Opener + CookieProcessor
http.install_opener(opener)
# Open URL with Opener above
payload_router = http.urlopen(url_root)
# Print payload Request URL
print "Payload %s" % payload_router.read()
|
287a13d30ab70dd6dd9ee7e59021d309d10839bf
|
examples/tox21/tox21_tensorgraph_graph_conv.py
|
examples/tox21/tox21_tensorgraph_graph_conv.py
|
"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import GraphConvTensorGraph
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model = GraphConvTensorGraph(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
|
"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import GraphConvModel
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model = GraphConvModel(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
|
Fix GraphConvTensorGraph to GraphConvModel in tox21
|
Fix GraphConvTensorGraph to GraphConvModel in tox21
|
Python
|
mit
|
ktaneishi/deepchem,Agent007/deepchem,miaecle/deepchem,lilleswing/deepchem,ktaneishi/deepchem,Agent007/deepchem,Agent007/deepchem,deepchem/deepchem,peastman/deepchem,miaecle/deepchem,miaecle/deepchem,peastman/deepchem,lilleswing/deepchem,ktaneishi/deepchem,lilleswing/deepchem,deepchem/deepchem
|
"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import GraphConvTensorGraph
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model = GraphConvTensorGraph(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
Fix GraphConvTensorGraph to GraphConvModel in tox21
|
"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import GraphConvModel
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model = GraphConvModel(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
|
<commit_before>"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import GraphConvTensorGraph
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model = GraphConvTensorGraph(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
<commit_msg>Fix GraphConvTensorGraph to GraphConvModel in tox21<commit_after>
|
"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import GraphConvModel
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model = GraphConvModel(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
|
"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import GraphConvTensorGraph
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model = GraphConvTensorGraph(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
Fix GraphConvTensorGraph to GraphConvModel in tox21"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import GraphConvModel
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model = GraphConvModel(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
|
<commit_before>"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import GraphConvTensorGraph
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model = GraphConvTensorGraph(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
<commit_msg>Fix GraphConvTensorGraph to GraphConvModel in tox21<commit_after>"""
Script that trains graph-conv models on Tox21 dataset.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from deepchem.molnet import load_tox21
from deepchem.models.tensorgraph.models.graph_models import GraphConvModel
model_dir = "/tmp/graph_conv"
# Load Tox21 dataset
tox21_tasks, tox21_datasets, transformers = load_tox21(featurizer='GraphConv')
train_dataset, valid_dataset, test_dataset = tox21_datasets
print(train_dataset.data_dir)
print(valid_dataset.data_dir)
# Fit models
metric = dc.metrics.Metric(
dc.metrics.roc_auc_score, np.mean, mode="classification")
# Batch size of models
batch_size = 50
model = GraphConvModel(
len(tox21_tasks), batch_size=batch_size, mode='classification')
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
|
38254c64bf94f5c1570a129cfe41f94dd88fb780
|
config/regenerate_launch_files.py
|
config/regenerate_launch_files.py
|
#!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
unit=" files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
Change unit from "it" to "files"
|
Change unit from "it" to "files"
|
Python
|
mit
|
masasin/spirit,masasin/spirit
|
#!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
Change unit from "it" to "files"
|
#!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
unit=" files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
<commit_msg>Change unit from "it" to "files"<commit_after>
|
#!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
unit=" files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
Change unit from "it" to "files"#!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
unit=" files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
<commit_msg>Change unit from "it" to "files"<commit_after>#!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files",
unit=" files",
leave=True):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
|
179ecf15c678c7d4d5e19cc453c4507481298747
|
contrib/performance/httpclient.py
|
contrib/performance/httpclient.py
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from zope.interface import implements
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred, succeed
from twisted.web.iweb import IBodyProducer
class _DiscardReader(Protocol):
def __init__(self, finished):
self.finished = finished
def dataReceived(self, bytes):
pass
def connectionLost(self, reason):
self.finished.callback(None)
def readBody(response):
if response.length == 0:
return succeed(None)
finished = Deferred()
response.deliverBody(_DiscardReader(finished))
return finished
class StringProducer(object):
implements(IBodyProducer)
def __init__(self, body):
self._body = body
self.length = len(self._body)
def startProducing(self, consumer):
consumer.write(self._body)
return succeed(None)
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from StringIO import StringIO
from zope.interface import implements
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred, succeed
from twisted.web.iweb import IBodyProducer
class _BufferReader(Protocol):
def __init__(self, finished):
self.finished = finished
self.received = StringIO()
def dataReceived(self, bytes):
self.received.write(bytes)
def connectionLost(self, reason):
self.finished.callback(self.received.getvalue())
def readBody(response):
if response.length == 0:
return succeed(None)
finished = Deferred()
response.deliverBody(_BufferReader(finished))
return finished
class StringProducer(object):
implements(IBodyProducer)
def __init__(self, body):
self._body = body
self.length = len(self._body)
def startProducing(self, consumer):
consumer.write(self._body)
return succeed(None)
|
Make the response body available via readBody
|
Make the response body available via readBody
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6660 e27351fd-9f3e-4f54-a53b-843176b1656c
|
Python
|
apache-2.0
|
trevor/calendarserver,trevor/calendarserver,trevor/calendarserver
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from zope.interface import implements
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred, succeed
from twisted.web.iweb import IBodyProducer
class _DiscardReader(Protocol):
def __init__(self, finished):
self.finished = finished
def dataReceived(self, bytes):
pass
def connectionLost(self, reason):
self.finished.callback(None)
def readBody(response):
if response.length == 0:
return succeed(None)
finished = Deferred()
response.deliverBody(_DiscardReader(finished))
return finished
class StringProducer(object):
implements(IBodyProducer)
def __init__(self, body):
self._body = body
self.length = len(self._body)
def startProducing(self, consumer):
consumer.write(self._body)
return succeed(None)
Make the response body available via readBody
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6660 e27351fd-9f3e-4f54-a53b-843176b1656c
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from StringIO import StringIO
from zope.interface import implements
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred, succeed
from twisted.web.iweb import IBodyProducer
class _BufferReader(Protocol):
def __init__(self, finished):
self.finished = finished
self.received = StringIO()
def dataReceived(self, bytes):
self.received.write(bytes)
def connectionLost(self, reason):
self.finished.callback(self.received.getvalue())
def readBody(response):
if response.length == 0:
return succeed(None)
finished = Deferred()
response.deliverBody(_BufferReader(finished))
return finished
class StringProducer(object):
implements(IBodyProducer)
def __init__(self, body):
self._body = body
self.length = len(self._body)
def startProducing(self, consumer):
consumer.write(self._body)
return succeed(None)
|
<commit_before>##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from zope.interface import implements
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred, succeed
from twisted.web.iweb import IBodyProducer
class _DiscardReader(Protocol):
def __init__(self, finished):
self.finished = finished
def dataReceived(self, bytes):
pass
def connectionLost(self, reason):
self.finished.callback(None)
def readBody(response):
if response.length == 0:
return succeed(None)
finished = Deferred()
response.deliverBody(_DiscardReader(finished))
return finished
class StringProducer(object):
implements(IBodyProducer)
def __init__(self, body):
self._body = body
self.length = len(self._body)
def startProducing(self, consumer):
consumer.write(self._body)
return succeed(None)
<commit_msg>Make the response body available via readBody
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6660 e27351fd-9f3e-4f54-a53b-843176b1656c<commit_after>
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from StringIO import StringIO
from zope.interface import implements
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred, succeed
from twisted.web.iweb import IBodyProducer
class _BufferReader(Protocol):
def __init__(self, finished):
self.finished = finished
self.received = StringIO()
def dataReceived(self, bytes):
self.received.write(bytes)
def connectionLost(self, reason):
self.finished.callback(self.received.getvalue())
def readBody(response):
if response.length == 0:
return succeed(None)
finished = Deferred()
response.deliverBody(_BufferReader(finished))
return finished
class StringProducer(object):
implements(IBodyProducer)
def __init__(self, body):
self._body = body
self.length = len(self._body)
def startProducing(self, consumer):
consumer.write(self._body)
return succeed(None)
|
##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from zope.interface import implements
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred, succeed
from twisted.web.iweb import IBodyProducer
class _DiscardReader(Protocol):
def __init__(self, finished):
self.finished = finished
def dataReceived(self, bytes):
pass
def connectionLost(self, reason):
self.finished.callback(None)
def readBody(response):
if response.length == 0:
return succeed(None)
finished = Deferred()
response.deliverBody(_DiscardReader(finished))
return finished
class StringProducer(object):
implements(IBodyProducer)
def __init__(self, body):
self._body = body
self.length = len(self._body)
def startProducing(self, consumer):
consumer.write(self._body)
return succeed(None)
Make the response body available via readBody
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6660 e27351fd-9f3e-4f54-a53b-843176b1656c##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from StringIO import StringIO
from zope.interface import implements
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred, succeed
from twisted.web.iweb import IBodyProducer
class _BufferReader(Protocol):
def __init__(self, finished):
self.finished = finished
self.received = StringIO()
def dataReceived(self, bytes):
self.received.write(bytes)
def connectionLost(self, reason):
self.finished.callback(self.received.getvalue())
def readBody(response):
if response.length == 0:
return succeed(None)
finished = Deferred()
response.deliverBody(_BufferReader(finished))
return finished
class StringProducer(object):
implements(IBodyProducer)
def __init__(self, body):
self._body = body
self.length = len(self._body)
def startProducing(self, consumer):
consumer.write(self._body)
return succeed(None)
|
<commit_before>##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from zope.interface import implements
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred, succeed
from twisted.web.iweb import IBodyProducer
class _DiscardReader(Protocol):
def __init__(self, finished):
self.finished = finished
def dataReceived(self, bytes):
pass
def connectionLost(self, reason):
self.finished.callback(None)
def readBody(response):
if response.length == 0:
return succeed(None)
finished = Deferred()
response.deliverBody(_DiscardReader(finished))
return finished
class StringProducer(object):
implements(IBodyProducer)
def __init__(self, body):
self._body = body
self.length = len(self._body)
def startProducing(self, consumer):
consumer.write(self._body)
return succeed(None)
<commit_msg>Make the response body available via readBody
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6660 e27351fd-9f3e-4f54-a53b-843176b1656c<commit_after>##
# Copyright (c) 2010 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from StringIO import StringIO
from zope.interface import implements
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred, succeed
from twisted.web.iweb import IBodyProducer
class _BufferReader(Protocol):
def __init__(self, finished):
self.finished = finished
self.received = StringIO()
def dataReceived(self, bytes):
self.received.write(bytes)
def connectionLost(self, reason):
self.finished.callback(self.received.getvalue())
def readBody(response):
if response.length == 0:
return succeed(None)
finished = Deferred()
response.deliverBody(_BufferReader(finished))
return finished
class StringProducer(object):
implements(IBodyProducer)
def __init__(self, body):
self._body = body
self.length = len(self._body)
def startProducing(self, consumer):
consumer.write(self._body)
return succeed(None)
|
ba60384a1f4232a2d9e1d04fbf95a5841b183d3f
|
lymph/services/scheduler.py
|
lymph/services/scheduler.py
|
import gevent
import msgpack
import redis
import time
from lymph.core.interfaces import Interface
from lymph.core.decorators import rpc
class Scheduler(Interface):
service_type = 'scheduler'
schedule_key = 'schedule'
def __init__(self, *args, **kwargs):
super(Scheduler, self).__init__(*args, **kwargs)
self.redis = redis.StrictRedis()
def on_start(self):
self.container.spawn(self.loop)
@rpc()
def schedule(self, channel, eta, event_type, payload):
self.redis.zadd(self.schedule_key, eta, msgpack.dumps({
'event_type': event_type,
'payload': payload,
}))
channel.ack()
def loop(self):
while True:
pipe = self.redis.pipeline()
now = int(time.time()) # FIXME: handle timezones
pipe.zrangebyscore(self.schedule_key, 0, now)
pipe.zremrangebyscore(self.schedule_key, 0, now)
events, n = pipe.execute()
for event in events:
event = msgpack.loads(event, encoding='utf-8')
self.emit(event['event_type'], event['payload'])
gevent.sleep(1)
|
import gevent
import msgpack
import redis
import time
from lymph.core.interfaces import Interface
from lymph.core.decorators import rpc
from lymph.utils import make_id
class Scheduler(Interface):
service_type = 'scheduler'
schedule_key = 'schedule'
def __init__(self, *args, **kwargs):
super(Scheduler, self).__init__(*args, **kwargs)
self.redis = redis.StrictRedis()
def on_start(self):
self.container.spawn(self.loop)
@rpc()
def schedule(self, channel, eta, event_type, payload):
self.redis.zadd(self.schedule_key, eta, msgpack.dumps({
'id': make_id(),
'event_type': event_type,
'payload': payload,
}))
channel.ack()
def loop(self):
while True:
pipe = self.redis.pipeline()
now = int(time.time())
pipe.zrangebyscore(self.schedule_key, 0, now)
pipe.zremrangebyscore(self.schedule_key, 0, now)
events, n = pipe.execute()
for event in events:
event = msgpack.loads(event, encoding='utf-8')
self.emit(event['event_type'], event['payload'])
gevent.sleep(1)
|
Include ids for scheduled events to make schedule data unique
|
Include ids for scheduled events to make schedule data unique
|
Python
|
apache-2.0
|
deliveryhero/lymph,alazaro/lymph,itakouna/lymph,lyudmildrx/lymph,itakouna/lymph,mamachanko/lymph,mamachanko/lymph,lyudmildrx/lymph,kstrempel/lymph,mouadino/lymph,emulbreh/lymph,dushyant88/lymph,alazaro/lymph,vpikulik/lymph,emulbreh/lymph,Drahflow/lymph,itakouna/lymph,mamachanko/lymph,torte/lymph,alazaro/lymph,mouadino/lymph,mouadino/lymph,lyudmildrx/lymph
|
import gevent
import msgpack
import redis
import time
from lymph.core.interfaces import Interface
from lymph.core.decorators import rpc
class Scheduler(Interface):
service_type = 'scheduler'
schedule_key = 'schedule'
def __init__(self, *args, **kwargs):
super(Scheduler, self).__init__(*args, **kwargs)
self.redis = redis.StrictRedis()
def on_start(self):
self.container.spawn(self.loop)
@rpc()
def schedule(self, channel, eta, event_type, payload):
self.redis.zadd(self.schedule_key, eta, msgpack.dumps({
'event_type': event_type,
'payload': payload,
}))
channel.ack()
def loop(self):
while True:
pipe = self.redis.pipeline()
now = int(time.time()) # FIXME: handle timezones
pipe.zrangebyscore(self.schedule_key, 0, now)
pipe.zremrangebyscore(self.schedule_key, 0, now)
events, n = pipe.execute()
for event in events:
event = msgpack.loads(event, encoding='utf-8')
self.emit(event['event_type'], event['payload'])
gevent.sleep(1)
Include ids for scheduled events to make schedule data unique
|
import gevent
import msgpack
import redis
import time
from lymph.core.interfaces import Interface
from lymph.core.decorators import rpc
from lymph.utils import make_id
class Scheduler(Interface):
service_type = 'scheduler'
schedule_key = 'schedule'
def __init__(self, *args, **kwargs):
super(Scheduler, self).__init__(*args, **kwargs)
self.redis = redis.StrictRedis()
def on_start(self):
self.container.spawn(self.loop)
@rpc()
def schedule(self, channel, eta, event_type, payload):
self.redis.zadd(self.schedule_key, eta, msgpack.dumps({
'id': make_id(),
'event_type': event_type,
'payload': payload,
}))
channel.ack()
def loop(self):
while True:
pipe = self.redis.pipeline()
now = int(time.time())
pipe.zrangebyscore(self.schedule_key, 0, now)
pipe.zremrangebyscore(self.schedule_key, 0, now)
events, n = pipe.execute()
for event in events:
event = msgpack.loads(event, encoding='utf-8')
self.emit(event['event_type'], event['payload'])
gevent.sleep(1)
|
<commit_before>import gevent
import msgpack
import redis
import time
from lymph.core.interfaces import Interface
from lymph.core.decorators import rpc
class Scheduler(Interface):
service_type = 'scheduler'
schedule_key = 'schedule'
def __init__(self, *args, **kwargs):
super(Scheduler, self).__init__(*args, **kwargs)
self.redis = redis.StrictRedis()
def on_start(self):
self.container.spawn(self.loop)
@rpc()
def schedule(self, channel, eta, event_type, payload):
self.redis.zadd(self.schedule_key, eta, msgpack.dumps({
'event_type': event_type,
'payload': payload,
}))
channel.ack()
def loop(self):
while True:
pipe = self.redis.pipeline()
now = int(time.time()) # FIXME: handle timezones
pipe.zrangebyscore(self.schedule_key, 0, now)
pipe.zremrangebyscore(self.schedule_key, 0, now)
events, n = pipe.execute()
for event in events:
event = msgpack.loads(event, encoding='utf-8')
self.emit(event['event_type'], event['payload'])
gevent.sleep(1)
<commit_msg>Include ids for scheduled events to make schedule data unique<commit_after>
|
import gevent
import msgpack
import redis
import time
from lymph.core.interfaces import Interface
from lymph.core.decorators import rpc
from lymph.utils import make_id
class Scheduler(Interface):
service_type = 'scheduler'
schedule_key = 'schedule'
def __init__(self, *args, **kwargs):
super(Scheduler, self).__init__(*args, **kwargs)
self.redis = redis.StrictRedis()
def on_start(self):
self.container.spawn(self.loop)
@rpc()
def schedule(self, channel, eta, event_type, payload):
self.redis.zadd(self.schedule_key, eta, msgpack.dumps({
'id': make_id(),
'event_type': event_type,
'payload': payload,
}))
channel.ack()
def loop(self):
while True:
pipe = self.redis.pipeline()
now = int(time.time())
pipe.zrangebyscore(self.schedule_key, 0, now)
pipe.zremrangebyscore(self.schedule_key, 0, now)
events, n = pipe.execute()
for event in events:
event = msgpack.loads(event, encoding='utf-8')
self.emit(event['event_type'], event['payload'])
gevent.sleep(1)
|
import gevent
import msgpack
import redis
import time
from lymph.core.interfaces import Interface
from lymph.core.decorators import rpc
class Scheduler(Interface):
service_type = 'scheduler'
schedule_key = 'schedule'
def __init__(self, *args, **kwargs):
super(Scheduler, self).__init__(*args, **kwargs)
self.redis = redis.StrictRedis()
def on_start(self):
self.container.spawn(self.loop)
@rpc()
def schedule(self, channel, eta, event_type, payload):
self.redis.zadd(self.schedule_key, eta, msgpack.dumps({
'event_type': event_type,
'payload': payload,
}))
channel.ack()
def loop(self):
while True:
pipe = self.redis.pipeline()
now = int(time.time()) # FIXME: handle timezones
pipe.zrangebyscore(self.schedule_key, 0, now)
pipe.zremrangebyscore(self.schedule_key, 0, now)
events, n = pipe.execute()
for event in events:
event = msgpack.loads(event, encoding='utf-8')
self.emit(event['event_type'], event['payload'])
gevent.sleep(1)
Include ids for scheduled events to make schedule data uniqueimport gevent
import msgpack
import redis
import time
from lymph.core.interfaces import Interface
from lymph.core.decorators import rpc
from lymph.utils import make_id
class Scheduler(Interface):
service_type = 'scheduler'
schedule_key = 'schedule'
def __init__(self, *args, **kwargs):
super(Scheduler, self).__init__(*args, **kwargs)
self.redis = redis.StrictRedis()
def on_start(self):
self.container.spawn(self.loop)
@rpc()
def schedule(self, channel, eta, event_type, payload):
self.redis.zadd(self.schedule_key, eta, msgpack.dumps({
'id': make_id(),
'event_type': event_type,
'payload': payload,
}))
channel.ack()
def loop(self):
while True:
pipe = self.redis.pipeline()
now = int(time.time())
pipe.zrangebyscore(self.schedule_key, 0, now)
pipe.zremrangebyscore(self.schedule_key, 0, now)
events, n = pipe.execute()
for event in events:
event = msgpack.loads(event, encoding='utf-8')
self.emit(event['event_type'], event['payload'])
gevent.sleep(1)
|
<commit_before>import gevent
import msgpack
import redis
import time
from lymph.core.interfaces import Interface
from lymph.core.decorators import rpc
class Scheduler(Interface):
service_type = 'scheduler'
schedule_key = 'schedule'
def __init__(self, *args, **kwargs):
super(Scheduler, self).__init__(*args, **kwargs)
self.redis = redis.StrictRedis()
def on_start(self):
self.container.spawn(self.loop)
@rpc()
def schedule(self, channel, eta, event_type, payload):
self.redis.zadd(self.schedule_key, eta, msgpack.dumps({
'event_type': event_type,
'payload': payload,
}))
channel.ack()
def loop(self):
while True:
pipe = self.redis.pipeline()
now = int(time.time()) # FIXME: handle timezones
pipe.zrangebyscore(self.schedule_key, 0, now)
pipe.zremrangebyscore(self.schedule_key, 0, now)
events, n = pipe.execute()
for event in events:
event = msgpack.loads(event, encoding='utf-8')
self.emit(event['event_type'], event['payload'])
gevent.sleep(1)
<commit_msg>Include ids for scheduled events to make schedule data unique<commit_after>import gevent
import msgpack
import redis
import time
from lymph.core.interfaces import Interface
from lymph.core.decorators import rpc
from lymph.utils import make_id
class Scheduler(Interface):
service_type = 'scheduler'
schedule_key = 'schedule'
def __init__(self, *args, **kwargs):
super(Scheduler, self).__init__(*args, **kwargs)
self.redis = redis.StrictRedis()
def on_start(self):
self.container.spawn(self.loop)
@rpc()
def schedule(self, channel, eta, event_type, payload):
self.redis.zadd(self.schedule_key, eta, msgpack.dumps({
'id': make_id(),
'event_type': event_type,
'payload': payload,
}))
channel.ack()
def loop(self):
while True:
pipe = self.redis.pipeline()
now = int(time.time())
pipe.zrangebyscore(self.schedule_key, 0, now)
pipe.zremrangebyscore(self.schedule_key, 0, now)
events, n = pipe.execute()
for event in events:
event = msgpack.loads(event, encoding='utf-8')
self.emit(event['event_type'], event['payload'])
gevent.sleep(1)
|
d23ee11cb7deb9ae9ada7b3eca603d3589f9a343
|
stagecraft/apps/datasets/admin/backdrop_user.py
|
stagecraft/apps/datasets/admin/backdrop_user.py
|
from __future__ import unicode_literals
from django.contrib import admin
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email']
list_display = ('email')
admin.site.register(BackdropUser)
|
from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
|
Add user:dataset count to backdrop user admin
|
Add user:dataset count to backdrop user admin
- Sortable by number of data-sets the user has access to
- Vaguely useful, but mainly an exercise in seeing how easily we can customise the output of the manytomany field (not very easily)
|
Python
|
mit
|
alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft
|
from __future__ import unicode_literals
from django.contrib import admin
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email']
list_display = ('email')
admin.site.register(BackdropUser)
Add user:dataset count to backdrop user admin
- Sortable by number of data-sets the user has access to
- Vaguely useful, but mainly an exercise in seeing how easily we can customise the output of the manytomany field (not very easily)
|
from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
|
<commit_before>from __future__ import unicode_literals
from django.contrib import admin
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email']
list_display = ('email')
admin.site.register(BackdropUser)
<commit_msg>Add user:dataset count to backdrop user admin
- Sortable by number of data-sets the user has access to
- Vaguely useful, but mainly an exercise in seeing how easily we can customise the output of the manytomany field (not very easily)<commit_after>
|
from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
|
from __future__ import unicode_literals
from django.contrib import admin
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email']
list_display = ('email')
admin.site.register(BackdropUser)
Add user:dataset count to backdrop user admin
- Sortable by number of data-sets the user has access to
- Vaguely useful, but mainly an exercise in seeing how easily we can customise the output of the manytomany field (not very easily)from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
|
<commit_before>from __future__ import unicode_literals
from django.contrib import admin
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email']
list_display = ('email')
admin.site.register(BackdropUser)
<commit_msg>Add user:dataset count to backdrop user admin
- Sortable by number of data-sets the user has access to
- Vaguely useful, but mainly an exercise in seeing how easily we can customise the output of the manytomany field (not very easily)<commit_after>from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
|
7290b589219587e161380da2f323c581c2b436eb
|
manager/apps/brand/forms.py
|
manager/apps/brand/forms.py
|
from django import forms
class BrandProposalForm(forms.Form):
brand_nm = forms.CharField(max_length=255, label='Brand name')
brand_type = forms.ChoiceField(choices=(('test', '1'), ('test2', '2')))
owner_nm = forms.CharField(
max_length=255, label='Owner name', required=False)
brand_link = forms.URLField(
max_length=255, label='Brand website', required=False)
#Add logo
comments = forms.CharField(
max_length=255, label='Comments', required=False)
sender = forms.EmailField(max_length=255, label='Your mail')
|
from django import forms
from .models import BrandType
class BrandProposalForm(forms.Form):
brand_nm = forms.CharField(max_length=255, label='Brand name')
brand_type = forms.ModelChoiceField(queryset=BrandType.objects.all())
owner_nm = forms.CharField(
max_length=255, label='Owner name', required=False)
brand_link = forms.URLField(
max_length=255, label='Brand website', required=False)
#Add logo
comments = forms.CharField(
max_length=255, label='Comments', required=False)
sender = forms.EmailField(max_length=255, label='Your mail')
|
Fix BrandProposalForm to use BrandType in a choice
|
Fix BrandProposalForm to use BrandType in a choice
|
Python
|
mit
|
okfn/brand-manager,okfn/opd-brand-manager,okfn/brand-manager,okfn/opd-brand-manager
|
from django import forms
class BrandProposalForm(forms.Form):
brand_nm = forms.CharField(max_length=255, label='Brand name')
brand_type = forms.ChoiceField(choices=(('test', '1'), ('test2', '2')))
owner_nm = forms.CharField(
max_length=255, label='Owner name', required=False)
brand_link = forms.URLField(
max_length=255, label='Brand website', required=False)
#Add logo
comments = forms.CharField(
max_length=255, label='Comments', required=False)
sender = forms.EmailField(max_length=255, label='Your mail')
Fix BrandProposalForm to use BrandType in a choice
|
from django import forms
from .models import BrandType
class BrandProposalForm(forms.Form):
brand_nm = forms.CharField(max_length=255, label='Brand name')
brand_type = forms.ModelChoiceField(queryset=BrandType.objects.all())
owner_nm = forms.CharField(
max_length=255, label='Owner name', required=False)
brand_link = forms.URLField(
max_length=255, label='Brand website', required=False)
#Add logo
comments = forms.CharField(
max_length=255, label='Comments', required=False)
sender = forms.EmailField(max_length=255, label='Your mail')
|
<commit_before>from django import forms
class BrandProposalForm(forms.Form):
brand_nm = forms.CharField(max_length=255, label='Brand name')
brand_type = forms.ChoiceField(choices=(('test', '1'), ('test2', '2')))
owner_nm = forms.CharField(
max_length=255, label='Owner name', required=False)
brand_link = forms.URLField(
max_length=255, label='Brand website', required=False)
#Add logo
comments = forms.CharField(
max_length=255, label='Comments', required=False)
sender = forms.EmailField(max_length=255, label='Your mail')
<commit_msg>Fix BrandProposalForm to use BrandType in a choice<commit_after>
|
from django import forms
from .models import BrandType
class BrandProposalForm(forms.Form):
brand_nm = forms.CharField(max_length=255, label='Brand name')
brand_type = forms.ModelChoiceField(queryset=BrandType.objects.all())
owner_nm = forms.CharField(
max_length=255, label='Owner name', required=False)
brand_link = forms.URLField(
max_length=255, label='Brand website', required=False)
#Add logo
comments = forms.CharField(
max_length=255, label='Comments', required=False)
sender = forms.EmailField(max_length=255, label='Your mail')
|
from django import forms
class BrandProposalForm(forms.Form):
brand_nm = forms.CharField(max_length=255, label='Brand name')
brand_type = forms.ChoiceField(choices=(('test', '1'), ('test2', '2')))
owner_nm = forms.CharField(
max_length=255, label='Owner name', required=False)
brand_link = forms.URLField(
max_length=255, label='Brand website', required=False)
#Add logo
comments = forms.CharField(
max_length=255, label='Comments', required=False)
sender = forms.EmailField(max_length=255, label='Your mail')
Fix BrandProposalForm to use BrandType in a choicefrom django import forms
from .models import BrandType
class BrandProposalForm(forms.Form):
brand_nm = forms.CharField(max_length=255, label='Brand name')
brand_type = forms.ModelChoiceField(queryset=BrandType.objects.all())
owner_nm = forms.CharField(
max_length=255, label='Owner name', required=False)
brand_link = forms.URLField(
max_length=255, label='Brand website', required=False)
#Add logo
comments = forms.CharField(
max_length=255, label='Comments', required=False)
sender = forms.EmailField(max_length=255, label='Your mail')
|
<commit_before>from django import forms
class BrandProposalForm(forms.Form):
brand_nm = forms.CharField(max_length=255, label='Brand name')
brand_type = forms.ChoiceField(choices=(('test', '1'), ('test2', '2')))
owner_nm = forms.CharField(
max_length=255, label='Owner name', required=False)
brand_link = forms.URLField(
max_length=255, label='Brand website', required=False)
#Add logo
comments = forms.CharField(
max_length=255, label='Comments', required=False)
sender = forms.EmailField(max_length=255, label='Your mail')
<commit_msg>Fix BrandProposalForm to use BrandType in a choice<commit_after>from django import forms
from .models import BrandType
class BrandProposalForm(forms.Form):
brand_nm = forms.CharField(max_length=255, label='Brand name')
brand_type = forms.ModelChoiceField(queryset=BrandType.objects.all())
owner_nm = forms.CharField(
max_length=255, label='Owner name', required=False)
brand_link = forms.URLField(
max_length=255, label='Brand website', required=False)
#Add logo
comments = forms.CharField(
max_length=255, label='Comments', required=False)
sender = forms.EmailField(max_length=255, label='Your mail')
|
1144519128b881b4530d2caf0f93959040457404
|
version.py
|
version.py
|
major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26
|
major = 0
minor=0
patch=27
branch="master"
timestamp=1376610643.76
|
Tag commit for v0.0.27-master generated by gitmake.py
|
Tag commit for v0.0.27-master generated by gitmake.py
|
Python
|
mit
|
ryansturmer/gitmake
|
major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26Tag commit for v0.0.27-master generated by gitmake.py
|
major = 0
minor=0
patch=27
branch="master"
timestamp=1376610643.76
|
<commit_before>major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26<commit_msg>Tag commit for v0.0.27-master generated by gitmake.py<commit_after>
|
major = 0
minor=0
patch=27
branch="master"
timestamp=1376610643.76
|
major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26Tag commit for v0.0.27-master generated by gitmake.pymajor = 0
minor=0
patch=27
branch="master"
timestamp=1376610643.76
|
<commit_before>major = 0
minor=0
patch=26
branch="master"
timestamp=1376610243.26<commit_msg>Tag commit for v0.0.27-master generated by gitmake.py<commit_after>major = 0
minor=0
patch=27
branch="master"
timestamp=1376610643.76
|
e7d271c41dd713750a8224f0e8f65e2d3b119623
|
polyaxon/auditor/service.py
|
polyaxon/auditor/service.py
|
import activitylogs
import tracker
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
|
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def __init__(self):
self.tracker = None
self.activitylogs = None
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
self.tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
self.activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
import activitylogs
import tracker
self.tracker = tracker
self.activitylogs = activitylogs
|
Move event managers imports to setup in auditor
|
Move event managers imports to setup in auditor
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
import activitylogs
import tracker
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
Move event managers imports to setup in auditor
|
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def __init__(self):
self.tracker = None
self.activitylogs = None
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
self.tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
self.activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
import activitylogs
import tracker
self.tracker = tracker
self.activitylogs = activitylogs
|
<commit_before>import activitylogs
import tracker
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
<commit_msg>Move event managers imports to setup in auditor<commit_after>
|
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def __init__(self):
self.tracker = None
self.activitylogs = None
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
self.tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
self.activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
import activitylogs
import tracker
self.tracker = tracker
self.activitylogs = activitylogs
|
import activitylogs
import tracker
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
Move event managers imports to setup in auditorfrom auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def __init__(self):
self.tracker = None
self.activitylogs = None
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
self.tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
self.activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
import activitylogs
import tracker
self.tracker = tracker
self.activitylogs = activitylogs
|
<commit_before>import activitylogs
import tracker
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
<commit_msg>Move event managers imports to setup in auditor<commit_after>from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def __init__(self):
self.tracker = None
self.activitylogs = None
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
self.tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
self.activitylogs.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
import activitylogs
import tracker
self.tracker = tracker
self.activitylogs = activitylogs
|
a0eb0779cac8e9e26448596ebb557f576bba8b5a
|
test_app/settings.py
|
test_app/settings.py
|
# coding: utf-8
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'registration',
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.contenttypes',
'registration',
'test_app',
)
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = 'aa2^x-^2u9(zaigih^7!3onca_rql1rnk6ec6=sahm*r$vd2-$)=5'
SITE_ID = 1
ROOT_URLCONF = 'test_app.urls'
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ACCOUNT_ACTIVATION_DAYS = 7
EMAIL_HOST = ""
EMAIL_HOST_PASSWORD = ""
EMAIL_HOST_USER = ""
EMAIL_PORT = ""
EMAIL_USE_TLS = False
DEFAULT_FROM_EMAIL = ""
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dr.sqlite3',
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.contenttypes',
'registration',
'test_app',
)
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = '_'
SITE_ID = 1
ROOT_URLCONF = 'test_app.urls'
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ACCOUNT_ACTIVATION_DAYS = 7
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
Reset back to using sqlite default. Add console.EmailBackend default
|
Reset back to using sqlite default. Add console.EmailBackend default
|
Python
|
bsd-3-clause
|
memnonila/django-registration,alawnchen/django-registration,timgraham/django-registration,ei-grad/django-registration,rulz/django-registration,yorkedork/django-registration,nikolas/django-registration,percipient/django-registration,percipient/django-registration,torchingloom/django-registration,torchingloom/django-registration,wda-hb/test,kinsights/django-registration,yorkedork/django-registration,stillmatic/django-registration,allo-/django-registration,wy123123/django-registration,tanjunyen/django-registration,mick-t/django-registration,pando85/django-registration,PSU-OIT-ARC/django-registration,imgmix/django-registration,mick-t/django-registration,tanjunyen/django-registration,PetrDlouhy/django-registration,stillmatic/django-registration,PetrDlouhy/django-registration,wda-hb/test,ei-grad/django-registration,maitho/django-registration,rulz/django-registration,matejkloska/django-registration,sergafts/django-registration,kazitanvirahsan/django-registration,alawnchen/django-registration,PSU-OIT-ARC/django-registration,sergafts/django-registration,timgraham/django-registration,Geffersonvivan/django-registration,pando85/django-registration,imgmix/django-registration,arpitremarkable/django-registration,kinsights/django-registration,furious-luke/django-registration,allo-/django-registration,arpitremarkable/django-registration,kazitanvirahsan/django-registration,nikolas/django-registration,Geffersonvivan/django-registration,furious-luke/django-registration,wy123123/django-registration,erinspace/django-registration,maitho/django-registration,erinspace/django-registration,memnonila/django-registration,matejkloska/django-registration
|
# coding: utf-8
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'registration',
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.contenttypes',
'registration',
'test_app',
)
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = 'aa2^x-^2u9(zaigih^7!3onca_rql1rnk6ec6=sahm*r$vd2-$)=5'
SITE_ID = 1
ROOT_URLCONF = 'test_app.urls'
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ACCOUNT_ACTIVATION_DAYS = 7
EMAIL_HOST = ""
EMAIL_HOST_PASSWORD = ""
EMAIL_HOST_USER = ""
EMAIL_PORT = ""
EMAIL_USE_TLS = False
DEFAULT_FROM_EMAIL = ""Reset back to using sqlite default. Add console.EmailBackend default
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dr.sqlite3',
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.contenttypes',
'registration',
'test_app',
)
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = '_'
SITE_ID = 1
ROOT_URLCONF = 'test_app.urls'
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ACCOUNT_ACTIVATION_DAYS = 7
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
<commit_before># coding: utf-8
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'registration',
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.contenttypes',
'registration',
'test_app',
)
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = 'aa2^x-^2u9(zaigih^7!3onca_rql1rnk6ec6=sahm*r$vd2-$)=5'
SITE_ID = 1
ROOT_URLCONF = 'test_app.urls'
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ACCOUNT_ACTIVATION_DAYS = 7
EMAIL_HOST = ""
EMAIL_HOST_PASSWORD = ""
EMAIL_HOST_USER = ""
EMAIL_PORT = ""
EMAIL_USE_TLS = False
DEFAULT_FROM_EMAIL = ""<commit_msg>Reset back to using sqlite default. Add console.EmailBackend default<commit_after>
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dr.sqlite3',
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.contenttypes',
'registration',
'test_app',
)
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = '_'
SITE_ID = 1
ROOT_URLCONF = 'test_app.urls'
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ACCOUNT_ACTIVATION_DAYS = 7
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
# coding: utf-8
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'registration',
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.contenttypes',
'registration',
'test_app',
)
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = 'aa2^x-^2u9(zaigih^7!3onca_rql1rnk6ec6=sahm*r$vd2-$)=5'
SITE_ID = 1
ROOT_URLCONF = 'test_app.urls'
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ACCOUNT_ACTIVATION_DAYS = 7
EMAIL_HOST = ""
EMAIL_HOST_PASSWORD = ""
EMAIL_HOST_USER = ""
EMAIL_PORT = ""
EMAIL_USE_TLS = False
DEFAULT_FROM_EMAIL = ""Reset back to using sqlite default. Add console.EmailBackend defaultDATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dr.sqlite3',
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.contenttypes',
'registration',
'test_app',
)
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = '_'
SITE_ID = 1
ROOT_URLCONF = 'test_app.urls'
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ACCOUNT_ACTIVATION_DAYS = 7
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
<commit_before># coding: utf-8
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'registration',
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.contenttypes',
'registration',
'test_app',
)
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = 'aa2^x-^2u9(zaigih^7!3onca_rql1rnk6ec6=sahm*r$vd2-$)=5'
SITE_ID = 1
ROOT_URLCONF = 'test_app.urls'
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ACCOUNT_ACTIVATION_DAYS = 7
EMAIL_HOST = ""
EMAIL_HOST_PASSWORD = ""
EMAIL_HOST_USER = ""
EMAIL_PORT = ""
EMAIL_USE_TLS = False
DEFAULT_FROM_EMAIL = ""<commit_msg>Reset back to using sqlite default. Add console.EmailBackend default<commit_after>DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dr.sqlite3',
},
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.contenttypes',
'registration',
'test_app',
)
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = '_'
SITE_ID = 1
ROOT_URLCONF = 'test_app.urls'
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ACCOUNT_ACTIVATION_DAYS = 7
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
6f8472bdd605a6815d40ae90c05cbb0032907b6c
|
tests/parse_token.py
|
tests/parse_token.py
|
"""parse_token test case"""
import unittest
from lighty.templates.tag import parse_token
class FormFieldsTestCase(unittest.TestCase):
''' Test form fields '''
def setUp(self):
# Test Field class
pass
def testCleanBrackets(self):
parsed = parse_token('"test.html"')
needed = ['test.html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testCleanInnerBrackets(self):
parsed = parse_token('"test\'html"')
needed = ['test\'html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testParseSimpleTokens(self):
parsed = parse_token('a in b')
needed = ['a', 'in', 'b']
assert parsed == needed, 'Token parsing failed: %s except %s' % (
parsed, needed)
def testParseTokensWithSentence(self):
parsed = parse_token('a as "Let me in"')
needed = ['a', 'as', 'Let me in']
assert parsed == needed, 'Token with sentence parsing failed: %s' % (
' '.join((parsed, 'except', needed)))
|
"""parse_token test case"""
import unittest
from lighty.templates.tag import parse_token
class ParseTokenTestCase(unittest.TestCase):
''' Test form fields '''
def setUp(self):
# Test Field class
pass
def testCleanBrackets(self):
parsed = parse_token('"test.html"')
needed = ['test.html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testCleanInnerBrackets(self):
parsed = parse_token('"test\'html"')
needed = ['test\'html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testParseSimpleTokens(self):
parsed = parse_token('a in b')
needed = ['a', 'in', 'b']
assert parsed == needed, 'Token parsing failed: %s except %s' % (
parsed, needed)
def testParseTokensWithSentence(self):
parsed = parse_token('a as "Let me in"')
needed = ['a', 'as', 'Let me in']
assert parsed == needed, 'Token with sentence parsing failed: %s' % (
' '.join((parsed, 'except', needed)))
def test():
suite = unittest.TestSuite()
suite.addTest(ParseTokenTestCase('testCleanBrackets'))
suite.addTest(ParseTokenTestCase("testCleanInnerBrackets"))
suite.addTest(ParseTokenTestCase("testParseSimpleTokens"))
suite.addTest(ParseTokenTestCase("testParseTokensWithSentence"))
return suite
|
Add tests for parser tokens
|
Add tests for parser tokens
|
Python
|
bsd-3-clause
|
GrAndSE/lighty-template,GrAndSE/lighty
|
"""parse_token test case"""
import unittest
from lighty.templates.tag import parse_token
class FormFieldsTestCase(unittest.TestCase):
''' Test form fields '''
def setUp(self):
# Test Field class
pass
def testCleanBrackets(self):
parsed = parse_token('"test.html"')
needed = ['test.html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testCleanInnerBrackets(self):
parsed = parse_token('"test\'html"')
needed = ['test\'html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testParseSimpleTokens(self):
parsed = parse_token('a in b')
needed = ['a', 'in', 'b']
assert parsed == needed, 'Token parsing failed: %s except %s' % (
parsed, needed)
def testParseTokensWithSentence(self):
parsed = parse_token('a as "Let me in"')
needed = ['a', 'as', 'Let me in']
assert parsed == needed, 'Token with sentence parsing failed: %s' % (
' '.join((parsed, 'except', needed)))
Add tests for parser tokens
|
"""parse_token test case"""
import unittest
from lighty.templates.tag import parse_token
class ParseTokenTestCase(unittest.TestCase):
''' Test form fields '''
def setUp(self):
# Test Field class
pass
def testCleanBrackets(self):
parsed = parse_token('"test.html"')
needed = ['test.html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testCleanInnerBrackets(self):
parsed = parse_token('"test\'html"')
needed = ['test\'html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testParseSimpleTokens(self):
parsed = parse_token('a in b')
needed = ['a', 'in', 'b']
assert parsed == needed, 'Token parsing failed: %s except %s' % (
parsed, needed)
def testParseTokensWithSentence(self):
parsed = parse_token('a as "Let me in"')
needed = ['a', 'as', 'Let me in']
assert parsed == needed, 'Token with sentence parsing failed: %s' % (
' '.join((parsed, 'except', needed)))
def test():
suite = unittest.TestSuite()
suite.addTest(ParseTokenTestCase('testCleanBrackets'))
suite.addTest(ParseTokenTestCase("testCleanInnerBrackets"))
suite.addTest(ParseTokenTestCase("testParseSimpleTokens"))
suite.addTest(ParseTokenTestCase("testParseTokensWithSentence"))
return suite
|
<commit_before>"""parse_token test case"""
import unittest
from lighty.templates.tag import parse_token
class FormFieldsTestCase(unittest.TestCase):
''' Test form fields '''
def setUp(self):
# Test Field class
pass
def testCleanBrackets(self):
parsed = parse_token('"test.html"')
needed = ['test.html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testCleanInnerBrackets(self):
parsed = parse_token('"test\'html"')
needed = ['test\'html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testParseSimpleTokens(self):
parsed = parse_token('a in b')
needed = ['a', 'in', 'b']
assert parsed == needed, 'Token parsing failed: %s except %s' % (
parsed, needed)
def testParseTokensWithSentence(self):
parsed = parse_token('a as "Let me in"')
needed = ['a', 'as', 'Let me in']
assert parsed == needed, 'Token with sentence parsing failed: %s' % (
' '.join((parsed, 'except', needed)))
<commit_msg>Add tests for parser tokens<commit_after>
|
"""parse_token test case"""
import unittest
from lighty.templates.tag import parse_token
class ParseTokenTestCase(unittest.TestCase):
''' Test form fields '''
def setUp(self):
# Test Field class
pass
def testCleanBrackets(self):
parsed = parse_token('"test.html"')
needed = ['test.html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testCleanInnerBrackets(self):
parsed = parse_token('"test\'html"')
needed = ['test\'html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testParseSimpleTokens(self):
parsed = parse_token('a in b')
needed = ['a', 'in', 'b']
assert parsed == needed, 'Token parsing failed: %s except %s' % (
parsed, needed)
def testParseTokensWithSentence(self):
parsed = parse_token('a as "Let me in"')
needed = ['a', 'as', 'Let me in']
assert parsed == needed, 'Token with sentence parsing failed: %s' % (
' '.join((parsed, 'except', needed)))
def test():
suite = unittest.TestSuite()
suite.addTest(ParseTokenTestCase('testCleanBrackets'))
suite.addTest(ParseTokenTestCase("testCleanInnerBrackets"))
suite.addTest(ParseTokenTestCase("testParseSimpleTokens"))
suite.addTest(ParseTokenTestCase("testParseTokensWithSentence"))
return suite
|
"""parse_token test case"""
import unittest
from lighty.templates.tag import parse_token
class FormFieldsTestCase(unittest.TestCase):
''' Test form fields '''
def setUp(self):
# Test Field class
pass
def testCleanBrackets(self):
parsed = parse_token('"test.html"')
needed = ['test.html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testCleanInnerBrackets(self):
parsed = parse_token('"test\'html"')
needed = ['test\'html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testParseSimpleTokens(self):
parsed = parse_token('a in b')
needed = ['a', 'in', 'b']
assert parsed == needed, 'Token parsing failed: %s except %s' % (
parsed, needed)
def testParseTokensWithSentence(self):
parsed = parse_token('a as "Let me in"')
needed = ['a', 'as', 'Let me in']
assert parsed == needed, 'Token with sentence parsing failed: %s' % (
' '.join((parsed, 'except', needed)))
Add tests for parser tokens"""parse_token test case"""
import unittest
from lighty.templates.tag import parse_token
class ParseTokenTestCase(unittest.TestCase):
''' Test form fields '''
def setUp(self):
# Test Field class
pass
def testCleanBrackets(self):
parsed = parse_token('"test.html"')
needed = ['test.html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testCleanInnerBrackets(self):
parsed = parse_token('"test\'html"')
needed = ['test\'html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testParseSimpleTokens(self):
parsed = parse_token('a in b')
needed = ['a', 'in', 'b']
assert parsed == needed, 'Token parsing failed: %s except %s' % (
parsed, needed)
def testParseTokensWithSentence(self):
parsed = parse_token('a as "Let me in"')
needed = ['a', 'as', 'Let me in']
assert parsed == needed, 'Token with sentence parsing failed: %s' % (
' '.join((parsed, 'except', needed)))
def test():
suite = unittest.TestSuite()
suite.addTest(ParseTokenTestCase('testCleanBrackets'))
suite.addTest(ParseTokenTestCase("testCleanInnerBrackets"))
suite.addTest(ParseTokenTestCase("testParseSimpleTokens"))
suite.addTest(ParseTokenTestCase("testParseTokensWithSentence"))
return suite
|
<commit_before>"""parse_token test case"""
import unittest
from lighty.templates.tag import parse_token
class FormFieldsTestCase(unittest.TestCase):
''' Test form fields '''
def setUp(self):
# Test Field class
pass
def testCleanBrackets(self):
parsed = parse_token('"test.html"')
needed = ['test.html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testCleanInnerBrackets(self):
parsed = parse_token('"test\'html"')
needed = ['test\'html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testParseSimpleTokens(self):
parsed = parse_token('a in b')
needed = ['a', 'in', 'b']
assert parsed == needed, 'Token parsing failed: %s except %s' % (
parsed, needed)
def testParseTokensWithSentence(self):
parsed = parse_token('a as "Let me in"')
needed = ['a', 'as', 'Let me in']
assert parsed == needed, 'Token with sentence parsing failed: %s' % (
' '.join((parsed, 'except', needed)))
<commit_msg>Add tests for parser tokens<commit_after>"""parse_token test case"""
import unittest
from lighty.templates.tag import parse_token
class ParseTokenTestCase(unittest.TestCase):
''' Test form fields '''
def setUp(self):
# Test Field class
pass
def testCleanBrackets(self):
parsed = parse_token('"test.html"')
needed = ['test.html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testCleanInnerBrackets(self):
parsed = parse_token('"test\'html"')
needed = ['test\'html']
assert parsed == needed, 'Brackets cleaning failed: %s except %s' % (
parsed, needed)
def testParseSimpleTokens(self):
parsed = parse_token('a in b')
needed = ['a', 'in', 'b']
assert parsed == needed, 'Token parsing failed: %s except %s' % (
parsed, needed)
def testParseTokensWithSentence(self):
parsed = parse_token('a as "Let me in"')
needed = ['a', 'as', 'Let me in']
assert parsed == needed, 'Token with sentence parsing failed: %s' % (
' '.join((parsed, 'except', needed)))
def test():
suite = unittest.TestSuite()
suite.addTest(ParseTokenTestCase('testCleanBrackets'))
suite.addTest(ParseTokenTestCase("testCleanInnerBrackets"))
suite.addTest(ParseTokenTestCase("testParseSimpleTokens"))
suite.addTest(ParseTokenTestCase("testParseTokensWithSentence"))
return suite
|
267a36731b7f103bdc9318a2646b24dec520f9e6
|
tests/test_cindex.py
|
tests/test_cindex.py
|
import sublime
import os.path
import time
from YetAnotherCodeSearch.tests import CommandTestCase
class CindexCommandTest(CommandTestCase):
def test_cindex(self):
self.window.run_command('cindex', {'index_project': True})
max_iters = 10
while max_iters > 0 and self.view.get_status('YetAnotherCodeSearch') != '':
time.sleep(0.1)
max_iters -= 1
self.assertEquals('', self.view.get_status('YetAnotherCodeSearch'))
self.assertTrue(os.path.isfile('test_csearchindex'))
|
import sublime
import os.path
import shutil
import time
from YetAnotherCodeSearch.tests import CommandTestCase
class CindexCommandTest(CommandTestCase):
def test_cindex(self):
self.window.run_command('cindex', {'index_project': True})
max_iters = 10
while max_iters > 0 and self.view.get_status('YetAnotherCodeSearch') != '':
time.sleep(0.1)
max_iters -= 1
self.assertEquals('', self.view.get_status('YetAnotherCodeSearch'))
self.assertTrue(os.path.isfile('test_csearchindex'))
def test_cindex_exists(self):
"""This test verifies that `cindex` is installed."""
self.assertIsNotNone(shutil.which('cindex'))
|
Add explicit test to ensure cindex is present.
|
Add explicit test to ensure cindex is present.
|
Python
|
mit
|
pope/SublimeYetAnotherCodeSearch,pope/SublimeYetAnotherCodeSearch
|
import sublime
import os.path
import time
from YetAnotherCodeSearch.tests import CommandTestCase
class CindexCommandTest(CommandTestCase):
def test_cindex(self):
self.window.run_command('cindex', {'index_project': True})
max_iters = 10
while max_iters > 0 and self.view.get_status('YetAnotherCodeSearch') != '':
time.sleep(0.1)
max_iters -= 1
self.assertEquals('', self.view.get_status('YetAnotherCodeSearch'))
self.assertTrue(os.path.isfile('test_csearchindex'))
Add explicit test to ensure cindex is present.
|
import sublime
import os.path
import shutil
import time
from YetAnotherCodeSearch.tests import CommandTestCase
class CindexCommandTest(CommandTestCase):
def test_cindex(self):
self.window.run_command('cindex', {'index_project': True})
max_iters = 10
while max_iters > 0 and self.view.get_status('YetAnotherCodeSearch') != '':
time.sleep(0.1)
max_iters -= 1
self.assertEquals('', self.view.get_status('YetAnotherCodeSearch'))
self.assertTrue(os.path.isfile('test_csearchindex'))
def test_cindex_exists(self):
"""This test verifies that `cindex` is installed."""
self.assertIsNotNone(shutil.which('cindex'))
|
<commit_before>import sublime
import os.path
import time
from YetAnotherCodeSearch.tests import CommandTestCase
class CindexCommandTest(CommandTestCase):
def test_cindex(self):
self.window.run_command('cindex', {'index_project': True})
max_iters = 10
while max_iters > 0 and self.view.get_status('YetAnotherCodeSearch') != '':
time.sleep(0.1)
max_iters -= 1
self.assertEquals('', self.view.get_status('YetAnotherCodeSearch'))
self.assertTrue(os.path.isfile('test_csearchindex'))
<commit_msg>Add explicit test to ensure cindex is present.<commit_after>
|
import sublime
import os.path
import shutil
import time
from YetAnotherCodeSearch.tests import CommandTestCase
class CindexCommandTest(CommandTestCase):
def test_cindex(self):
self.window.run_command('cindex', {'index_project': True})
max_iters = 10
while max_iters > 0 and self.view.get_status('YetAnotherCodeSearch') != '':
time.sleep(0.1)
max_iters -= 1
self.assertEquals('', self.view.get_status('YetAnotherCodeSearch'))
self.assertTrue(os.path.isfile('test_csearchindex'))
def test_cindex_exists(self):
"""This test verifies that `cindex` is installed."""
self.assertIsNotNone(shutil.which('cindex'))
|
import sublime
import os.path
import time
from YetAnotherCodeSearch.tests import CommandTestCase
class CindexCommandTest(CommandTestCase):
def test_cindex(self):
self.window.run_command('cindex', {'index_project': True})
max_iters = 10
while max_iters > 0 and self.view.get_status('YetAnotherCodeSearch') != '':
time.sleep(0.1)
max_iters -= 1
self.assertEquals('', self.view.get_status('YetAnotherCodeSearch'))
self.assertTrue(os.path.isfile('test_csearchindex'))
Add explicit test to ensure cindex is present.import sublime
import os.path
import shutil
import time
from YetAnotherCodeSearch.tests import CommandTestCase
class CindexCommandTest(CommandTestCase):
def test_cindex(self):
self.window.run_command('cindex', {'index_project': True})
max_iters = 10
while max_iters > 0 and self.view.get_status('YetAnotherCodeSearch') != '':
time.sleep(0.1)
max_iters -= 1
self.assertEquals('', self.view.get_status('YetAnotherCodeSearch'))
self.assertTrue(os.path.isfile('test_csearchindex'))
def test_cindex_exists(self):
"""This test verifies that `cindex` is installed."""
self.assertIsNotNone(shutil.which('cindex'))
|
<commit_before>import sublime
import os.path
import time
from YetAnotherCodeSearch.tests import CommandTestCase
class CindexCommandTest(CommandTestCase):
def test_cindex(self):
self.window.run_command('cindex', {'index_project': True})
max_iters = 10
while max_iters > 0 and self.view.get_status('YetAnotherCodeSearch') != '':
time.sleep(0.1)
max_iters -= 1
self.assertEquals('', self.view.get_status('YetAnotherCodeSearch'))
self.assertTrue(os.path.isfile('test_csearchindex'))
<commit_msg>Add explicit test to ensure cindex is present.<commit_after>import sublime
import os.path
import shutil
import time
from YetAnotherCodeSearch.tests import CommandTestCase
class CindexCommandTest(CommandTestCase):
def test_cindex(self):
self.window.run_command('cindex', {'index_project': True})
max_iters = 10
while max_iters > 0 and self.view.get_status('YetAnotherCodeSearch') != '':
time.sleep(0.1)
max_iters -= 1
self.assertEquals('', self.view.get_status('YetAnotherCodeSearch'))
self.assertTrue(os.path.isfile('test_csearchindex'))
def test_cindex_exists(self):
"""This test verifies that `cindex` is installed."""
self.assertIsNotNone(shutil.which('cindex'))
|
9a474cbea3a2713a94e9e5dbc0b90762b4f354c6
|
automated_ebs_snapshots/connection_manager.py
|
automated_ebs_snapshots/connection_manager.py
|
""" Handles connections to AWS """
import logging
import sys
from boto import ec2
from boto.utils import get_instance_metadata
logger = logging.getLogger(__name__)
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection
"""
logger.info('Connecting to AWS EC2 in {}'.format(region))
if access_key:
# Connect using supplied credentials
connection = ec2.connect_to_region(
region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
else:
# Fetch instance metadata
metadata = get_instance_metadata(timeout=1, num_retries=1)
if metadata:
try:
region = metadata['placement']['availability-zone'][:-1]
except KeyError:
pass
# Connect using env vars or boto credentials
connection = ec2.connect_to_region(region)
if not connection:
logger.error('An error occurred when connecting to EC2')
sys.exit(1)
return connection
|
""" Handles connections to AWS """
import logging
import sys
from boto import ec2
from boto.utils import get_instance_metadata
logger = logging.getLogger(__name__)
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection
"""
if access_key:
# Connect using supplied credentials
logger.info('Connecting to AWS EC2 in {}'.format(region))
connection = ec2.connect_to_region(
region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
else:
# Fetch instance metadata
metadata = get_instance_metadata(timeout=1, num_retries=1)
if metadata:
try:
region = metadata['placement']['availability-zone'][:-1]
except KeyError:
pass
# Connect using env vars or boto credentials
logger.info('Connecting to AWS EC2 in {}'.format(region))
connection = ec2.connect_to_region(region)
if not connection:
logger.error('An error occurred when connecting to EC2')
sys.exit(1)
return connection
|
Fix for logging incorrect region information when using instance role for authentication.
|
Fix for logging incorrect region information when using instance role for authentication.
|
Python
|
apache-2.0
|
bkarakashev/automated-ebs-snapshots,skymill/automated-ebs-snapshots,CBitLabs/automated-ebs-snapshots
|
""" Handles connections to AWS """
import logging
import sys
from boto import ec2
from boto.utils import get_instance_metadata
logger = logging.getLogger(__name__)
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection
"""
logger.info('Connecting to AWS EC2 in {}'.format(region))
if access_key:
# Connect using supplied credentials
connection = ec2.connect_to_region(
region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
else:
# Fetch instance metadata
metadata = get_instance_metadata(timeout=1, num_retries=1)
if metadata:
try:
region = metadata['placement']['availability-zone'][:-1]
except KeyError:
pass
# Connect using env vars or boto credentials
connection = ec2.connect_to_region(region)
if not connection:
logger.error('An error occurred when connecting to EC2')
sys.exit(1)
return connection
Fix for logging incorrect region information when using instance role for authentication.
|
""" Handles connections to AWS """
import logging
import sys
from boto import ec2
from boto.utils import get_instance_metadata
logger = logging.getLogger(__name__)
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection
"""
if access_key:
# Connect using supplied credentials
logger.info('Connecting to AWS EC2 in {}'.format(region))
connection = ec2.connect_to_region(
region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
else:
# Fetch instance metadata
metadata = get_instance_metadata(timeout=1, num_retries=1)
if metadata:
try:
region = metadata['placement']['availability-zone'][:-1]
except KeyError:
pass
# Connect using env vars or boto credentials
logger.info('Connecting to AWS EC2 in {}'.format(region))
connection = ec2.connect_to_region(region)
if not connection:
logger.error('An error occurred when connecting to EC2')
sys.exit(1)
return connection
|
<commit_before>""" Handles connections to AWS """
import logging
import sys
from boto import ec2
from boto.utils import get_instance_metadata
logger = logging.getLogger(__name__)
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection
"""
logger.info('Connecting to AWS EC2 in {}'.format(region))
if access_key:
# Connect using supplied credentials
connection = ec2.connect_to_region(
region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
else:
# Fetch instance metadata
metadata = get_instance_metadata(timeout=1, num_retries=1)
if metadata:
try:
region = metadata['placement']['availability-zone'][:-1]
except KeyError:
pass
# Connect using env vars or boto credentials
connection = ec2.connect_to_region(region)
if not connection:
logger.error('An error occurred when connecting to EC2')
sys.exit(1)
return connection
<commit_msg>Fix for logging incorrect region information when using instance role for authentication.<commit_after>
|
""" Handles connections to AWS """
import logging
import sys
from boto import ec2
from boto.utils import get_instance_metadata
logger = logging.getLogger(__name__)
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection
"""
if access_key:
# Connect using supplied credentials
logger.info('Connecting to AWS EC2 in {}'.format(region))
connection = ec2.connect_to_region(
region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
else:
# Fetch instance metadata
metadata = get_instance_metadata(timeout=1, num_retries=1)
if metadata:
try:
region = metadata['placement']['availability-zone'][:-1]
except KeyError:
pass
# Connect using env vars or boto credentials
logger.info('Connecting to AWS EC2 in {}'.format(region))
connection = ec2.connect_to_region(region)
if not connection:
logger.error('An error occurred when connecting to EC2')
sys.exit(1)
return connection
|
""" Handles connections to AWS """
import logging
import sys
from boto import ec2
from boto.utils import get_instance_metadata
logger = logging.getLogger(__name__)
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection
"""
logger.info('Connecting to AWS EC2 in {}'.format(region))
if access_key:
# Connect using supplied credentials
connection = ec2.connect_to_region(
region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
else:
# Fetch instance metadata
metadata = get_instance_metadata(timeout=1, num_retries=1)
if metadata:
try:
region = metadata['placement']['availability-zone'][:-1]
except KeyError:
pass
# Connect using env vars or boto credentials
connection = ec2.connect_to_region(region)
if not connection:
logger.error('An error occurred when connecting to EC2')
sys.exit(1)
return connection
Fix for logging incorrect region information when using instance role for authentication.""" Handles connections to AWS """
import logging
import sys
from boto import ec2
from boto.utils import get_instance_metadata
logger = logging.getLogger(__name__)
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection
"""
if access_key:
# Connect using supplied credentials
logger.info('Connecting to AWS EC2 in {}'.format(region))
connection = ec2.connect_to_region(
region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
else:
# Fetch instance metadata
metadata = get_instance_metadata(timeout=1, num_retries=1)
if metadata:
try:
region = metadata['placement']['availability-zone'][:-1]
except KeyError:
pass
# Connect using env vars or boto credentials
logger.info('Connecting to AWS EC2 in {}'.format(region))
connection = ec2.connect_to_region(region)
if not connection:
logger.error('An error occurred when connecting to EC2')
sys.exit(1)
return connection
|
<commit_before>""" Handles connections to AWS """
import logging
import sys
from boto import ec2
from boto.utils import get_instance_metadata
logger = logging.getLogger(__name__)
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection
"""
logger.info('Connecting to AWS EC2 in {}'.format(region))
if access_key:
# Connect using supplied credentials
connection = ec2.connect_to_region(
region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
else:
# Fetch instance metadata
metadata = get_instance_metadata(timeout=1, num_retries=1)
if metadata:
try:
region = metadata['placement']['availability-zone'][:-1]
except KeyError:
pass
# Connect using env vars or boto credentials
connection = ec2.connect_to_region(region)
if not connection:
logger.error('An error occurred when connecting to EC2')
sys.exit(1)
return connection
<commit_msg>Fix for logging incorrect region information when using instance role for authentication.<commit_after>""" Handles connections to AWS """
import logging
import sys
from boto import ec2
from boto.utils import get_instance_metadata
logger = logging.getLogger(__name__)
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None):
""" Connect to AWS ec2
:type region: str
:param region: AWS region to connect to
:type access_key: str
:param access_key: AWS access key id
:type secret_key: str
:param secret_key: AWS secret access key
:returns: boto.ec2.connection.EC2Connection -- EC2 connection
"""
if access_key:
# Connect using supplied credentials
logger.info('Connecting to AWS EC2 in {}'.format(region))
connection = ec2.connect_to_region(
region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
else:
# Fetch instance metadata
metadata = get_instance_metadata(timeout=1, num_retries=1)
if metadata:
try:
region = metadata['placement']['availability-zone'][:-1]
except KeyError:
pass
# Connect using env vars or boto credentials
logger.info('Connecting to AWS EC2 in {}'.format(region))
connection = ec2.connect_to_region(region)
if not connection:
logger.error('An error occurred when connecting to EC2')
sys.exit(1)
return connection
|
b95cf9fce9c8c878ffb81767fea45b23c633eac5
|
convert.py
|
convert.py
|
#!/usr/bin/python
import config, os, string
def format_filename(s):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = s.replace('/','-')
filename = ''.join(c for c in filename if c in valid_chars)
filename = filename.replace(' ','_')
return filename
class RecordingInfo:
def __init__(self, recording):
self.recording = recording
self.__readInfo()
def __readInfo(self):
with open(os.path.join(self.recording, "info")) as infoFile:
for line in infoFile:
infoType = line[:1]
content = line[2:].strip()
if (infoType == 'T'):
self.title = content
elif (infoType == 'D'):
self.description = content
def filename(self):
return format_filename(self.title)
def __repr__(self):
return self.filename()
def read_recordings(parent, folder):
recordings = []
if (folder == None):
recording = parent
else :
recording = os.path.join(parent, folder)
if (recording[-3:] == "rec"):
recordings.append(RecordingInfo(recording))
elif (os.path.isdir(recording)) :
for subfolder in os.listdir(recording):
recordings.extend(read_recordings(recording, subfolder))
return recordings
print "converting VDR recordings from directory " + config.recordings
print(read_recordings(config.recordings, None))
|
#!/usr/bin/python
import config, os, string
def format_filename(s):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = s.replace('/','-')
filename = ''.join(c for c in filename if c in valid_chars)
filename = filename.replace(' ','_')
return filename
class RecordingInfo:
def __init__(self, recording):
self.recording = recording
self.title = ""
self.description = ""
self.__readInfo()
def __readInfo(self):
with open(os.path.join(self.recording, "info")) as infoFile:
for line in infoFile:
infoType = line[:1]
content = line[2:].strip()
if (infoType == 'T'):
self.title = content
elif (infoType == 'D'):
self.description = content
def filename(self):
return format_filename(self.title)
def __repr__(self):
return self.filename()
def read_recordings(parent, folder):
recordings = []
if (folder == None):
recording = parent
else :
recording = os.path.join(parent, folder)
if (recording[-3:] == "rec"):
recordings.append(RecordingInfo(recording))
elif (os.path.isdir(recording)) :
for subfolder in os.listdir(recording):
recordings.extend(read_recordings(recording, subfolder))
return recordings
print "converting VDR recordings from directory " + config.recordings
print(read_recordings(config.recordings, None))
|
Initialize title and description with empty string
|
Initialize title and description with empty string
|
Python
|
apache-2.0
|
andreasbehnke/vdr-handbrake
|
#!/usr/bin/python
import config, os, string
def format_filename(s):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = s.replace('/','-')
filename = ''.join(c for c in filename if c in valid_chars)
filename = filename.replace(' ','_')
return filename
class RecordingInfo:
def __init__(self, recording):
self.recording = recording
self.__readInfo()
def __readInfo(self):
with open(os.path.join(self.recording, "info")) as infoFile:
for line in infoFile:
infoType = line[:1]
content = line[2:].strip()
if (infoType == 'T'):
self.title = content
elif (infoType == 'D'):
self.description = content
def filename(self):
return format_filename(self.title)
def __repr__(self):
return self.filename()
def read_recordings(parent, folder):
recordings = []
if (folder == None):
recording = parent
else :
recording = os.path.join(parent, folder)
if (recording[-3:] == "rec"):
recordings.append(RecordingInfo(recording))
elif (os.path.isdir(recording)) :
for subfolder in os.listdir(recording):
recordings.extend(read_recordings(recording, subfolder))
return recordings
print "converting VDR recordings from directory " + config.recordings
print(read_recordings(config.recordings, None))
Initialize title and description with empty string
|
#!/usr/bin/python
import config, os, string
def format_filename(s):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = s.replace('/','-')
filename = ''.join(c for c in filename if c in valid_chars)
filename = filename.replace(' ','_')
return filename
class RecordingInfo:
def __init__(self, recording):
self.recording = recording
self.title = ""
self.description = ""
self.__readInfo()
def __readInfo(self):
with open(os.path.join(self.recording, "info")) as infoFile:
for line in infoFile:
infoType = line[:1]
content = line[2:].strip()
if (infoType == 'T'):
self.title = content
elif (infoType == 'D'):
self.description = content
def filename(self):
return format_filename(self.title)
def __repr__(self):
return self.filename()
def read_recordings(parent, folder):
recordings = []
if (folder == None):
recording = parent
else :
recording = os.path.join(parent, folder)
if (recording[-3:] == "rec"):
recordings.append(RecordingInfo(recording))
elif (os.path.isdir(recording)) :
for subfolder in os.listdir(recording):
recordings.extend(read_recordings(recording, subfolder))
return recordings
print "converting VDR recordings from directory " + config.recordings
print(read_recordings(config.recordings, None))
|
<commit_before>#!/usr/bin/python
import config, os, string
def format_filename(s):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = s.replace('/','-')
filename = ''.join(c for c in filename if c in valid_chars)
filename = filename.replace(' ','_')
return filename
class RecordingInfo:
def __init__(self, recording):
self.recording = recording
self.__readInfo()
def __readInfo(self):
with open(os.path.join(self.recording, "info")) as infoFile:
for line in infoFile:
infoType = line[:1]
content = line[2:].strip()
if (infoType == 'T'):
self.title = content
elif (infoType == 'D'):
self.description = content
def filename(self):
return format_filename(self.title)
def __repr__(self):
return self.filename()
def read_recordings(parent, folder):
recordings = []
if (folder == None):
recording = parent
else :
recording = os.path.join(parent, folder)
if (recording[-3:] == "rec"):
recordings.append(RecordingInfo(recording))
elif (os.path.isdir(recording)) :
for subfolder in os.listdir(recording):
recordings.extend(read_recordings(recording, subfolder))
return recordings
print "converting VDR recordings from directory " + config.recordings
print(read_recordings(config.recordings, None))
<commit_msg>Initialize title and description with empty string<commit_after>
|
#!/usr/bin/python
import config, os, string
def format_filename(s):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = s.replace('/','-')
filename = ''.join(c for c in filename if c in valid_chars)
filename = filename.replace(' ','_')
return filename
class RecordingInfo:
def __init__(self, recording):
self.recording = recording
self.title = ""
self.description = ""
self.__readInfo()
def __readInfo(self):
with open(os.path.join(self.recording, "info")) as infoFile:
for line in infoFile:
infoType = line[:1]
content = line[2:].strip()
if (infoType == 'T'):
self.title = content
elif (infoType == 'D'):
self.description = content
def filename(self):
return format_filename(self.title)
def __repr__(self):
return self.filename()
def read_recordings(parent, folder):
recordings = []
if (folder == None):
recording = parent
else :
recording = os.path.join(parent, folder)
if (recording[-3:] == "rec"):
recordings.append(RecordingInfo(recording))
elif (os.path.isdir(recording)) :
for subfolder in os.listdir(recording):
recordings.extend(read_recordings(recording, subfolder))
return recordings
print "converting VDR recordings from directory " + config.recordings
print(read_recordings(config.recordings, None))
|
#!/usr/bin/python
import config, os, string
def format_filename(s):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = s.replace('/','-')
filename = ''.join(c for c in filename if c in valid_chars)
filename = filename.replace(' ','_')
return filename
class RecordingInfo:
def __init__(self, recording):
self.recording = recording
self.__readInfo()
def __readInfo(self):
with open(os.path.join(self.recording, "info")) as infoFile:
for line in infoFile:
infoType = line[:1]
content = line[2:].strip()
if (infoType == 'T'):
self.title = content
elif (infoType == 'D'):
self.description = content
def filename(self):
return format_filename(self.title)
def __repr__(self):
return self.filename()
def read_recordings(parent, folder):
recordings = []
if (folder == None):
recording = parent
else :
recording = os.path.join(parent, folder)
if (recording[-3:] == "rec"):
recordings.append(RecordingInfo(recording))
elif (os.path.isdir(recording)) :
for subfolder in os.listdir(recording):
recordings.extend(read_recordings(recording, subfolder))
return recordings
print "converting VDR recordings from directory " + config.recordings
print(read_recordings(config.recordings, None))
Initialize title and description with empty string#!/usr/bin/python
import config, os, string
def format_filename(s):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = s.replace('/','-')
filename = ''.join(c for c in filename if c in valid_chars)
filename = filename.replace(' ','_')
return filename
class RecordingInfo:
def __init__(self, recording):
self.recording = recording
self.title = ""
self.description = ""
self.__readInfo()
def __readInfo(self):
with open(os.path.join(self.recording, "info")) as infoFile:
for line in infoFile:
infoType = line[:1]
content = line[2:].strip()
if (infoType == 'T'):
self.title = content
elif (infoType == 'D'):
self.description = content
def filename(self):
return format_filename(self.title)
def __repr__(self):
return self.filename()
def read_recordings(parent, folder):
recordings = []
if (folder == None):
recording = parent
else :
recording = os.path.join(parent, folder)
if (recording[-3:] == "rec"):
recordings.append(RecordingInfo(recording))
elif (os.path.isdir(recording)) :
for subfolder in os.listdir(recording):
recordings.extend(read_recordings(recording, subfolder))
return recordings
print "converting VDR recordings from directory " + config.recordings
print(read_recordings(config.recordings, None))
|
<commit_before>#!/usr/bin/python
import config, os, string
def format_filename(s):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = s.replace('/','-')
filename = ''.join(c for c in filename if c in valid_chars)
filename = filename.replace(' ','_')
return filename
class RecordingInfo:
def __init__(self, recording):
self.recording = recording
self.__readInfo()
def __readInfo(self):
with open(os.path.join(self.recording, "info")) as infoFile:
for line in infoFile:
infoType = line[:1]
content = line[2:].strip()
if (infoType == 'T'):
self.title = content
elif (infoType == 'D'):
self.description = content
def filename(self):
return format_filename(self.title)
def __repr__(self):
return self.filename()
def read_recordings(parent, folder):
recordings = []
if (folder == None):
recording = parent
else :
recording = os.path.join(parent, folder)
if (recording[-3:] == "rec"):
recordings.append(RecordingInfo(recording))
elif (os.path.isdir(recording)) :
for subfolder in os.listdir(recording):
recordings.extend(read_recordings(recording, subfolder))
return recordings
print "converting VDR recordings from directory " + config.recordings
print(read_recordings(config.recordings, None))
<commit_msg>Initialize title and description with empty string<commit_after>#!/usr/bin/python
import config, os, string
def format_filename(s):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = s.replace('/','-')
filename = ''.join(c for c in filename if c in valid_chars)
filename = filename.replace(' ','_')
return filename
class RecordingInfo:
def __init__(self, recording):
self.recording = recording
self.title = ""
self.description = ""
self.__readInfo()
def __readInfo(self):
with open(os.path.join(self.recording, "info")) as infoFile:
for line in infoFile:
infoType = line[:1]
content = line[2:].strip()
if (infoType == 'T'):
self.title = content
elif (infoType == 'D'):
self.description = content
def filename(self):
return format_filename(self.title)
def __repr__(self):
return self.filename()
def read_recordings(parent, folder):
recordings = []
if (folder == None):
recording = parent
else :
recording = os.path.join(parent, folder)
if (recording[-3:] == "rec"):
recordings.append(RecordingInfo(recording))
elif (os.path.isdir(recording)) :
for subfolder in os.listdir(recording):
recordings.extend(read_recordings(recording, subfolder))
return recordings
print "converting VDR recordings from directory " + config.recordings
print(read_recordings(config.recordings, None))
|
d848143cdeb915029fefab7be34d0585296502c2
|
castor/server.py
|
castor/server.py
|
"""
This module defines the Castor server, that consumes the Docker events from
a given host. This module can be run as a command line script or get imported
by another Python script.
"""
import docker
import tasks
import settings
DOCKER_SETTINGS = settings.SETTINGS.get('docker', {})
# Customize the Docker client according to settings in `settings.json`
DOCKER_CLIENT = docker.Client(**DOCKER_SETTINGS)
def consume():
"""
Starts consuming Docker events accoding to the already defined settings.
"""
print 'Start consuming events from %s' % DOCKER_SETTINGS['base_url']
for event in DOCKER_CLIENT.events(decode=True):
for hook in settings.HOOKS:
tasks.dispatch_event.delay(event, hook)
if __name__ == '__main__':
try:
consume()
except KeyboardInterrupt:
# Do not display ugly exception if stopped with Ctrl + C
print '\rBye.'
|
"""
This module defines the Castor server, that consumes the Docker events from
a given host. This module can be run as a command line script or get imported
by another Python script.
"""
import docker
import redis
import settings
import tasks
def consume(docker_client, redis_client):
"""
Starts consuming Docker events accoding to the already defined settings.
"""
print 'Start consuming events from %s' % docker_client.base_url
since = redis_client.get('castor:last_event')
for event in docker_client.events(decode=True, since=since):
for hook in settings.HOOKS:
tasks.dispatch_event.delay(event, hook)
redis_client.set('castor:last_event', event['time'])
if __name__ == '__main__':
try:
docker_client = docker.Client(**settings.SETTINGS.get('docker', {}))
redis_client = redis.StrictRedis(
host=settings.REDIS_HOST, port=settings.REDIS_PORT,
db=settings.REDIS_DB,
)
consume(docker_client, redis_client)
except KeyboardInterrupt:
# Do not display ugly exception if stopped with Ctrl + C
print '\rBye.'
|
Add last event time in Redis
|
Add last event time in Redis
This makes sure events are not missed if the server restarts.
|
Python
|
mit
|
sourcelair/castor
|
"""
This module defines the Castor server, that consumes the Docker events from
a given host. This module can be run as a command line script or get imported
by another Python script.
"""
import docker
import tasks
import settings
DOCKER_SETTINGS = settings.SETTINGS.get('docker', {})
# Customize the Docker client according to settings in `settings.json`
DOCKER_CLIENT = docker.Client(**DOCKER_SETTINGS)
def consume():
"""
Starts consuming Docker events accoding to the already defined settings.
"""
print 'Start consuming events from %s' % DOCKER_SETTINGS['base_url']
for event in DOCKER_CLIENT.events(decode=True):
for hook in settings.HOOKS:
tasks.dispatch_event.delay(event, hook)
if __name__ == '__main__':
try:
consume()
except KeyboardInterrupt:
# Do not display ugly exception if stopped with Ctrl + C
print '\rBye.'
Add last event time in Redis
This makes sure events are not missed if the server restarts.
|
"""
This module defines the Castor server, that consumes the Docker events from
a given host. This module can be run as a command line script or get imported
by another Python script.
"""
import docker
import redis
import settings
import tasks
def consume(docker_client, redis_client):
"""
Starts consuming Docker events accoding to the already defined settings.
"""
print 'Start consuming events from %s' % docker_client.base_url
since = redis_client.get('castor:last_event')
for event in docker_client.events(decode=True, since=since):
for hook in settings.HOOKS:
tasks.dispatch_event.delay(event, hook)
redis_client.set('castor:last_event', event['time'])
if __name__ == '__main__':
try:
docker_client = docker.Client(**settings.SETTINGS.get('docker', {}))
redis_client = redis.StrictRedis(
host=settings.REDIS_HOST, port=settings.REDIS_PORT,
db=settings.REDIS_DB,
)
consume(docker_client, redis_client)
except KeyboardInterrupt:
# Do not display ugly exception if stopped with Ctrl + C
print '\rBye.'
|
<commit_before>"""
This module defines the Castor server, that consumes the Docker events from
a given host. This module can be run as a command line script or get imported
by another Python script.
"""
import docker
import tasks
import settings
DOCKER_SETTINGS = settings.SETTINGS.get('docker', {})
# Customize the Docker client according to settings in `settings.json`
DOCKER_CLIENT = docker.Client(**DOCKER_SETTINGS)
def consume():
"""
Starts consuming Docker events accoding to the already defined settings.
"""
print 'Start consuming events from %s' % DOCKER_SETTINGS['base_url']
for event in DOCKER_CLIENT.events(decode=True):
for hook in settings.HOOKS:
tasks.dispatch_event.delay(event, hook)
if __name__ == '__main__':
try:
consume()
except KeyboardInterrupt:
# Do not display ugly exception if stopped with Ctrl + C
print '\rBye.'
<commit_msg>Add last event time in Redis
This makes sure events are not missed if the server restarts.<commit_after>
|
"""
This module defines the Castor server, that consumes the Docker events from
a given host. This module can be run as a command line script or get imported
by another Python script.
"""
import docker
import redis
import settings
import tasks
def consume(docker_client, redis_client):
"""
Starts consuming Docker events accoding to the already defined settings.
"""
print 'Start consuming events from %s' % docker_client.base_url
since = redis_client.get('castor:last_event')
for event in docker_client.events(decode=True, since=since):
for hook in settings.HOOKS:
tasks.dispatch_event.delay(event, hook)
redis_client.set('castor:last_event', event['time'])
if __name__ == '__main__':
try:
docker_client = docker.Client(**settings.SETTINGS.get('docker', {}))
redis_client = redis.StrictRedis(
host=settings.REDIS_HOST, port=settings.REDIS_PORT,
db=settings.REDIS_DB,
)
consume(docker_client, redis_client)
except KeyboardInterrupt:
# Do not display ugly exception if stopped with Ctrl + C
print '\rBye.'
|
"""
This module defines the Castor server, that consumes the Docker events from
a given host. This module can be run as a command line script or get imported
by another Python script.
"""
import docker
import tasks
import settings
DOCKER_SETTINGS = settings.SETTINGS.get('docker', {})
# Customize the Docker client according to settings in `settings.json`
DOCKER_CLIENT = docker.Client(**DOCKER_SETTINGS)
def consume():
"""
Starts consuming Docker events accoding to the already defined settings.
"""
print 'Start consuming events from %s' % DOCKER_SETTINGS['base_url']
for event in DOCKER_CLIENT.events(decode=True):
for hook in settings.HOOKS:
tasks.dispatch_event.delay(event, hook)
if __name__ == '__main__':
try:
consume()
except KeyboardInterrupt:
# Do not display ugly exception if stopped with Ctrl + C
print '\rBye.'
Add last event time in Redis
This makes sure events are not missed if the server restarts."""
This module defines the Castor server, that consumes the Docker events from
a given host. This module can be run as a command line script or get imported
by another Python script.
"""
import docker
import redis
import settings
import tasks
def consume(docker_client, redis_client):
"""
Starts consuming Docker events accoding to the already defined settings.
"""
print 'Start consuming events from %s' % docker_client.base_url
since = redis_client.get('castor:last_event')
for event in docker_client.events(decode=True, since=since):
for hook in settings.HOOKS:
tasks.dispatch_event.delay(event, hook)
redis_client.set('castor:last_event', event['time'])
if __name__ == '__main__':
try:
docker_client = docker.Client(**settings.SETTINGS.get('docker', {}))
redis_client = redis.StrictRedis(
host=settings.REDIS_HOST, port=settings.REDIS_PORT,
db=settings.REDIS_DB,
)
consume(docker_client, redis_client)
except KeyboardInterrupt:
# Do not display ugly exception if stopped with Ctrl + C
print '\rBye.'
|
<commit_before>"""
This module defines the Castor server, that consumes the Docker events from
a given host. This module can be run as a command line script or get imported
by another Python script.
"""
import docker
import tasks
import settings
DOCKER_SETTINGS = settings.SETTINGS.get('docker', {})
# Customize the Docker client according to settings in `settings.json`
DOCKER_CLIENT = docker.Client(**DOCKER_SETTINGS)
def consume():
"""
Starts consuming Docker events accoding to the already defined settings.
"""
print 'Start consuming events from %s' % DOCKER_SETTINGS['base_url']
for event in DOCKER_CLIENT.events(decode=True):
for hook in settings.HOOKS:
tasks.dispatch_event.delay(event, hook)
if __name__ == '__main__':
try:
consume()
except KeyboardInterrupt:
# Do not display ugly exception if stopped with Ctrl + C
print '\rBye.'
<commit_msg>Add last event time in Redis
This makes sure events are not missed if the server restarts.<commit_after>"""
This module defines the Castor server, that consumes the Docker events from
a given host. This module can be run as a command line script or get imported
by another Python script.
"""
import docker
import redis
import settings
import tasks
def consume(docker_client, redis_client):
"""
Starts consuming Docker events accoding to the already defined settings.
"""
print 'Start consuming events from %s' % docker_client.base_url
since = redis_client.get('castor:last_event')
for event in docker_client.events(decode=True, since=since):
for hook in settings.HOOKS:
tasks.dispatch_event.delay(event, hook)
redis_client.set('castor:last_event', event['time'])
if __name__ == '__main__':
try:
docker_client = docker.Client(**settings.SETTINGS.get('docker', {}))
redis_client = redis.StrictRedis(
host=settings.REDIS_HOST, port=settings.REDIS_PORT,
db=settings.REDIS_DB,
)
consume(docker_client, redis_client)
except KeyboardInterrupt:
# Do not display ugly exception if stopped with Ctrl + C
print '\rBye.'
|
72e8dbbac6f45e07dbd55c87aa9f0627e2639a98
|
python/ball.py
|
python/ball.py
|
#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)
|
#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self, x=0, y=0):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)
|
Add optional parameters for starting location
|
Add optional parameters for starting location
|
Python
|
mit
|
DarkAce65/rpi-led-matrix,DarkAce65/rpi-led-matrix
|
#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)Add optional parameters for starting location
|
#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self, x=0, y=0):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)
|
<commit_before>#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)<commit_msg>Add optional parameters for starting location<commit_after>
|
#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self, x=0, y=0):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)
|
#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)Add optional parameters for starting location#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self, x=0, y=0):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)
|
<commit_before>#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)<commit_msg>Add optional parameters for starting location<commit_after>#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self, x=0, y=0):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)
|
d078ec00d5553b0985d9c724a223c74b80b2c5ab
|
grains/grains.py
|
grains/grains.py
|
# File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = zip(square, grains)
print (board)
|
# File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
print type(board)
for k, v in board.iteritems():
print k, v
|
Convert zipped list to dictionary
|
Convert zipped list to dictionary
|
Python
|
mit
|
amalshehu/exercism-python
|
# File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = zip(square, grains)
print (board)
Convert zipped list to dictionary
|
# File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
print type(board)
for k, v in board.iteritems():
print k, v
|
<commit_before># File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = zip(square, grains)
print (board)
<commit_msg>Convert zipped list to dictionary<commit_after>
|
# File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
print type(board)
for k, v in board.iteritems():
print k, v
|
# File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = zip(square, grains)
print (board)
Convert zipped list to dictionary# File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
print type(board)
for k, v in board.iteritems():
print k, v
|
<commit_before># File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = zip(square, grains)
print (board)
<commit_msg>Convert zipped list to dictionary<commit_after># File: grains.py
# Purpose: Write a program that calculates the number of grains of wheat
# on a chessboard given that the number on each square doubles.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 05:25 PM
square = [x for x in range(1, 65)]
grains = [2 ** x for x in range(0, 65)]
board = dict(zip(square, grains))
print type(board)
for k, v in board.iteritems():
print k, v
|
3a703d05a85c275cbc02ec6ab19d621d7bc2f85a
|
accounts/forms.py
|
accounts/forms.py
|
from django.forms import ModelForm
from .models import CompsocUser, DatabaseAccount, ShellAccount
class CompsocUserForm(ModelForm):
class Meta:
model = CompsocUser
fields = ['nickname', 'website_title', 'website_url']
class ShellAccountForm(ModelForm):
class Meta:
model = ShellAccount
fields = ['name']
error_messages = {
'name': {
}
}
class DatabaseAccountForm(ModelForm):
class Meta:
model = DatabaseAccount
fields = ['name']
|
from django.forms import ModelForm
from .models import CompsocUser, DatabaseAccount, ShellAccount
class CompsocUserForm(ModelForm):
class Meta:
model = CompsocUser
fields = ['nickname', 'website_title', 'website_url']
class ShellAccountForm(ModelForm):
class Meta:
model = ShellAccount
fields = ['name']
class DatabaseAccountForm(ModelForm):
class Meta:
model = DatabaseAccount
fields = ['name']
|
Remove blank field from Meta class
|
Remove blank field from Meta class
|
Python
|
mit
|
davidjrichardson/uwcs-zarya,davidjrichardson/uwcs-zarya
|
from django.forms import ModelForm
from .models import CompsocUser, DatabaseAccount, ShellAccount
class CompsocUserForm(ModelForm):
class Meta:
model = CompsocUser
fields = ['nickname', 'website_title', 'website_url']
class ShellAccountForm(ModelForm):
class Meta:
model = ShellAccount
fields = ['name']
error_messages = {
'name': {
}
}
class DatabaseAccountForm(ModelForm):
class Meta:
model = DatabaseAccount
fields = ['name']
Remove blank field from Meta class
|
from django.forms import ModelForm
from .models import CompsocUser, DatabaseAccount, ShellAccount
class CompsocUserForm(ModelForm):
class Meta:
model = CompsocUser
fields = ['nickname', 'website_title', 'website_url']
class ShellAccountForm(ModelForm):
class Meta:
model = ShellAccount
fields = ['name']
class DatabaseAccountForm(ModelForm):
class Meta:
model = DatabaseAccount
fields = ['name']
|
<commit_before>from django.forms import ModelForm
from .models import CompsocUser, DatabaseAccount, ShellAccount
class CompsocUserForm(ModelForm):
class Meta:
model = CompsocUser
fields = ['nickname', 'website_title', 'website_url']
class ShellAccountForm(ModelForm):
class Meta:
model = ShellAccount
fields = ['name']
error_messages = {
'name': {
}
}
class DatabaseAccountForm(ModelForm):
class Meta:
model = DatabaseAccount
fields = ['name']
<commit_msg>Remove blank field from Meta class<commit_after>
|
from django.forms import ModelForm
from .models import CompsocUser, DatabaseAccount, ShellAccount
class CompsocUserForm(ModelForm):
class Meta:
model = CompsocUser
fields = ['nickname', 'website_title', 'website_url']
class ShellAccountForm(ModelForm):
class Meta:
model = ShellAccount
fields = ['name']
class DatabaseAccountForm(ModelForm):
class Meta:
model = DatabaseAccount
fields = ['name']
|
from django.forms import ModelForm
from .models import CompsocUser, DatabaseAccount, ShellAccount
class CompsocUserForm(ModelForm):
class Meta:
model = CompsocUser
fields = ['nickname', 'website_title', 'website_url']
class ShellAccountForm(ModelForm):
class Meta:
model = ShellAccount
fields = ['name']
error_messages = {
'name': {
}
}
class DatabaseAccountForm(ModelForm):
class Meta:
model = DatabaseAccount
fields = ['name']
Remove blank field from Meta classfrom django.forms import ModelForm
from .models import CompsocUser, DatabaseAccount, ShellAccount
class CompsocUserForm(ModelForm):
class Meta:
model = CompsocUser
fields = ['nickname', 'website_title', 'website_url']
class ShellAccountForm(ModelForm):
class Meta:
model = ShellAccount
fields = ['name']
class DatabaseAccountForm(ModelForm):
class Meta:
model = DatabaseAccount
fields = ['name']
|
<commit_before>from django.forms import ModelForm
from .models import CompsocUser, DatabaseAccount, ShellAccount
class CompsocUserForm(ModelForm):
class Meta:
model = CompsocUser
fields = ['nickname', 'website_title', 'website_url']
class ShellAccountForm(ModelForm):
class Meta:
model = ShellAccount
fields = ['name']
error_messages = {
'name': {
}
}
class DatabaseAccountForm(ModelForm):
class Meta:
model = DatabaseAccount
fields = ['name']
<commit_msg>Remove blank field from Meta class<commit_after>from django.forms import ModelForm
from .models import CompsocUser, DatabaseAccount, ShellAccount
class CompsocUserForm(ModelForm):
class Meta:
model = CompsocUser
fields = ['nickname', 'website_title', 'website_url']
class ShellAccountForm(ModelForm):
class Meta:
model = ShellAccount
fields = ['name']
class DatabaseAccountForm(ModelForm):
class Meta:
model = DatabaseAccount
fields = ['name']
|
bfd0058efdf362567cd4638244ab3ff47e078398
|
opencog/python/examples/blending_agent_demo.py
|
opencog/python/examples/blending_agent_demo.py
|
__author__ = 'Keyvan'
from blending.agents import DummyBlendingAgent
from opencog.atomspace import AtomSpace
from opencog.cogserver import Server
if __name__ == '__main__':
server = Server()
server.add_mind_agent(DummyBlendingAgent())
server.run(AtomSpace())
|
__author__ = 'Keyvan'
from blending.agents import DummyBlendingAgent
from opencog.atomspace import AtomSpace
from opencog.cogserver import Server
# Check if git shows the branch
if __name__ == '__main__':
server = Server()
server.add_mind_agent(DummyBlendingAgent())
server.run(AtomSpace())
|
Check if git shows graph nicely
|
Check if git shows graph nicely
|
Python
|
agpl-3.0
|
shujingke/opencog,ArvinPan/atomspace,roselleebarle04/opencog,yantrabuddhi/atomspace,kim135797531/opencog,sanuj/opencog,printedheart/atomspace,ceefour/atomspace,cosmoharrigan/opencog,TheNameIsNigel/opencog,andre-senna/opencog,jlegendary/opencog,cosmoharrigan/atomspace,shujingke/opencog,ruiting/opencog,anitzkin/opencog,MarcosPividori/atomspace,iAMr00t/opencog,virneo/atomspace,gavrieltal/opencog,ArvinPan/opencog,Selameab/opencog,MarcosPividori/atomspace,inflector/opencog,prateeksaxena2809/opencog,inflector/opencog,rTreutlein/atomspace,rodsol/atomspace,Tiggels/opencog,printedheart/atomspace,williampma/atomspace,andre-senna/opencog,cosmoharrigan/atomspace,inflector/opencog,printedheart/opencog,Tiggels/opencog,rohit12/opencog,prateeksaxena2809/opencog,iAMr00t/opencog,printedheart/opencog,misgeatgit/opencog,cosmoharrigan/opencog,yantrabuddhi/opencog,yantrabuddhi/opencog,UIKit0/atomspace,zhaozengguang/opencog,williampma/opencog,inflector/atomspace,Selameab/atomspace,AmeBel/opencog,rohit12/opencog,misgeatgit/atomspace,gavrieltal/opencog,andre-senna/opencog,UIKit0/atomspace,yantrabuddhi/opencog,rohit12/opencog,shujingke/opencog,virneo/atomspace,sumitsourabh/opencog,tim777z/opencog,Allend575/opencog,ruiting/opencog,kinoc/opencog,virneo/opencog,yantrabuddhi/opencog,kinoc/opencog,cosmoharrigan/opencog,UIKit0/atomspace,AmeBel/atomspace,kinoc/opencog,ceefour/atomspace,sumitsourabh/opencog,misgeatgit/opencog,AmeBel/atomspace,gaapt/opencog,tim777z/opencog,Allend575/opencog,kim135797531/opencog,ruiting/opencog,kim135797531/opencog,ArvinPan/atomspace,anitzkin/opencog,ceefour/opencog,misgeatgit/opencog,rTreutlein/atomspace,AmeBel/atomspace,prateeksaxena2809/opencog,jlegendary/opencog,TheNameIsNigel/opencog,cosmoharrigan/atomspace,williampma/opencog,eddiemonroe/atomspace,rohit12/opencog,Selameab/atomspace,williampma/atomspace,ArvinPan/opencog,eddiemonroe/opencog,virneo/opencog,inflector/opencog,ceefour/opencog,zhaozengguang/opencog,gavrieltal/opencog,kinoc/opencog,rodsol/opencog,misgeat
git/opencog,jlegendary/opencog,Selameab/atomspace,prateeksaxena2809/opencog,ceefour/atomspace,eddiemonroe/atomspace,williampma/opencog,eddiemonroe/atomspace,gaapt/opencog,virneo/opencog,williampma/opencog,sanuj/opencog,ceefour/opencog,yantrabuddhi/opencog,rodsol/atomspace,ceefour/opencog,prateeksaxena2809/opencog,zhaozengguang/opencog,TheNameIsNigel/opencog,AmeBel/atomspace,virneo/opencog,tim777z/opencog,gaapt/opencog,iAMr00t/opencog,misgeatgit/atomspace,shujingke/opencog,virneo/opencog,Allend575/opencog,yantrabuddhi/opencog,prateeksaxena2809/opencog,eddiemonroe/atomspace,inflector/opencog,eddiemonroe/opencog,rohit12/atomspace,rodsol/opencog,jlegendary/opencog,Allend575/opencog,roselleebarle04/opencog,tim777z/opencog,AmeBel/opencog,kim135797531/opencog,yantrabuddhi/atomspace,misgeatgit/atomspace,printedheart/opencog,jlegendary/opencog,virneo/atomspace,misgeatgit/atomspace,misgeatgit/opencog,andre-senna/opencog,kinoc/opencog,TheNameIsNigel/opencog,anitzkin/opencog,AmeBel/opencog,inflector/atomspace,shujingke/opencog,andre-senna/opencog,sanuj/opencog,inflector/atomspace,yantrabuddhi/atomspace,ArvinPan/opencog,eddiemonroe/opencog,iAMr00t/opencog,TheNameIsNigel/opencog,ruiting/opencog,roselleebarle04/opencog,Selameab/opencog,eddiemonroe/opencog,MarcosPividori/atomspace,Selameab/opencog,Tiggels/opencog,sumitsourabh/opencog,rodsol/opencog,anitzkin/opencog,ruiting/opencog,rodsol/opencog,gavrieltal/opencog,TheNameIsNigel/opencog,zhaozengguang/opencog,ArvinPan/opencog,misgeatgit/opencog,andre-senna/opencog,Selameab/opencog,ceefour/opencog,eddiemonroe/atomspace,Selameab/atomspace,jswiergo/atomspace,gaapt/opencog,roselleebarle04/opencog,zhaozengguang/opencog,ceefour/opencog,ArvinPan/atomspace,misgeatgit/atomspace,andre-senna/opencog,Selameab/opencog,eddiemonroe/opencog,Allend575/opencog,roselleebarle04/opencog,gavrieltal/opencog,kinoc/opencog,rTreutlein/atomspace,ruiting/opencog,gavrieltal/opencog,Selameab/opencog,rohit12/atomspace,eddiemonroe/opencog,inflector/atomspace,gaapt
/opencog,virneo/opencog,misgeatgit/opencog,tim777z/opencog,AmeBel/atomspace,cosmoharrigan/opencog,shujingke/opencog,printedheart/atomspace,Allend575/opencog,jswiergo/atomspace,ArvinPan/opencog,kim135797531/opencog,sumitsourabh/opencog,misgeatgit/opencog,jlegendary/opencog,inflector/opencog,gaapt/opencog,anitzkin/opencog,roselleebarle04/opencog,gavrieltal/opencog,sumitsourabh/opencog,sanuj/opencog,MarcosPividori/atomspace,williampma/opencog,yantrabuddhi/atomspace,jswiergo/atomspace,cosmoharrigan/opencog,zhaozengguang/opencog,printedheart/atomspace,rTreutlein/atomspace,kinoc/opencog,jswiergo/atomspace,misgeatgit/opencog,rodsol/atomspace,kim135797531/opencog,virneo/opencog,yantrabuddhi/opencog,williampma/atomspace,rTreutlein/atomspace,ArvinPan/atomspace,iAMr00t/opencog,williampma/opencog,rohit12/opencog,rohit12/opencog,yantrabuddhi/atomspace,rohit12/atomspace,anitzkin/opencog,Tiggels/opencog,sanuj/opencog,Tiggels/opencog,anitzkin/opencog,ArvinPan/opencog,williampma/atomspace,ceefour/opencog,iAMr00t/opencog,UIKit0/atomspace,sumitsourabh/opencog,virneo/atomspace,inflector/opencog,shujingke/opencog,kim135797531/opencog,sumitsourabh/opencog,printedheart/opencog,ceefour/atomspace,roselleebarle04/opencog,inflector/atomspace,AmeBel/opencog,cosmoharrigan/atomspace,printedheart/opencog,cosmoharrigan/opencog,gaapt/opencog,Tiggels/opencog,AmeBel/opencog,rodsol/atomspace,inflector/opencog,rodsol/opencog,AmeBel/opencog,jlegendary/opencog,rodsol/opencog,prateeksaxena2809/opencog,sanuj/opencog,ruiting/opencog,tim777z/opencog,AmeBel/opencog,rohit12/atomspace,eddiemonroe/opencog,Allend575/opencog,printedheart/opencog
|
__author__ = 'Keyvan'
from blending.agents import DummyBlendingAgent
from opencog.atomspace import AtomSpace
from opencog.cogserver import Server
if __name__ == '__main__':
server = Server()
server.add_mind_agent(DummyBlendingAgent())
server.run(AtomSpace())Check if git shows graph nicely
|
__author__ = 'Keyvan'
from blending.agents import DummyBlendingAgent
from opencog.atomspace import AtomSpace
from opencog.cogserver import Server
# Check if git shows the branch
if __name__ == '__main__':
server = Server()
server.add_mind_agent(DummyBlendingAgent())
server.run(AtomSpace())
|
<commit_before>__author__ = 'Keyvan'
from blending.agents import DummyBlendingAgent
from opencog.atomspace import AtomSpace
from opencog.cogserver import Server
if __name__ == '__main__':
server = Server()
server.add_mind_agent(DummyBlendingAgent())
server.run(AtomSpace())<commit_msg>Check if git shows graph nicely<commit_after>
|
__author__ = 'Keyvan'
from blending.agents import DummyBlendingAgent
from opencog.atomspace import AtomSpace
from opencog.cogserver import Server
# Check if git shows the branch
if __name__ == '__main__':
server = Server()
server.add_mind_agent(DummyBlendingAgent())
server.run(AtomSpace())
|
__author__ = 'Keyvan'
from blending.agents import DummyBlendingAgent
from opencog.atomspace import AtomSpace
from opencog.cogserver import Server
if __name__ == '__main__':
server = Server()
server.add_mind_agent(DummyBlendingAgent())
server.run(AtomSpace())Check if git shows graph nicely__author__ = 'Keyvan'
from blending.agents import DummyBlendingAgent
from opencog.atomspace import AtomSpace
from opencog.cogserver import Server
# Check if git shows the branch
if __name__ == '__main__':
server = Server()
server.add_mind_agent(DummyBlendingAgent())
server.run(AtomSpace())
|
<commit_before>__author__ = 'Keyvan'
from blending.agents import DummyBlendingAgent
from opencog.atomspace import AtomSpace
from opencog.cogserver import Server
if __name__ == '__main__':
server = Server()
server.add_mind_agent(DummyBlendingAgent())
server.run(AtomSpace())<commit_msg>Check if git shows graph nicely<commit_after>__author__ = 'Keyvan'
from blending.agents import DummyBlendingAgent
from opencog.atomspace import AtomSpace
from opencog.cogserver import Server
# Check if git shows the branch
if __name__ == '__main__':
server = Server()
server.add_mind_agent(DummyBlendingAgent())
server.run(AtomSpace())
|
81fc712a28c44bc9aca2b7dd48449285dcd32bcc
|
satori.tools/satori/tools/console/__init__.py
|
satori.tools/satori/tools/console/__init__.py
|
# vim:ts=4:sts=4:sw=4:expandtab
def main():
from satori.tools import setup
setup()
import code
import readline
console = code.InteractiveConsole()
console.runcode('from satori.client.common import want_import')
console.runcode('want_import(globals(), "*")')
console.interact()
|
# vim:ts=4:sts=4:sw=4:expandtab
def main():
from satori.tools import options, setup
options.add_argument('--ipython', help='Use IPython', action='store_true')
flags = setup()
from satori.client.common import want_import
want_import(globals(), "*")
if flags.ipython:
print 'IPython needs to be manually installed in your virtual environment'
from IPython import embed
embed()
else:
import code
console = code.InteractiveConsole()
console.interact()
|
Add IPython support to satori.console
|
Add IPython support to satori.console
|
Python
|
mit
|
zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori
|
# vim:ts=4:sts=4:sw=4:expandtab
def main():
from satori.tools import setup
setup()
import code
import readline
console = code.InteractiveConsole()
console.runcode('from satori.client.common import want_import')
console.runcode('want_import(globals(), "*")')
console.interact()
Add IPython support to satori.console
|
# vim:ts=4:sts=4:sw=4:expandtab
def main():
from satori.tools import options, setup
options.add_argument('--ipython', help='Use IPython', action='store_true')
flags = setup()
from satori.client.common import want_import
want_import(globals(), "*")
if flags.ipython:
print 'IPython needs to be manually installed in your virtual environment'
from IPython import embed
embed()
else:
import code
console = code.InteractiveConsole()
console.interact()
|
<commit_before># vim:ts=4:sts=4:sw=4:expandtab
def main():
from satori.tools import setup
setup()
import code
import readline
console = code.InteractiveConsole()
console.runcode('from satori.client.common import want_import')
console.runcode('want_import(globals(), "*")')
console.interact()
<commit_msg>Add IPython support to satori.console<commit_after>
|
# vim:ts=4:sts=4:sw=4:expandtab
def main():
from satori.tools import options, setup
options.add_argument('--ipython', help='Use IPython', action='store_true')
flags = setup()
from satori.client.common import want_import
want_import(globals(), "*")
if flags.ipython:
print 'IPython needs to be manually installed in your virtual environment'
from IPython import embed
embed()
else:
import code
console = code.InteractiveConsole()
console.interact()
|
# vim:ts=4:sts=4:sw=4:expandtab
def main():
from satori.tools import setup
setup()
import code
import readline
console = code.InteractiveConsole()
console.runcode('from satori.client.common import want_import')
console.runcode('want_import(globals(), "*")')
console.interact()
Add IPython support to satori.console# vim:ts=4:sts=4:sw=4:expandtab
def main():
from satori.tools import options, setup
options.add_argument('--ipython', help='Use IPython', action='store_true')
flags = setup()
from satori.client.common import want_import
want_import(globals(), "*")
if flags.ipython:
print 'IPython needs to be manually installed in your virtual environment'
from IPython import embed
embed()
else:
import code
console = code.InteractiveConsole()
console.interact()
|
<commit_before># vim:ts=4:sts=4:sw=4:expandtab
def main():
from satori.tools import setup
setup()
import code
import readline
console = code.InteractiveConsole()
console.runcode('from satori.client.common import want_import')
console.runcode('want_import(globals(), "*")')
console.interact()
<commit_msg>Add IPython support to satori.console<commit_after># vim:ts=4:sts=4:sw=4:expandtab
def main():
from satori.tools import options, setup
options.add_argument('--ipython', help='Use IPython', action='store_true')
flags = setup()
from satori.client.common import want_import
want_import(globals(), "*")
if flags.ipython:
print 'IPython needs to be manually installed in your virtual environment'
from IPython import embed
embed()
else:
import code
console = code.InteractiveConsole()
console.interact()
|
bec987c4a506adf8811b197bf1066f316b31e74b
|
two_factor/models.py
|
two_factor/models.py
|
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.contrib.auth.models import User
from two_factor import call_gateways, sms_gateways
TOKEN_METHODS = [
('generator', _('Token generator (iPhone/Android App)')),
]
if call_gateways.GATEWAY:
TOKEN_METHODS += [
('call', _('Phone call')),
]
if sms_gateways.GATEWAY:
TOKEN_METHODS += [
('sms', _('Text message')),
]
class VerifiedComputer(models.Model):
user = models.ForeignKey(User, verbose_name=_('verified computer'))
verified_until = models.DateTimeField(_('verified until'))
ip = models.IPAddressField(_('IP address'))
last_used_at = models.DateTimeField(_('last used at'))
class Token(models.Model):
user = models.OneToOneField(User, verbose_name=_('user'))
seed = models.CharField(_('seed'), max_length=16)
method = models.CharField(_('authentication method'),
choices=TOKEN_METHODS, max_length=16)
phone = models.CharField(_('phone number'), max_length=16)
backup_phone = models.CharField(_('backup phone number'), max_length=16,
null=True, blank=True)
|
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.contrib.auth.models import User
from two_factor import call_gateways, sms_gateways
TOKEN_METHODS = [
('generator', _('Token generator (iPhone/Android App)')),
]
if call_gateways.GATEWAY:
TOKEN_METHODS += [
('call', _('Phone call')),
]
if sms_gateways.GATEWAY:
TOKEN_METHODS += [
('sms', _('Text message')),
]
class VerifiedComputer(models.Model):
user = models.ForeignKey(User, verbose_name=_('verified computer'))
verified_until = models.DateTimeField(_('verified until'))
ip = models.IPAddressField(_('IP address'))
last_used_at = models.DateTimeField(_('last used at'))
class Token(models.Model):
user = models.OneToOneField(User, verbose_name=_('user'))
seed = models.CharField(_('seed'), max_length=32)
method = models.CharField(_('authentication method'),
choices=TOKEN_METHODS, max_length=16)
phone = models.CharField(_('phone number'), max_length=16)
backup_phone = models.CharField(_('backup phone number'), max_length=16,
null=True, blank=True)
|
Increase seed length in database to fit default
|
Increase seed length in database to fit default
|
Python
|
mit
|
fusionbox/django-two-factor-auth,mathspace/django-two-factor-auth,mathspace/django-two-factor-auth,moreati/django-two-factor-auth,fusionbox/django-two-factor-auth,moreati/django-two-factor-auth,Bouke/django-two-factor-auth,percipient/django-two-factor-auth,Bouke/django-two-factor-auth,percipient/django-two-factor-auth,koleror/django-two-factor-auth,koleror/django-two-factor-auth
|
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.contrib.auth.models import User
from two_factor import call_gateways, sms_gateways
TOKEN_METHODS = [
('generator', _('Token generator (iPhone/Android App)')),
]
if call_gateways.GATEWAY:
TOKEN_METHODS += [
('call', _('Phone call')),
]
if sms_gateways.GATEWAY:
TOKEN_METHODS += [
('sms', _('Text message')),
]
class VerifiedComputer(models.Model):
user = models.ForeignKey(User, verbose_name=_('verified computer'))
verified_until = models.DateTimeField(_('verified until'))
ip = models.IPAddressField(_('IP address'))
last_used_at = models.DateTimeField(_('last used at'))
class Token(models.Model):
user = models.OneToOneField(User, verbose_name=_('user'))
seed = models.CharField(_('seed'), max_length=16)
method = models.CharField(_('authentication method'),
choices=TOKEN_METHODS, max_length=16)
phone = models.CharField(_('phone number'), max_length=16)
backup_phone = models.CharField(_('backup phone number'), max_length=16,
null=True, blank=True)
Increase seed length in database to fit default
|
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.contrib.auth.models import User
from two_factor import call_gateways, sms_gateways
TOKEN_METHODS = [
('generator', _('Token generator (iPhone/Android App)')),
]
if call_gateways.GATEWAY:
TOKEN_METHODS += [
('call', _('Phone call')),
]
if sms_gateways.GATEWAY:
TOKEN_METHODS += [
('sms', _('Text message')),
]
class VerifiedComputer(models.Model):
user = models.ForeignKey(User, verbose_name=_('verified computer'))
verified_until = models.DateTimeField(_('verified until'))
ip = models.IPAddressField(_('IP address'))
last_used_at = models.DateTimeField(_('last used at'))
class Token(models.Model):
user = models.OneToOneField(User, verbose_name=_('user'))
seed = models.CharField(_('seed'), max_length=32)
method = models.CharField(_('authentication method'),
choices=TOKEN_METHODS, max_length=16)
phone = models.CharField(_('phone number'), max_length=16)
backup_phone = models.CharField(_('backup phone number'), max_length=16,
null=True, blank=True)
|
<commit_before>from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.contrib.auth.models import User
from two_factor import call_gateways, sms_gateways
TOKEN_METHODS = [
('generator', _('Token generator (iPhone/Android App)')),
]
if call_gateways.GATEWAY:
TOKEN_METHODS += [
('call', _('Phone call')),
]
if sms_gateways.GATEWAY:
TOKEN_METHODS += [
('sms', _('Text message')),
]
class VerifiedComputer(models.Model):
user = models.ForeignKey(User, verbose_name=_('verified computer'))
verified_until = models.DateTimeField(_('verified until'))
ip = models.IPAddressField(_('IP address'))
last_used_at = models.DateTimeField(_('last used at'))
class Token(models.Model):
user = models.OneToOneField(User, verbose_name=_('user'))
seed = models.CharField(_('seed'), max_length=16)
method = models.CharField(_('authentication method'),
choices=TOKEN_METHODS, max_length=16)
phone = models.CharField(_('phone number'), max_length=16)
backup_phone = models.CharField(_('backup phone number'), max_length=16,
null=True, blank=True)
<commit_msg>Increase seed length in database to fit default<commit_after>
|
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.contrib.auth.models import User
from two_factor import call_gateways, sms_gateways
TOKEN_METHODS = [
('generator', _('Token generator (iPhone/Android App)')),
]
if call_gateways.GATEWAY:
TOKEN_METHODS += [
('call', _('Phone call')),
]
if sms_gateways.GATEWAY:
TOKEN_METHODS += [
('sms', _('Text message')),
]
class VerifiedComputer(models.Model):
user = models.ForeignKey(User, verbose_name=_('verified computer'))
verified_until = models.DateTimeField(_('verified until'))
ip = models.IPAddressField(_('IP address'))
last_used_at = models.DateTimeField(_('last used at'))
class Token(models.Model):
user = models.OneToOneField(User, verbose_name=_('user'))
seed = models.CharField(_('seed'), max_length=32)
method = models.CharField(_('authentication method'),
choices=TOKEN_METHODS, max_length=16)
phone = models.CharField(_('phone number'), max_length=16)
backup_phone = models.CharField(_('backup phone number'), max_length=16,
null=True, blank=True)
|
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.contrib.auth.models import User
from two_factor import call_gateways, sms_gateways
TOKEN_METHODS = [
('generator', _('Token generator (iPhone/Android App)')),
]
if call_gateways.GATEWAY:
TOKEN_METHODS += [
('call', _('Phone call')),
]
if sms_gateways.GATEWAY:
TOKEN_METHODS += [
('sms', _('Text message')),
]
class VerifiedComputer(models.Model):
user = models.ForeignKey(User, verbose_name=_('verified computer'))
verified_until = models.DateTimeField(_('verified until'))
ip = models.IPAddressField(_('IP address'))
last_used_at = models.DateTimeField(_('last used at'))
class Token(models.Model):
user = models.OneToOneField(User, verbose_name=_('user'))
seed = models.CharField(_('seed'), max_length=16)
method = models.CharField(_('authentication method'),
choices=TOKEN_METHODS, max_length=16)
phone = models.CharField(_('phone number'), max_length=16)
backup_phone = models.CharField(_('backup phone number'), max_length=16,
null=True, blank=True)
Increase seed length in database to fit defaultfrom django.utils.translation import ugettext_lazy as _
from django.db import models
from django.contrib.auth.models import User
from two_factor import call_gateways, sms_gateways
TOKEN_METHODS = [
('generator', _('Token generator (iPhone/Android App)')),
]
if call_gateways.GATEWAY:
TOKEN_METHODS += [
('call', _('Phone call')),
]
if sms_gateways.GATEWAY:
TOKEN_METHODS += [
('sms', _('Text message')),
]
class VerifiedComputer(models.Model):
user = models.ForeignKey(User, verbose_name=_('verified computer'))
verified_until = models.DateTimeField(_('verified until'))
ip = models.IPAddressField(_('IP address'))
last_used_at = models.DateTimeField(_('last used at'))
class Token(models.Model):
user = models.OneToOneField(User, verbose_name=_('user'))
seed = models.CharField(_('seed'), max_length=32)
method = models.CharField(_('authentication method'),
choices=TOKEN_METHODS, max_length=16)
phone = models.CharField(_('phone number'), max_length=16)
backup_phone = models.CharField(_('backup phone number'), max_length=16,
null=True, blank=True)
|
<commit_before>from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.contrib.auth.models import User
from two_factor import call_gateways, sms_gateways
TOKEN_METHODS = [
('generator', _('Token generator (iPhone/Android App)')),
]
if call_gateways.GATEWAY:
TOKEN_METHODS += [
('call', _('Phone call')),
]
if sms_gateways.GATEWAY:
TOKEN_METHODS += [
('sms', _('Text message')),
]
class VerifiedComputer(models.Model):
user = models.ForeignKey(User, verbose_name=_('verified computer'))
verified_until = models.DateTimeField(_('verified until'))
ip = models.IPAddressField(_('IP address'))
last_used_at = models.DateTimeField(_('last used at'))
class Token(models.Model):
user = models.OneToOneField(User, verbose_name=_('user'))
seed = models.CharField(_('seed'), max_length=16)
method = models.CharField(_('authentication method'),
choices=TOKEN_METHODS, max_length=16)
phone = models.CharField(_('phone number'), max_length=16)
backup_phone = models.CharField(_('backup phone number'), max_length=16,
null=True, blank=True)
<commit_msg>Increase seed length in database to fit default<commit_after>from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.contrib.auth.models import User
from two_factor import call_gateways, sms_gateways
TOKEN_METHODS = [
('generator', _('Token generator (iPhone/Android App)')),
]
if call_gateways.GATEWAY:
TOKEN_METHODS += [
('call', _('Phone call')),
]
if sms_gateways.GATEWAY:
TOKEN_METHODS += [
('sms', _('Text message')),
]
class VerifiedComputer(models.Model):
user = models.ForeignKey(User, verbose_name=_('verified computer'))
verified_until = models.DateTimeField(_('verified until'))
ip = models.IPAddressField(_('IP address'))
last_used_at = models.DateTimeField(_('last used at'))
class Token(models.Model):
user = models.OneToOneField(User, verbose_name=_('user'))
seed = models.CharField(_('seed'), max_length=32)
method = models.CharField(_('authentication method'),
choices=TOKEN_METHODS, max_length=16)
phone = models.CharField(_('phone number'), max_length=16)
backup_phone = models.CharField(_('backup phone number'), max_length=16,
null=True, blank=True)
|
f7d914f71c81fc1f9dd04f7b5ce37a43dd1fcdeb
|
fabfile.py
|
fabfile.py
|
from fabric.api import local
def test():
local('coverage run --source=btsync,test $(which nosetests) test/unit')
def coverage():
test()
local('coverage report -m')
def test_integration():
pass
|
from fabric.api import local
def test():
local('coverage run --source=btsync,test/unit $(which nosetests) test/unit')
def coverage():
test()
local('coverage report -m')
def test_integration():
pass
|
Exclude integration test in coverage report
|
Exclude integration test in coverage report
|
Python
|
mit
|
kevinjqiu/btsync.py
|
from fabric.api import local
def test():
local('coverage run --source=btsync,test $(which nosetests) test/unit')
def coverage():
test()
local('coverage report -m')
def test_integration():
pass
Exclude integration test in coverage report
|
from fabric.api import local
def test():
local('coverage run --source=btsync,test/unit $(which nosetests) test/unit')
def coverage():
test()
local('coverage report -m')
def test_integration():
pass
|
<commit_before>from fabric.api import local
def test():
local('coverage run --source=btsync,test $(which nosetests) test/unit')
def coverage():
test()
local('coverage report -m')
def test_integration():
pass
<commit_msg>Exclude integration test in coverage report<commit_after>
|
from fabric.api import local
def test():
local('coverage run --source=btsync,test/unit $(which nosetests) test/unit')
def coverage():
test()
local('coverage report -m')
def test_integration():
pass
|
from fabric.api import local
def test():
local('coverage run --source=btsync,test $(which nosetests) test/unit')
def coverage():
test()
local('coverage report -m')
def test_integration():
pass
Exclude integration test in coverage reportfrom fabric.api import local
def test():
local('coverage run --source=btsync,test/unit $(which nosetests) test/unit')
def coverage():
test()
local('coverage report -m')
def test_integration():
pass
|
<commit_before>from fabric.api import local
def test():
local('coverage run --source=btsync,test $(which nosetests) test/unit')
def coverage():
test()
local('coverage report -m')
def test_integration():
pass
<commit_msg>Exclude integration test in coverage report<commit_after>from fabric.api import local
def test():
local('coverage run --source=btsync,test/unit $(which nosetests) test/unit')
def coverage():
test()
local('coverage report -m')
def test_integration():
pass
|
a2c92c0be31e1d7a31625878e7bc68e23930224c
|
loop.py
|
loop.py
|
# Say anything you type, and write anything you say.
# Stops when you say "turn off" or type "turn off".
import speech
import sys
inputs = ["hi", "foo", "lemon", "hello world"]
output = []
current_run = []
def callback(phrase, listener):
speech.say(phrase)
if phrase == "turn off":
speech.say("Goodbye.")
listener.stoplistening()
sys.exit()
print "Anything you type, speech will say back."
print "Anything you say, speech will print out."
print "Say or type 'turn off' to quit."
print
listener = speech.listenforanything(callback)
while listener.islistening():
for i in range(0, len(inputs)):
speech.say(str(inputs[i]))
text = raw_input("> ")
if text == "turn off":
listener.stoplistening()
sys.exit()
else:
speech.say(text)
|
# Say anything you type, and write anything you say.
# Stops when you say "turn off" or type "turn off".
import speech
import sys
import time
lemon = "lemon"
output = []
current_run = []
waiting = False
hasDetect = False
print "Say something."
def callback(phrase, listener):
speech.say(phrase)
print phrase
lemon = str(phrase)
print lemon
hasDetect = True
waiting = False
listener = speech.listenforanything(callback)
while listener.islistening():
if not waiting and not hasDetect:
waiting = True
speech.say(lemon)
|
Change lemon if speech is detected
|
Change lemon if speech is detected
If speech is detected, change lemon to whatever was detected. Also print
it.
|
Python
|
mit
|
powderblock/SpeechLooper
|
# Say anything you type, and write anything you say.
# Stops when you say "turn off" or type "turn off".
import speech
import sys
inputs = ["hi", "foo", "lemon", "hello world"]
output = []
current_run = []
def callback(phrase, listener):
speech.say(phrase)
if phrase == "turn off":
speech.say("Goodbye.")
listener.stoplistening()
sys.exit()
print "Anything you type, speech will say back."
print "Anything you say, speech will print out."
print "Say or type 'turn off' to quit."
print
listener = speech.listenforanything(callback)
while listener.islistening():
for i in range(0, len(inputs)):
speech.say(str(inputs[i]))
text = raw_input("> ")
if text == "turn off":
listener.stoplistening()
sys.exit()
else:
speech.say(text)
Change lemon if speech is detected
If speech is detected, change lemon to whatever was detected. Also print
it.
|
# Say anything you type, and write anything you say.
# Stops when you say "turn off" or type "turn off".
import speech
import sys
import time
lemon = "lemon"
output = []
current_run = []
waiting = False
hasDetect = False
print "Say something."
def callback(phrase, listener):
speech.say(phrase)
print phrase
lemon = str(phrase)
print lemon
hasDetect = True
waiting = False
listener = speech.listenforanything(callback)
while listener.islistening():
if not waiting and not hasDetect:
waiting = True
speech.say(lemon)
|
<commit_before># Say anything you type, and write anything you say.
# Stops when you say "turn off" or type "turn off".
import speech
import sys
inputs = ["hi", "foo", "lemon", "hello world"]
output = []
current_run = []
def callback(phrase, listener):
speech.say(phrase)
if phrase == "turn off":
speech.say("Goodbye.")
listener.stoplistening()
sys.exit()
print "Anything you type, speech will say back."
print "Anything you say, speech will print out."
print "Say or type 'turn off' to quit."
print
listener = speech.listenforanything(callback)
while listener.islistening():
for i in range(0, len(inputs)):
speech.say(str(inputs[i]))
text = raw_input("> ")
if text == "turn off":
listener.stoplistening()
sys.exit()
else:
speech.say(text)
<commit_msg>Change lemon if speech is detected
If speech is detected, change lemon to whatever was detected. Also print
it.<commit_after>
|
# Say anything you type, and write anything you say.
# Stops when you say "turn off" or type "turn off".
import speech
import sys
import time
lemon = "lemon"
output = []
current_run = []
waiting = False
hasDetect = False
print "Say something."
def callback(phrase, listener):
speech.say(phrase)
print phrase
lemon = str(phrase)
print lemon
hasDetect = True
waiting = False
listener = speech.listenforanything(callback)
while listener.islistening():
if not waiting and not hasDetect:
waiting = True
speech.say(lemon)
|
# Say anything you type, and write anything you say.
# Stops when you say "turn off" or type "turn off".
import speech
import sys
inputs = ["hi", "foo", "lemon", "hello world"]
output = []
current_run = []
def callback(phrase, listener):
speech.say(phrase)
if phrase == "turn off":
speech.say("Goodbye.")
listener.stoplistening()
sys.exit()
print "Anything you type, speech will say back."
print "Anything you say, speech will print out."
print "Say or type 'turn off' to quit."
print
listener = speech.listenforanything(callback)
while listener.islistening():
for i in range(0, len(inputs)):
speech.say(str(inputs[i]))
text = raw_input("> ")
if text == "turn off":
listener.stoplistening()
sys.exit()
else:
speech.say(text)
Change lemon if speech is detected
If speech is detected, change lemon to whatever was detected. Also print
it.# Say anything you type, and write anything you say.
# Stops when you say "turn off" or type "turn off".
import speech
import sys
import time
lemon = "lemon"
output = []
current_run = []
waiting = False
hasDetect = False
print "Say something."
def callback(phrase, listener):
speech.say(phrase)
print phrase
lemon = str(phrase)
print lemon
hasDetect = True
waiting = False
listener = speech.listenforanything(callback)
while listener.islistening():
if not waiting and not hasDetect:
waiting = True
speech.say(lemon)
|
<commit_before># Say anything you type, and write anything you say.
# Stops when you say "turn off" or type "turn off".
import speech
import sys
inputs = ["hi", "foo", "lemon", "hello world"]
output = []
current_run = []
def callback(phrase, listener):
speech.say(phrase)
if phrase == "turn off":
speech.say("Goodbye.")
listener.stoplistening()
sys.exit()
print "Anything you type, speech will say back."
print "Anything you say, speech will print out."
print "Say or type 'turn off' to quit."
print
listener = speech.listenforanything(callback)
while listener.islistening():
for i in range(0, len(inputs)):
speech.say(str(inputs[i]))
text = raw_input("> ")
if text == "turn off":
listener.stoplistening()
sys.exit()
else:
speech.say(text)
<commit_msg>Change lemon if speech is detected
If speech is detected, change lemon to whatever was detected. Also print
it.<commit_after># Say anything you type, and write anything you say.
# Stops when you say "turn off" or type "turn off".
import speech
import sys
import time
lemon = "lemon"
output = []
current_run = []
waiting = False
hasDetect = False
print "Say something."
def callback(phrase, listener):
speech.say(phrase)
print phrase
lemon = str(phrase)
print lemon
hasDetect = True
waiting = False
listener = speech.listenforanything(callback)
while listener.islistening():
if not waiting and not hasDetect:
waiting = True
speech.say(lemon)
|
8286198235e70a314bd924a2ee7ed29f717b599d
|
session_cleanup/tests.py
|
session_cleanup/tests.py
|
from django.conf import settings
from django.core.cache import cache
from django.test import TestCase
from django.utils.importlib import import_module
from session_cleanup.tasks import cleanup
import datetime
class CleanupTest(TestCase):
def test_session_cleanup(self):
"""
Tests that sessions are deleted by the task
"""
engine = import_module(settings.SESSION_ENGINE)
SessionStore = engine.SessionStore
now = datetime.datetime.now()
last_week = now - datetime.timedelta(days=7)
stores = []
unexpired_stores = []
expired_stores = []
# create unexpired sessions
for i in range(20):
store = SessionStore()
store.save()
stores.append(store)
for store in stores:
self.assertEquals(store.exists(store.session_key), True, 'Session store could not be created')
unexpired_stores = stores[:10]
expired_stores = stores[10:]
# expire some sessions
for store in expired_stores:
store.set_expiry(last_week)
store.save()
cleanup()
for store in unexpired_stores:
self.assertEquals(store.exists(store.session_key), True, 'Unexpired store was deleted by cleanup')
for store in expired_stores:
self.assertEquals(store.exists(store.session_key), False, 'Expired store was not deleted by cleanup')
|
from django.conf import settings
from django.core.cache import cache
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import timezone
from django.utils.importlib import import_module
from session_cleanup.tasks import cleanup
import datetime
class CleanupTest(TestCase):
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.file")
def test_session_cleanup(self):
"""
Tests that sessions are deleted by the task
"""
engine = import_module(settings.SESSION_ENGINE)
SessionStore = engine.SessionStore
now = timezone.now()
last_week = now - datetime.timedelta(days=7)
stores = []
unexpired_stores = []
expired_stores = []
# create unexpired sessions
for i in range(20):
store = SessionStore()
store.save()
stores.append(store)
for store in stores:
self.assertEquals(store.exists(store.session_key), True, 'Session store could not be created')
unexpired_stores = stores[:10]
expired_stores = stores[10:]
# expire some sessions
for store in expired_stores:
store.set_expiry(last_week)
store.save()
cleanup()
for store in unexpired_stores:
self.assertEquals(store.exists(store.session_key), True, 'Unexpired store was deleted by cleanup')
for store in expired_stores:
self.assertEquals(store.exists(store.session_key), False, 'Expired store was not deleted by cleanup')
|
Update test for timezones & file-based sessions.
|
Update test for timezones & file-based sessions.
|
Python
|
bsd-2-clause
|
sandersnewmedia/django-session-cleanup
|
from django.conf import settings
from django.core.cache import cache
from django.test import TestCase
from django.utils.importlib import import_module
from session_cleanup.tasks import cleanup
import datetime
class CleanupTest(TestCase):
def test_session_cleanup(self):
"""
Tests that sessions are deleted by the task
"""
engine = import_module(settings.SESSION_ENGINE)
SessionStore = engine.SessionStore
now = datetime.datetime.now()
last_week = now - datetime.timedelta(days=7)
stores = []
unexpired_stores = []
expired_stores = []
# create unexpired sessions
for i in range(20):
store = SessionStore()
store.save()
stores.append(store)
for store in stores:
self.assertEquals(store.exists(store.session_key), True, 'Session store could not be created')
unexpired_stores = stores[:10]
expired_stores = stores[10:]
# expire some sessions
for store in expired_stores:
store.set_expiry(last_week)
store.save()
cleanup()
for store in unexpired_stores:
self.assertEquals(store.exists(store.session_key), True, 'Unexpired store was deleted by cleanup')
for store in expired_stores:
self.assertEquals(store.exists(store.session_key), False, 'Expired store was not deleted by cleanup')
Update test for timezones & file-based sessions.
|
from django.conf import settings
from django.core.cache import cache
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import timezone
from django.utils.importlib import import_module
from session_cleanup.tasks import cleanup
import datetime
class CleanupTest(TestCase):
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.file")
def test_session_cleanup(self):
"""
Tests that sessions are deleted by the task
"""
engine = import_module(settings.SESSION_ENGINE)
SessionStore = engine.SessionStore
now = timezone.now()
last_week = now - datetime.timedelta(days=7)
stores = []
unexpired_stores = []
expired_stores = []
# create unexpired sessions
for i in range(20):
store = SessionStore()
store.save()
stores.append(store)
for store in stores:
self.assertEquals(store.exists(store.session_key), True, 'Session store could not be created')
unexpired_stores = stores[:10]
expired_stores = stores[10:]
# expire some sessions
for store in expired_stores:
store.set_expiry(last_week)
store.save()
cleanup()
for store in unexpired_stores:
self.assertEquals(store.exists(store.session_key), True, 'Unexpired store was deleted by cleanup')
for store in expired_stores:
self.assertEquals(store.exists(store.session_key), False, 'Expired store was not deleted by cleanup')
|
<commit_before>
from django.conf import settings
from django.core.cache import cache
from django.test import TestCase
from django.utils.importlib import import_module
from session_cleanup.tasks import cleanup
import datetime
class CleanupTest(TestCase):
def test_session_cleanup(self):
"""
Tests that sessions are deleted by the task
"""
engine = import_module(settings.SESSION_ENGINE)
SessionStore = engine.SessionStore
now = datetime.datetime.now()
last_week = now - datetime.timedelta(days=7)
stores = []
unexpired_stores = []
expired_stores = []
# create unexpired sessions
for i in range(20):
store = SessionStore()
store.save()
stores.append(store)
for store in stores:
self.assertEquals(store.exists(store.session_key), True, 'Session store could not be created')
unexpired_stores = stores[:10]
expired_stores = stores[10:]
# expire some sessions
for store in expired_stores:
store.set_expiry(last_week)
store.save()
cleanup()
for store in unexpired_stores:
self.assertEquals(store.exists(store.session_key), True, 'Unexpired store was deleted by cleanup')
for store in expired_stores:
self.assertEquals(store.exists(store.session_key), False, 'Expired store was not deleted by cleanup')
<commit_msg>Update test for timezones & file-based sessions.<commit_after>
|
from django.conf import settings
from django.core.cache import cache
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import timezone
from django.utils.importlib import import_module
from session_cleanup.tasks import cleanup
import datetime
class CleanupTest(TestCase):
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.file")
def test_session_cleanup(self):
"""
Tests that sessions are deleted by the task
"""
engine = import_module(settings.SESSION_ENGINE)
SessionStore = engine.SessionStore
now = timezone.now()
last_week = now - datetime.timedelta(days=7)
stores = []
unexpired_stores = []
expired_stores = []
# create unexpired sessions
for i in range(20):
store = SessionStore()
store.save()
stores.append(store)
for store in stores:
self.assertEquals(store.exists(store.session_key), True, 'Session store could not be created')
unexpired_stores = stores[:10]
expired_stores = stores[10:]
# expire some sessions
for store in expired_stores:
store.set_expiry(last_week)
store.save()
cleanup()
for store in unexpired_stores:
self.assertEquals(store.exists(store.session_key), True, 'Unexpired store was deleted by cleanup')
for store in expired_stores:
self.assertEquals(store.exists(store.session_key), False, 'Expired store was not deleted by cleanup')
|
from django.conf import settings
from django.core.cache import cache
from django.test import TestCase
from django.utils.importlib import import_module
from session_cleanup.tasks import cleanup
import datetime
class CleanupTest(TestCase):
def test_session_cleanup(self):
"""
Tests that sessions are deleted by the task
"""
engine = import_module(settings.SESSION_ENGINE)
SessionStore = engine.SessionStore
now = datetime.datetime.now()
last_week = now - datetime.timedelta(days=7)
stores = []
unexpired_stores = []
expired_stores = []
# create unexpired sessions
for i in range(20):
store = SessionStore()
store.save()
stores.append(store)
for store in stores:
self.assertEquals(store.exists(store.session_key), True, 'Session store could not be created')
unexpired_stores = stores[:10]
expired_stores = stores[10:]
# expire some sessions
for store in expired_stores:
store.set_expiry(last_week)
store.save()
cleanup()
for store in unexpired_stores:
self.assertEquals(store.exists(store.session_key), True, 'Unexpired store was deleted by cleanup')
for store in expired_stores:
self.assertEquals(store.exists(store.session_key), False, 'Expired store was not deleted by cleanup')
Update test for timezones & file-based sessions.from django.conf import settings
from django.core.cache import cache
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import timezone
from django.utils.importlib import import_module
from session_cleanup.tasks import cleanup
import datetime
class CleanupTest(TestCase):
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.file")
def test_session_cleanup(self):
"""
Tests that sessions are deleted by the task
"""
engine = import_module(settings.SESSION_ENGINE)
SessionStore = engine.SessionStore
now = timezone.now()
last_week = now - datetime.timedelta(days=7)
stores = []
unexpired_stores = []
expired_stores = []
# create unexpired sessions
for i in range(20):
store = SessionStore()
store.save()
stores.append(store)
for store in stores:
self.assertEquals(store.exists(store.session_key), True, 'Session store could not be created')
unexpired_stores = stores[:10]
expired_stores = stores[10:]
# expire some sessions
for store in expired_stores:
store.set_expiry(last_week)
store.save()
cleanup()
for store in unexpired_stores:
self.assertEquals(store.exists(store.session_key), True, 'Unexpired store was deleted by cleanup')
for store in expired_stores:
self.assertEquals(store.exists(store.session_key), False, 'Expired store was not deleted by cleanup')
|
<commit_before>
from django.conf import settings
from django.core.cache import cache
from django.test import TestCase
from django.utils.importlib import import_module
from session_cleanup.tasks import cleanup
import datetime
class CleanupTest(TestCase):
def test_session_cleanup(self):
"""
Tests that sessions are deleted by the task
"""
engine = import_module(settings.SESSION_ENGINE)
SessionStore = engine.SessionStore
now = datetime.datetime.now()
last_week = now - datetime.timedelta(days=7)
stores = []
unexpired_stores = []
expired_stores = []
# create unexpired sessions
for i in range(20):
store = SessionStore()
store.save()
stores.append(store)
for store in stores:
self.assertEquals(store.exists(store.session_key), True, 'Session store could not be created')
unexpired_stores = stores[:10]
expired_stores = stores[10:]
# expire some sessions
for store in expired_stores:
store.set_expiry(last_week)
store.save()
cleanup()
for store in unexpired_stores:
self.assertEquals(store.exists(store.session_key), True, 'Unexpired store was deleted by cleanup')
for store in expired_stores:
self.assertEquals(store.exists(store.session_key), False, 'Expired store was not deleted by cleanup')
<commit_msg>Update test for timezones & file-based sessions.<commit_after>from django.conf import settings
from django.core.cache import cache
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import timezone
from django.utils.importlib import import_module
from session_cleanup.tasks import cleanup
import datetime
class CleanupTest(TestCase):
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.file")
def test_session_cleanup(self):
"""
Tests that sessions are deleted by the task
"""
engine = import_module(settings.SESSION_ENGINE)
SessionStore = engine.SessionStore
now = timezone.now()
last_week = now - datetime.timedelta(days=7)
stores = []
unexpired_stores = []
expired_stores = []
# create unexpired sessions
for i in range(20):
store = SessionStore()
store.save()
stores.append(store)
for store in stores:
self.assertEquals(store.exists(store.session_key), True, 'Session store could not be created')
unexpired_stores = stores[:10]
expired_stores = stores[10:]
# expire some sessions
for store in expired_stores:
store.set_expiry(last_week)
store.save()
cleanup()
for store in unexpired_stores:
self.assertEquals(store.exists(store.session_key), True, 'Unexpired store was deleted by cleanup')
for store in expired_stores:
self.assertEquals(store.exists(store.session_key), False, 'Expired store was not deleted by cleanup')
|
76ae7716090fde2dfad03de1635082644ac8fbb4
|
account_wallet_sale/hooks.py
|
account_wallet_sale/hooks.py
|
# Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, SUPERUSER_ID
from openupgradelib import openupgrade
def _rename_cagnotte(env):
columns = {
"sale_order_line": [
("account_cagnotte_id", "account_wallet_id"),
],
}
openupgrade.rename_columns(env.cr, columns)
def pre_init_hook(cr):
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
_rename_cagnotte(env)
|
# Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, SUPERUSER_ID
from openupgradelib import openupgrade
def _rename_cagnotte(env):
if not openupgrade.column_exists(
env.cr, "sale_order_line", "account_cagnotte_id"):
return
columns = {
"sale_order_line": [
("account_cagnotte_id", "account_wallet_id"),
],
}
openupgrade.rename_columns(env.cr, columns)
def pre_init_hook(cr):
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
_rename_cagnotte(env)
|
Migrate only if former column exist
|
[14.0][IMP] account_wallet_sale: Migrate only if former column exist
|
Python
|
agpl-3.0
|
acsone/acsone-addons,acsone/acsone-addons,acsone/acsone-addons
|
# Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, SUPERUSER_ID
from openupgradelib import openupgrade
def _rename_cagnotte(env):
columns = {
"sale_order_line": [
("account_cagnotte_id", "account_wallet_id"),
],
}
openupgrade.rename_columns(env.cr, columns)
def pre_init_hook(cr):
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
_rename_cagnotte(env)
[14.0][IMP] account_wallet_sale: Migrate only if former column exist
|
# Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, SUPERUSER_ID
from openupgradelib import openupgrade
def _rename_cagnotte(env):
if not openupgrade.column_exists(
env.cr, "sale_order_line", "account_cagnotte_id"):
return
columns = {
"sale_order_line": [
("account_cagnotte_id", "account_wallet_id"),
],
}
openupgrade.rename_columns(env.cr, columns)
def pre_init_hook(cr):
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
_rename_cagnotte(env)
|
<commit_before># Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, SUPERUSER_ID
from openupgradelib import openupgrade
def _rename_cagnotte(env):
columns = {
"sale_order_line": [
("account_cagnotte_id", "account_wallet_id"),
],
}
openupgrade.rename_columns(env.cr, columns)
def pre_init_hook(cr):
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
_rename_cagnotte(env)
<commit_msg>[14.0][IMP] account_wallet_sale: Migrate only if former column exist<commit_after>
|
# Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, SUPERUSER_ID
from openupgradelib import openupgrade
def _rename_cagnotte(env):
if not openupgrade.column_exists(
env.cr, "sale_order_line", "account_cagnotte_id"):
return
columns = {
"sale_order_line": [
("account_cagnotte_id", "account_wallet_id"),
],
}
openupgrade.rename_columns(env.cr, columns)
def pre_init_hook(cr):
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
_rename_cagnotte(env)
|
# Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, SUPERUSER_ID
from openupgradelib import openupgrade
def _rename_cagnotte(env):
columns = {
"sale_order_line": [
("account_cagnotte_id", "account_wallet_id"),
],
}
openupgrade.rename_columns(env.cr, columns)
def pre_init_hook(cr):
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
_rename_cagnotte(env)
[14.0][IMP] account_wallet_sale: Migrate only if former column exist# Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, SUPERUSER_ID
from openupgradelib import openupgrade
def _rename_cagnotte(env):
if not openupgrade.column_exists(
env.cr, "sale_order_line", "account_cagnotte_id"):
return
columns = {
"sale_order_line": [
("account_cagnotte_id", "account_wallet_id"),
],
}
openupgrade.rename_columns(env.cr, columns)
def pre_init_hook(cr):
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
_rename_cagnotte(env)
|
<commit_before># Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, SUPERUSER_ID
from openupgradelib import openupgrade
def _rename_cagnotte(env):
columns = {
"sale_order_line": [
("account_cagnotte_id", "account_wallet_id"),
],
}
openupgrade.rename_columns(env.cr, columns)
def pre_init_hook(cr):
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
_rename_cagnotte(env)
<commit_msg>[14.0][IMP] account_wallet_sale: Migrate only if former column exist<commit_after># Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, SUPERUSER_ID
from openupgradelib import openupgrade
def _rename_cagnotte(env):
if not openupgrade.column_exists(
env.cr, "sale_order_line", "account_cagnotte_id"):
return
columns = {
"sale_order_line": [
("account_cagnotte_id", "account_wallet_id"),
],
}
openupgrade.rename_columns(env.cr, columns)
def pre_init_hook(cr):
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
_rename_cagnotte(env)
|
e6de5c8f3204b14bc822c769712e1c1d4ba0ee69
|
slave/skia_slave_scripts/chromeos_compile.py
|
slave/skia_slave_scripts/chromeos_compile.py
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step """
from utils import shell_utils
from build_step import BuildStep
from slave import slave_utils
import os
import sys
class Compile(BuildStep):
def _Run(self):
# Add gsutil to PATH
gsutil = slave_utils.GSUtilSetup()
os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)
# Override the default boto file with one which works with ChromeOS utils.
cros_boto_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
os.pardir, os.pardir, 'site_config',
'.boto_cros')
# Run the chromeos_make script.
make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
'chromeos_make')
cmd = [make_cmd,
'-d', self._args['board'],
self._args['target'],
'--cros-boto-file', cros_boto_file,
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(Compile))
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step """
from utils import shell_utils
from build_step import BuildStep
from slave import slave_utils
import os
import sys
class Compile(BuildStep):
def _Run(self):
# Add gsutil to PATH
gsutil = slave_utils.GSUtilSetup()
os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)
# Run the chromeos_make script.
make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
'chromeos_make')
cmd = [make_cmd,
'-d', self._args['board'],
self._args['target'],
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(Compile))
|
Remove no longer used extra boto file parameter
|
Remove no longer used extra boto file parameter
This doesn't break anything, but it isn't needed.
(RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release)
R=rmistry@google.com
Review URL: https://codereview.chromium.org/17575017
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9745 2bbb7eff-a529-9590-31e7-b0007b416f81
|
Python
|
bsd-3-clause
|
google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step """
from utils import shell_utils
from build_step import BuildStep
from slave import slave_utils
import os
import sys
class Compile(BuildStep):
def _Run(self):
# Add gsutil to PATH
gsutil = slave_utils.GSUtilSetup()
os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)
# Override the default boto file with one which works with ChromeOS utils.
cros_boto_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
os.pardir, os.pardir, 'site_config',
'.boto_cros')
# Run the chromeos_make script.
make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
'chromeos_make')
cmd = [make_cmd,
'-d', self._args['board'],
self._args['target'],
'--cros-boto-file', cros_boto_file,
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(Compile))
Remove no longer used extra boto file parameter
This doesn't break anything, but it isn't needed.
(RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release)
R=rmistry@google.com
Review URL: https://codereview.chromium.org/17575017
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9745 2bbb7eff-a529-9590-31e7-b0007b416f81
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step """
from utils import shell_utils
from build_step import BuildStep
from slave import slave_utils
import os
import sys
class Compile(BuildStep):
def _Run(self):
# Add gsutil to PATH
gsutil = slave_utils.GSUtilSetup()
os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)
# Run the chromeos_make script.
make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
'chromeos_make')
cmd = [make_cmd,
'-d', self._args['board'],
self._args['target'],
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(Compile))
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step """
from utils import shell_utils
from build_step import BuildStep
from slave import slave_utils
import os
import sys
class Compile(BuildStep):
def _Run(self):
# Add gsutil to PATH
gsutil = slave_utils.GSUtilSetup()
os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)
# Override the default boto file with one which works with ChromeOS utils.
cros_boto_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
os.pardir, os.pardir, 'site_config',
'.boto_cros')
# Run the chromeos_make script.
make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
'chromeos_make')
cmd = [make_cmd,
'-d', self._args['board'],
self._args['target'],
'--cros-boto-file', cros_boto_file,
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(Compile))
<commit_msg>Remove no longer used extra boto file parameter
This doesn't break anything, but it isn't needed.
(RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release)
R=rmistry@google.com
Review URL: https://codereview.chromium.org/17575017
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9745 2bbb7eff-a529-9590-31e7-b0007b416f81<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step """
from utils import shell_utils
from build_step import BuildStep
from slave import slave_utils
import os
import sys
class Compile(BuildStep):
def _Run(self):
# Add gsutil to PATH
gsutil = slave_utils.GSUtilSetup()
os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)
# Run the chromeos_make script.
make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
'chromeos_make')
cmd = [make_cmd,
'-d', self._args['board'],
self._args['target'],
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(Compile))
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step """
from utils import shell_utils
from build_step import BuildStep
from slave import slave_utils
import os
import sys
class Compile(BuildStep):
def _Run(self):
# Add gsutil to PATH
gsutil = slave_utils.GSUtilSetup()
os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)
# Override the default boto file with one which works with ChromeOS utils.
cros_boto_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
os.pardir, os.pardir, 'site_config',
'.boto_cros')
# Run the chromeos_make script.
make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
'chromeos_make')
cmd = [make_cmd,
'-d', self._args['board'],
self._args['target'],
'--cros-boto-file', cros_boto_file,
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(Compile))
Remove no longer used extra boto file parameter
This doesn't break anything, but it isn't needed.
(RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release)
R=rmistry@google.com
Review URL: https://codereview.chromium.org/17575017
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9745 2bbb7eff-a529-9590-31e7-b0007b416f81#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step """
from utils import shell_utils
from build_step import BuildStep
from slave import slave_utils
import os
import sys
class Compile(BuildStep):
def _Run(self):
# Add gsutil to PATH
gsutil = slave_utils.GSUtilSetup()
os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)
# Run the chromeos_make script.
make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
'chromeos_make')
cmd = [make_cmd,
'-d', self._args['board'],
self._args['target'],
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(Compile))
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step """
from utils import shell_utils
from build_step import BuildStep
from slave import slave_utils
import os
import sys
class Compile(BuildStep):
def _Run(self):
# Add gsutil to PATH
gsutil = slave_utils.GSUtilSetup()
os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)
# Override the default boto file with one which works with ChromeOS utils.
cros_boto_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
os.pardir, os.pardir, 'site_config',
'.boto_cros')
# Run the chromeos_make script.
make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
'chromeos_make')
cmd = [make_cmd,
'-d', self._args['board'],
self._args['target'],
'--cros-boto-file', cros_boto_file,
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(Compile))
<commit_msg>Remove no longer used extra boto file parameter
This doesn't break anything, but it isn't needed.
(RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release)
R=rmistry@google.com
Review URL: https://codereview.chromium.org/17575017
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9745 2bbb7eff-a529-9590-31e7-b0007b416f81<commit_after>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Compile step """
from utils import shell_utils
from build_step import BuildStep
from slave import slave_utils
import os
import sys
class Compile(BuildStep):
def _Run(self):
# Add gsutil to PATH
gsutil = slave_utils.GSUtilSetup()
os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)
# Run the chromeos_make script.
make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
'chromeos_make')
cmd = [make_cmd,
'-d', self._args['board'],
self._args['target'],
'BUILDTYPE=%s' % self._configuration,
]
cmd.extend(self._default_make_flags)
cmd.extend(self._make_flags)
shell_utils.Bash(cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(Compile))
|
5efc9c9d8bc226759574bf2334fffffab340e4ff
|
boxsdk/auth/developer_token_auth.py
|
boxsdk/auth/developer_token_auth.py
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from six.moves import input
from .oauth2 import OAuth2
class DeveloperTokenAuth(OAuth2):
ENTER_TOKEN_PROMPT = 'Enter developer token: '
def __init__(self, get_new_token_callback=None, **kwargs):
self._get_new_token = get_new_token_callback
super(DeveloperTokenAuth, self).__init__(
client_id=None,
client_secret=None,
access_token=self._refresh_developer_token(),
**kwargs
)
def _refresh_developer_token(self):
if self._get_new_token is not None:
return self._get_new_token()
else:
return input(self.ENTER_TOKEN_PROMPT)
def _refresh(self, access_token):
"""
Base class override.
Ask for a new developer token.
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from six.moves import input # pylint:disable=redefined-builtin
from .oauth2 import OAuth2
class DeveloperTokenAuth(OAuth2):
ENTER_TOKEN_PROMPT = 'Enter developer token: '
def __init__(self, get_new_token_callback=None, **kwargs):
self._get_new_token = get_new_token_callback
super(DeveloperTokenAuth, self).__init__(
client_id=None,
client_secret=None,
access_token=self._refresh_developer_token(),
**kwargs
)
def _refresh_developer_token(self):
if self._get_new_token is not None:
return self._get_new_token()
else:
return input(self.ENTER_TOKEN_PROMPT)
def _refresh(self, access_token):
"""
Base class override.
Ask for a new developer token.
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
|
Fix pylint error for redefined builtin input.
|
Fix pylint error for redefined builtin input.
|
Python
|
apache-2.0
|
Frencil/box-python-sdk,box/box-python-sdk,Frencil/box-python-sdk
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from six.moves import input
from .oauth2 import OAuth2
class DeveloperTokenAuth(OAuth2):
ENTER_TOKEN_PROMPT = 'Enter developer token: '
def __init__(self, get_new_token_callback=None, **kwargs):
self._get_new_token = get_new_token_callback
super(DeveloperTokenAuth, self).__init__(
client_id=None,
client_secret=None,
access_token=self._refresh_developer_token(),
**kwargs
)
def _refresh_developer_token(self):
if self._get_new_token is not None:
return self._get_new_token()
else:
return input(self.ENTER_TOKEN_PROMPT)
def _refresh(self, access_token):
"""
Base class override.
Ask for a new developer token.
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
Fix pylint error for redefined builtin input.
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from six.moves import input # pylint:disable=redefined-builtin
from .oauth2 import OAuth2
class DeveloperTokenAuth(OAuth2):
ENTER_TOKEN_PROMPT = 'Enter developer token: '
def __init__(self, get_new_token_callback=None, **kwargs):
self._get_new_token = get_new_token_callback
super(DeveloperTokenAuth, self).__init__(
client_id=None,
client_secret=None,
access_token=self._refresh_developer_token(),
**kwargs
)
def _refresh_developer_token(self):
if self._get_new_token is not None:
return self._get_new_token()
else:
return input(self.ENTER_TOKEN_PROMPT)
def _refresh(self, access_token):
"""
Base class override.
Ask for a new developer token.
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
|
<commit_before># coding: utf-8
from __future__ import unicode_literals, absolute_import
from six.moves import input
from .oauth2 import OAuth2
class DeveloperTokenAuth(OAuth2):
ENTER_TOKEN_PROMPT = 'Enter developer token: '
def __init__(self, get_new_token_callback=None, **kwargs):
self._get_new_token = get_new_token_callback
super(DeveloperTokenAuth, self).__init__(
client_id=None,
client_secret=None,
access_token=self._refresh_developer_token(),
**kwargs
)
def _refresh_developer_token(self):
if self._get_new_token is not None:
return self._get_new_token()
else:
return input(self.ENTER_TOKEN_PROMPT)
def _refresh(self, access_token):
"""
Base class override.
Ask for a new developer token.
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
<commit_msg>Fix pylint error for redefined builtin input.<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from six.moves import input # pylint:disable=redefined-builtin
from .oauth2 import OAuth2
class DeveloperTokenAuth(OAuth2):
ENTER_TOKEN_PROMPT = 'Enter developer token: '
def __init__(self, get_new_token_callback=None, **kwargs):
self._get_new_token = get_new_token_callback
super(DeveloperTokenAuth, self).__init__(
client_id=None,
client_secret=None,
access_token=self._refresh_developer_token(),
**kwargs
)
def _refresh_developer_token(self):
if self._get_new_token is not None:
return self._get_new_token()
else:
return input(self.ENTER_TOKEN_PROMPT)
def _refresh(self, access_token):
"""
Base class override.
Ask for a new developer token.
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from six.moves import input
from .oauth2 import OAuth2
class DeveloperTokenAuth(OAuth2):
ENTER_TOKEN_PROMPT = 'Enter developer token: '
def __init__(self, get_new_token_callback=None, **kwargs):
self._get_new_token = get_new_token_callback
super(DeveloperTokenAuth, self).__init__(
client_id=None,
client_secret=None,
access_token=self._refresh_developer_token(),
**kwargs
)
def _refresh_developer_token(self):
if self._get_new_token is not None:
return self._get_new_token()
else:
return input(self.ENTER_TOKEN_PROMPT)
def _refresh(self, access_token):
"""
Base class override.
Ask for a new developer token.
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
Fix pylint error for redefined builtin input.# coding: utf-8
from __future__ import unicode_literals, absolute_import
from six.moves import input # pylint:disable=redefined-builtin
from .oauth2 import OAuth2
class DeveloperTokenAuth(OAuth2):
ENTER_TOKEN_PROMPT = 'Enter developer token: '
def __init__(self, get_new_token_callback=None, **kwargs):
self._get_new_token = get_new_token_callback
super(DeveloperTokenAuth, self).__init__(
client_id=None,
client_secret=None,
access_token=self._refresh_developer_token(),
**kwargs
)
def _refresh_developer_token(self):
if self._get_new_token is not None:
return self._get_new_token()
else:
return input(self.ENTER_TOKEN_PROMPT)
def _refresh(self, access_token):
"""
Base class override.
Ask for a new developer token.
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
|
<commit_before># coding: utf-8
from __future__ import unicode_literals, absolute_import
from six.moves import input
from .oauth2 import OAuth2
class DeveloperTokenAuth(OAuth2):
ENTER_TOKEN_PROMPT = 'Enter developer token: '
def __init__(self, get_new_token_callback=None, **kwargs):
self._get_new_token = get_new_token_callback
super(DeveloperTokenAuth, self).__init__(
client_id=None,
client_secret=None,
access_token=self._refresh_developer_token(),
**kwargs
)
def _refresh_developer_token(self):
if self._get_new_token is not None:
return self._get_new_token()
else:
return input(self.ENTER_TOKEN_PROMPT)
def _refresh(self, access_token):
"""
Base class override.
Ask for a new developer token.
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
<commit_msg>Fix pylint error for redefined builtin input.<commit_after># coding: utf-8
from __future__ import unicode_literals, absolute_import
from six.moves import input # pylint:disable=redefined-builtin
from .oauth2 import OAuth2
class DeveloperTokenAuth(OAuth2):
ENTER_TOKEN_PROMPT = 'Enter developer token: '
def __init__(self, get_new_token_callback=None, **kwargs):
self._get_new_token = get_new_token_callback
super(DeveloperTokenAuth, self).__init__(
client_id=None,
client_secret=None,
access_token=self._refresh_developer_token(),
**kwargs
)
def _refresh_developer_token(self):
if self._get_new_token is not None:
return self._get_new_token()
else:
return input(self.ENTER_TOKEN_PROMPT)
def _refresh(self, access_token):
"""
Base class override.
Ask for a new developer token.
"""
self._access_token = self._refresh_developer_token()
return self._access_token, None
|
00323935eb8ff1ba5171da56dbb1587d46c48885
|
nodeconductor/core/perms.py
|
nodeconductor/core/perms.py
|
from django.contrib.auth import get_user_model
from nodeconductor.core.permissions import StaffPermissionLogic
User = get_user_model()
PERMISSION_LOGICS = (
(get_user_model(), StaffPermissionLogic(any_permission=True)),
)
|
from django.contrib.auth import get_user_model
from rest_framework.authtoken.models import Token
from nodeconductor.core.permissions import StaffPermissionLogic
User = get_user_model()
PERMISSION_LOGICS = (
(get_user_model(), StaffPermissionLogic(any_permission=True)),
(Token, StaffPermissionLogic(any_permission=True)),
)
|
Allow deletion of tokens from admin (NC-224)
|
Allow deletion of tokens from admin (NC-224)
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
from django.contrib.auth import get_user_model
from nodeconductor.core.permissions import StaffPermissionLogic
User = get_user_model()
PERMISSION_LOGICS = (
(get_user_model(), StaffPermissionLogic(any_permission=True)),
)
Allow deletion of tokens from admin (NC-224)
|
from django.contrib.auth import get_user_model
from rest_framework.authtoken.models import Token
from nodeconductor.core.permissions import StaffPermissionLogic
User = get_user_model()
PERMISSION_LOGICS = (
(get_user_model(), StaffPermissionLogic(any_permission=True)),
(Token, StaffPermissionLogic(any_permission=True)),
)
|
<commit_before>from django.contrib.auth import get_user_model
from nodeconductor.core.permissions import StaffPermissionLogic
User = get_user_model()
PERMISSION_LOGICS = (
(get_user_model(), StaffPermissionLogic(any_permission=True)),
)
<commit_msg>Allow deletion of tokens from admin (NC-224)<commit_after>
|
from django.contrib.auth import get_user_model
from rest_framework.authtoken.models import Token
from nodeconductor.core.permissions import StaffPermissionLogic
User = get_user_model()
PERMISSION_LOGICS = (
(get_user_model(), StaffPermissionLogic(any_permission=True)),
(Token, StaffPermissionLogic(any_permission=True)),
)
|
from django.contrib.auth import get_user_model
from nodeconductor.core.permissions import StaffPermissionLogic
User = get_user_model()
PERMISSION_LOGICS = (
(get_user_model(), StaffPermissionLogic(any_permission=True)),
)
Allow deletion of tokens from admin (NC-224)from django.contrib.auth import get_user_model
from rest_framework.authtoken.models import Token
from nodeconductor.core.permissions import StaffPermissionLogic
User = get_user_model()
PERMISSION_LOGICS = (
(get_user_model(), StaffPermissionLogic(any_permission=True)),
(Token, StaffPermissionLogic(any_permission=True)),
)
|
<commit_before>from django.contrib.auth import get_user_model
from nodeconductor.core.permissions import StaffPermissionLogic
User = get_user_model()
PERMISSION_LOGICS = (
(get_user_model(), StaffPermissionLogic(any_permission=True)),
)
<commit_msg>Allow deletion of tokens from admin (NC-224)<commit_after>from django.contrib.auth import get_user_model
from rest_framework.authtoken.models import Token
from nodeconductor.core.permissions import StaffPermissionLogic
User = get_user_model()
PERMISSION_LOGICS = (
(get_user_model(), StaffPermissionLogic(any_permission=True)),
(Token, StaffPermissionLogic(any_permission=True)),
)
|
61148ba3a9d863034336ca0b220762b012e5ad55
|
papermill/tests/test_cli.py
|
papermill/tests/test_cli.py
|
""" Test the command line interface """
import pytest
from ..cli import _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, 13.71),
("False", False),
("None", False),
(-8.2, -8.2),
(10, 10),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Test the command line interface """
import pytest
from ..cli import _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, 13.71),
("False", False),
("None", False),
(-8.2, -8.2),
(10, 10),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
|
Add default encoding for py2
|
Add default encoding for py2
|
Python
|
bsd-3-clause
|
nteract/papermill,nteract/papermill
|
""" Test the command line interface """
import pytest
from ..cli import _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, 13.71),
("False", False),
("None", False),
(-8.2, -8.2),
(10, 10),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
Add default encoding for py2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Test the command line interface """
import pytest
from ..cli import _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, 13.71),
("False", False),
("None", False),
(-8.2, -8.2),
(10, 10),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
|
<commit_before>""" Test the command line interface """
import pytest
from ..cli import _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, 13.71),
("False", False),
("None", False),
(-8.2, -8.2),
(10, 10),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
<commit_msg>Add default encoding for py2<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Test the command line interface """
import pytest
from ..cli import _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, 13.71),
("False", False),
("None", False),
(-8.2, -8.2),
(10, 10),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
|
""" Test the command line interface """
import pytest
from ..cli import _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, 13.71),
("False", False),
("None", False),
(-8.2, -8.2),
(10, 10),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
Add default encoding for py2#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Test the command line interface """
import pytest
from ..cli import _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, 13.71),
("False", False),
("None", False),
(-8.2, -8.2),
(10, 10),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
|
<commit_before>""" Test the command line interface """
import pytest
from ..cli import _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, 13.71),
("False", False),
("None", False),
(-8.2, -8.2),
(10, 10),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
<commit_msg>Add default encoding for py2<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Test the command line interface """
import pytest
from ..cli import _is_float, _resolve_type
@pytest.mark.parametrize("test_input,expected", [
("True", True),
("False", False),
("None", None),
(13.3, 13.3),
(10, 10),
("hello world", "hello world"),
(u"😍", u"😍"),
])
def test_resolve_type(test_input, expected):
assert _resolve_type(test_input) == expected
@pytest.mark.parametrize("value,expected", [
(13.71, 13.71),
("False", False),
("None", False),
(-8.2, -8.2),
(10, 10),
("hello world", False),
("😍", False),
])
def test_is_float(value, expected):
assert (_is_float(value)) == expected
|
6299103d9f53a9db26cbb5609a8b93996c55d556
|
pyconde/attendees/management/commands/export_badges.py
|
pyconde/attendees/management/commands/export_badges.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from pyconde.attendees.exporters import BadgeExporter
from pyconde.attendees.models import VenueTicket
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--base-url',
action='store',
dest='base_url',
default='',
help='Base URL for profile URLs. Use {uid} as placeholder to '
'circumvent URL resolving and use custom URLs'),
make_option('--indent',
action='store_true',
dest='indent',
default=None,
help='Indent the output'),
)
help = 'Export all valid venue / conference tickets'
def handle(self, *args, **options):
qs = VenueTicket.objects.only_valid()
exporter = BadgeExporter(qs, base_url=options['base_url'],
indent=options['indent'])
self.stdout.write(exporter.json)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from pyconde.attendees.exporters import BadgeExporter
from pyconde.attendees.models import VenueTicket
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--base-url',
action='store',
dest='base_url',
default='',
help='Base URL for profile URLs. Use {uid} as placeholder to '
'circumvent URL resolving and use custom URLs'),
make_option('--indent',
action='store_true',
dest='indent',
default=None,
help='Indent the output'),
make_option('--exclude-ticket-type',
action='store',
dest='exclude_tt',
default=None,
help='comma separated list of ticket type IDs to exclude'),
)
help = 'Export all valid venue / conference tickets'
def handle(self, *args, **options):
qs = VenueTicket.objects.only_valid()
if options['exclude_tt'] is not None:
excluded_tt_ids = map(int, options['exclude_tt'].split(','))
qs = qs.exclude(ticket_type_id__in=excluded_tt_ids)
exporter = BadgeExporter(qs, base_url=options['base_url'],
indent=options['indent'])
self.stdout.write(exporter.json)
|
Allow to exclude ticket types from badge export
|
Allow to exclude ticket types from badge export
7 and 22 are partner program
|
Python
|
bsd-3-clause
|
pysv/djep,EuroPython/djep,EuroPython/djep,pysv/djep,pysv/djep,EuroPython/djep,pysv/djep,pysv/djep,EuroPython/djep
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from pyconde.attendees.exporters import BadgeExporter
from pyconde.attendees.models import VenueTicket
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--base-url',
action='store',
dest='base_url',
default='',
help='Base URL for profile URLs. Use {uid} as placeholder to '
'circumvent URL resolving and use custom URLs'),
make_option('--indent',
action='store_true',
dest='indent',
default=None,
help='Indent the output'),
)
help = 'Export all valid venue / conference tickets'
def handle(self, *args, **options):
qs = VenueTicket.objects.only_valid()
exporter = BadgeExporter(qs, base_url=options['base_url'],
indent=options['indent'])
self.stdout.write(exporter.json)
Allow to exclude ticket types from badge export
7 and 22 are partner program
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from pyconde.attendees.exporters import BadgeExporter
from pyconde.attendees.models import VenueTicket
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--base-url',
action='store',
dest='base_url',
default='',
help='Base URL for profile URLs. Use {uid} as placeholder to '
'circumvent URL resolving and use custom URLs'),
make_option('--indent',
action='store_true',
dest='indent',
default=None,
help='Indent the output'),
make_option('--exclude-ticket-type',
action='store',
dest='exclude_tt',
default=None,
help='comma separated list of ticket type IDs to exclude'),
)
help = 'Export all valid venue / conference tickets'
def handle(self, *args, **options):
qs = VenueTicket.objects.only_valid()
if options['exclude_tt'] is not None:
excluded_tt_ids = map(int, options['exclude_tt'].split(','))
qs = qs.exclude(ticket_type_id__in=excluded_tt_ids)
exporter = BadgeExporter(qs, base_url=options['base_url'],
indent=options['indent'])
self.stdout.write(exporter.json)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from pyconde.attendees.exporters import BadgeExporter
from pyconde.attendees.models import VenueTicket
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--base-url',
action='store',
dest='base_url',
default='',
help='Base URL for profile URLs. Use {uid} as placeholder to '
'circumvent URL resolving and use custom URLs'),
make_option('--indent',
action='store_true',
dest='indent',
default=None,
help='Indent the output'),
)
help = 'Export all valid venue / conference tickets'
def handle(self, *args, **options):
qs = VenueTicket.objects.only_valid()
exporter = BadgeExporter(qs, base_url=options['base_url'],
indent=options['indent'])
self.stdout.write(exporter.json)
<commit_msg>Allow to exclude ticket types from badge export
7 and 22 are partner program<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from pyconde.attendees.exporters import BadgeExporter
from pyconde.attendees.models import VenueTicket
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--base-url',
action='store',
dest='base_url',
default='',
help='Base URL for profile URLs. Use {uid} as placeholder to '
'circumvent URL resolving and use custom URLs'),
make_option('--indent',
action='store_true',
dest='indent',
default=None,
help='Indent the output'),
make_option('--exclude-ticket-type',
action='store',
dest='exclude_tt',
default=None,
help='comma separated list of ticket type IDs to exclude'),
)
help = 'Export all valid venue / conference tickets'
def handle(self, *args, **options):
qs = VenueTicket.objects.only_valid()
if options['exclude_tt'] is not None:
excluded_tt_ids = map(int, options['exclude_tt'].split(','))
qs = qs.exclude(ticket_type_id__in=excluded_tt_ids)
exporter = BadgeExporter(qs, base_url=options['base_url'],
indent=options['indent'])
self.stdout.write(exporter.json)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from pyconde.attendees.exporters import BadgeExporter
from pyconde.attendees.models import VenueTicket
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--base-url',
action='store',
dest='base_url',
default='',
help='Base URL for profile URLs. Use {uid} as placeholder to '
'circumvent URL resolving and use custom URLs'),
make_option('--indent',
action='store_true',
dest='indent',
default=None,
help='Indent the output'),
)
help = 'Export all valid venue / conference tickets'
def handle(self, *args, **options):
qs = VenueTicket.objects.only_valid()
exporter = BadgeExporter(qs, base_url=options['base_url'],
indent=options['indent'])
self.stdout.write(exporter.json)
Allow to exclude ticket types from badge export
7 and 22 are partner program# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from pyconde.attendees.exporters import BadgeExporter
from pyconde.attendees.models import VenueTicket
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--base-url',
action='store',
dest='base_url',
default='',
help='Base URL for profile URLs. Use {uid} as placeholder to '
'circumvent URL resolving and use custom URLs'),
make_option('--indent',
action='store_true',
dest='indent',
default=None,
help='Indent the output'),
make_option('--exclude-ticket-type',
action='store',
dest='exclude_tt',
default=None,
help='comma separated list of ticket type IDs to exclude'),
)
help = 'Export all valid venue / conference tickets'
def handle(self, *args, **options):
qs = VenueTicket.objects.only_valid()
if options['exclude_tt'] is not None:
excluded_tt_ids = map(int, options['exclude_tt'].split(','))
qs = qs.exclude(ticket_type_id__in=excluded_tt_ids)
exporter = BadgeExporter(qs, base_url=options['base_url'],
indent=options['indent'])
self.stdout.write(exporter.json)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from pyconde.attendees.exporters import BadgeExporter
from pyconde.attendees.models import VenueTicket
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--base-url',
action='store',
dest='base_url',
default='',
help='Base URL for profile URLs. Use {uid} as placeholder to '
'circumvent URL resolving and use custom URLs'),
make_option('--indent',
action='store_true',
dest='indent',
default=None,
help='Indent the output'),
)
help = 'Export all valid venue / conference tickets'
def handle(self, *args, **options):
qs = VenueTicket.objects.only_valid()
exporter = BadgeExporter(qs, base_url=options['base_url'],
indent=options['indent'])
self.stdout.write(exporter.json)
<commit_msg>Allow to exclude ticket types from badge export
7 and 22 are partner program<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from pyconde.attendees.exporters import BadgeExporter
from pyconde.attendees.models import VenueTicket
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--base-url',
action='store',
dest='base_url',
default='',
help='Base URL for profile URLs. Use {uid} as placeholder to '
'circumvent URL resolving and use custom URLs'),
make_option('--indent',
action='store_true',
dest='indent',
default=None,
help='Indent the output'),
make_option('--exclude-ticket-type',
action='store',
dest='exclude_tt',
default=None,
help='comma separated list of ticket type IDs to exclude'),
)
help = 'Export all valid venue / conference tickets'
def handle(self, *args, **options):
qs = VenueTicket.objects.only_valid()
if options['exclude_tt'] is not None:
excluded_tt_ids = map(int, options['exclude_tt'].split(','))
qs = qs.exclude(ticket_type_id__in=excluded_tt_ids)
exporter = BadgeExporter(qs, base_url=options['base_url'],
indent=options['indent'])
self.stdout.write(exporter.json)
|
86f7badc8913783eb559a61569fc2b80ceedf744
|
src/nfc/archive/dummy_archive.py
|
src/nfc/archive/dummy_archive.py
|
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
|
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
def open(self, cache_db=False):
pass
def close(self):
pass
|
Add open and close methods to dummy archive.
|
Add open and close methods to dummy archive.
|
Python
|
mit
|
HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper
|
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
Add open and close methods to dummy archive.
|
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
def open(self, cache_db=False):
pass
def close(self):
pass
|
<commit_before>"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
<commit_msg>Add open and close methods to dummy archive.<commit_after>
|
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
def open(self, cache_db=False):
pass
def close(self):
pass
|
"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
Add open and close methods to dummy archive."""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
def open(self, cache_db=False):
pass
def close(self):
pass
|
<commit_before>"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
<commit_msg>Add open and close methods to dummy archive.<commit_after>"""Module containing `DummyArchive` class."""
class DummyArchive(object):
"""Trivial archive implementation for testing purposes."""
@staticmethod
def create(dir_path, stations, detectors, clip_classes):
return DummyArchive(stations, detectors, clip_classes)
def __init__(self, stations, detectors, clip_classes):
self.stations = stations
self.detectors = detectors
self.clip_classes = clip_classes
def open(self, cache_db=False):
pass
def close(self):
pass
|
1709c602b8a423d1eee6521c5e74987db0fc8b81
|
fancypages/contrib/oscar_fancypages/mixins.py
|
fancypages/contrib/oscar_fancypages/mixins.py
|
from ... import mixins
class OscarFancyPageMixin(mixins.FancyPageMixin):
node_attr_name = 'category'
slug_url_kwarg = 'category_slug'
context_object_name = 'fancypage'
def get_context_data(self, **kwargs):
ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs)
ctx[self.context_object_name] = getattr(self, self.page_attr_name)
if self.category:
ctx['object'] = self.category
for container in self.category.page.containers.all():
ctx[container.name] = container
return ctx
|
from ... import mixins
class OscarFancyPageMixin(mixins.FancyPageMixin):
node_attr_name = 'category'
slug_url_kwarg = 'category_slug'
context_object_name = 'products'
def get_context_data(self, **kwargs):
ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs)
ctx['fancypage'] = getattr(self, self.page_attr_name)
if self.category:
ctx['object'] = self.category
for container in self.category.page.containers.all():
ctx[container.name] = container
return ctx
|
Change context object for product list view in Oscar contrib
|
Change context object for product list view in Oscar contrib
|
Python
|
bsd-3-clause
|
tangentlabs/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,socradev/django-fancypages,socradev/django-fancypages
|
from ... import mixins
class OscarFancyPageMixin(mixins.FancyPageMixin):
node_attr_name = 'category'
slug_url_kwarg = 'category_slug'
context_object_name = 'fancypage'
def get_context_data(self, **kwargs):
ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs)
ctx[self.context_object_name] = getattr(self, self.page_attr_name)
if self.category:
ctx['object'] = self.category
for container in self.category.page.containers.all():
ctx[container.name] = container
return ctx
Change context object for product list view in Oscar contrib
|
from ... import mixins
class OscarFancyPageMixin(mixins.FancyPageMixin):
node_attr_name = 'category'
slug_url_kwarg = 'category_slug'
context_object_name = 'products'
def get_context_data(self, **kwargs):
ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs)
ctx['fancypage'] = getattr(self, self.page_attr_name)
if self.category:
ctx['object'] = self.category
for container in self.category.page.containers.all():
ctx[container.name] = container
return ctx
|
<commit_before>from ... import mixins
class OscarFancyPageMixin(mixins.FancyPageMixin):
node_attr_name = 'category'
slug_url_kwarg = 'category_slug'
context_object_name = 'fancypage'
def get_context_data(self, **kwargs):
ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs)
ctx[self.context_object_name] = getattr(self, self.page_attr_name)
if self.category:
ctx['object'] = self.category
for container in self.category.page.containers.all():
ctx[container.name] = container
return ctx
<commit_msg>Change context object for product list view in Oscar contrib<commit_after>
|
from ... import mixins
class OscarFancyPageMixin(mixins.FancyPageMixin):
node_attr_name = 'category'
slug_url_kwarg = 'category_slug'
context_object_name = 'products'
def get_context_data(self, **kwargs):
ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs)
ctx['fancypage'] = getattr(self, self.page_attr_name)
if self.category:
ctx['object'] = self.category
for container in self.category.page.containers.all():
ctx[container.name] = container
return ctx
|
from ... import mixins
class OscarFancyPageMixin(mixins.FancyPageMixin):
node_attr_name = 'category'
slug_url_kwarg = 'category_slug'
context_object_name = 'fancypage'
def get_context_data(self, **kwargs):
ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs)
ctx[self.context_object_name] = getattr(self, self.page_attr_name)
if self.category:
ctx['object'] = self.category
for container in self.category.page.containers.all():
ctx[container.name] = container
return ctx
Change context object for product list view in Oscar contribfrom ... import mixins
class OscarFancyPageMixin(mixins.FancyPageMixin):
node_attr_name = 'category'
slug_url_kwarg = 'category_slug'
context_object_name = 'products'
def get_context_data(self, **kwargs):
ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs)
ctx['fancypage'] = getattr(self, self.page_attr_name)
if self.category:
ctx['object'] = self.category
for container in self.category.page.containers.all():
ctx[container.name] = container
return ctx
|
<commit_before>from ... import mixins
class OscarFancyPageMixin(mixins.FancyPageMixin):
node_attr_name = 'category'
slug_url_kwarg = 'category_slug'
context_object_name = 'fancypage'
def get_context_data(self, **kwargs):
ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs)
ctx[self.context_object_name] = getattr(self, self.page_attr_name)
if self.category:
ctx['object'] = self.category
for container in self.category.page.containers.all():
ctx[container.name] = container
return ctx
<commit_msg>Change context object for product list view in Oscar contrib<commit_after>from ... import mixins
class OscarFancyPageMixin(mixins.FancyPageMixin):
node_attr_name = 'category'
slug_url_kwarg = 'category_slug'
context_object_name = 'products'
def get_context_data(self, **kwargs):
ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs)
ctx['fancypage'] = getattr(self, self.page_attr_name)
if self.category:
ctx['object'] = self.category
for container in self.category.page.containers.all():
ctx[container.name] = container
return ctx
|
31c68ae56801377327e2cc0901222a9d961a6502
|
tests/integration/test_skytap.py
|
tests/integration/test_skytap.py
|
"""
Integration tests for the Skytap XBlock.
"""
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
|
"""
Integration tests for the Skytap XBlock.
"""
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
def test_keyboard_layouts(self):
"""
"""
pass
|
Add stub for integration test.
|
Add stub for integration test.
|
Python
|
agpl-3.0
|
open-craft/xblock-skytap,open-craft/xblock-skytap,open-craft/xblock-skytap
|
"""
Integration tests for the Skytap XBlock.
"""
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
Add stub for integration test.
|
"""
Integration tests for the Skytap XBlock.
"""
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
def test_keyboard_layouts(self):
"""
"""
pass
|
<commit_before>"""
Integration tests for the Skytap XBlock.
"""
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
<commit_msg>Add stub for integration test.<commit_after>
|
"""
Integration tests for the Skytap XBlock.
"""
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
def test_keyboard_layouts(self):
"""
"""
pass
|
"""
Integration tests for the Skytap XBlock.
"""
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
Add stub for integration test."""
Integration tests for the Skytap XBlock.
"""
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
def test_keyboard_layouts(self):
"""
"""
pass
|
<commit_before>"""
Integration tests for the Skytap XBlock.
"""
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
<commit_msg>Add stub for integration test.<commit_after>"""
Integration tests for the Skytap XBlock.
"""
from xblockutils.studio_editable_test import StudioEditableBaseTest
class TestSkytap(StudioEditableBaseTest):
"""
Integration tests for the Skytap XBlock.
"""
def test_keyboard_layouts(self):
"""
"""
pass
|
5477bb26cb076e67ad255982f435abc177d055b4
|
tests/sequence/test_alignment.py
|
tests/sequence/test_alignment.py
|
import unittest
from unittest import mock
from io import StringIO
from cref.sequence.alignment import Blast
class AlignmentTestCase(unittest.TestCase):
def test_blast_local(self):
blast = Blast('data/blastdb/pdbseqres')
results = blast.align('AASSF')
pdbs = {result.pdb_code for result in results}
self.assertIn('1o61', pdbs)
def test_blast_local_error(self):
blast = Blast('db')
with self.assertRaises(Exception) as cm:
blast.align('AASSF')
self.assertIn('Database error', cm.exception.args[-1])
|
import unittest
from unittest import mock
from io import StringIO
from cref.sequence.alignment import Blast
class AlignmentTestCase(unittest.TestCase):
def test_blast_local(self):
blast = Blast('data/blastdb/pdbseqres')
results = blast.align('AASSF')
pdbs = {result.pdb_code for result in results}
self.assertIn('4eef', pdbs)
def test_blast_local_error(self):
blast = Blast('db')
with self.assertRaises(Exception) as cm:
blast.align('AASSF')
self.assertIn('Database error', cm.exception.args[-1])
|
Fix tests due to pdb update
|
Fix tests due to pdb update
|
Python
|
mit
|
mchelem/cref2,mchelem/cref2,mchelem/cref2
|
import unittest
from unittest import mock
from io import StringIO
from cref.sequence.alignment import Blast
class AlignmentTestCase(unittest.TestCase):
def test_blast_local(self):
blast = Blast('data/blastdb/pdbseqres')
results = blast.align('AASSF')
pdbs = {result.pdb_code for result in results}
self.assertIn('1o61', pdbs)
def test_blast_local_error(self):
blast = Blast('db')
with self.assertRaises(Exception) as cm:
blast.align('AASSF')
self.assertIn('Database error', cm.exception.args[-1])
Fix tests due to pdb update
|
import unittest
from unittest import mock
from io import StringIO
from cref.sequence.alignment import Blast
class AlignmentTestCase(unittest.TestCase):
def test_blast_local(self):
blast = Blast('data/blastdb/pdbseqres')
results = blast.align('AASSF')
pdbs = {result.pdb_code for result in results}
self.assertIn('4eef', pdbs)
def test_blast_local_error(self):
blast = Blast('db')
with self.assertRaises(Exception) as cm:
blast.align('AASSF')
self.assertIn('Database error', cm.exception.args[-1])
|
<commit_before>import unittest
from unittest import mock
from io import StringIO
from cref.sequence.alignment import Blast
class AlignmentTestCase(unittest.TestCase):
def test_blast_local(self):
blast = Blast('data/blastdb/pdbseqres')
results = blast.align('AASSF')
pdbs = {result.pdb_code for result in results}
self.assertIn('1o61', pdbs)
def test_blast_local_error(self):
blast = Blast('db')
with self.assertRaises(Exception) as cm:
blast.align('AASSF')
self.assertIn('Database error', cm.exception.args[-1])
<commit_msg>Fix tests due to pdb update<commit_after>
|
import unittest
from unittest import mock
from io import StringIO
from cref.sequence.alignment import Blast
class AlignmentTestCase(unittest.TestCase):
def test_blast_local(self):
blast = Blast('data/blastdb/pdbseqres')
results = blast.align('AASSF')
pdbs = {result.pdb_code for result in results}
self.assertIn('4eef', pdbs)
def test_blast_local_error(self):
blast = Blast('db')
with self.assertRaises(Exception) as cm:
blast.align('AASSF')
self.assertIn('Database error', cm.exception.args[-1])
|
import unittest
from unittest import mock
from io import StringIO
from cref.sequence.alignment import Blast
class AlignmentTestCase(unittest.TestCase):
def test_blast_local(self):
blast = Blast('data/blastdb/pdbseqres')
results = blast.align('AASSF')
pdbs = {result.pdb_code for result in results}
self.assertIn('1o61', pdbs)
def test_blast_local_error(self):
blast = Blast('db')
with self.assertRaises(Exception) as cm:
blast.align('AASSF')
self.assertIn('Database error', cm.exception.args[-1])
Fix tests due to pdb updateimport unittest
from unittest import mock
from io import StringIO
from cref.sequence.alignment import Blast
class AlignmentTestCase(unittest.TestCase):
def test_blast_local(self):
blast = Blast('data/blastdb/pdbseqres')
results = blast.align('AASSF')
pdbs = {result.pdb_code for result in results}
self.assertIn('4eef', pdbs)
def test_blast_local_error(self):
blast = Blast('db')
with self.assertRaises(Exception) as cm:
blast.align('AASSF')
self.assertIn('Database error', cm.exception.args[-1])
|
<commit_before>import unittest
from unittest import mock
from io import StringIO
from cref.sequence.alignment import Blast
class AlignmentTestCase(unittest.TestCase):
def test_blast_local(self):
blast = Blast('data/blastdb/pdbseqres')
results = blast.align('AASSF')
pdbs = {result.pdb_code for result in results}
self.assertIn('1o61', pdbs)
def test_blast_local_error(self):
blast = Blast('db')
with self.assertRaises(Exception) as cm:
blast.align('AASSF')
self.assertIn('Database error', cm.exception.args[-1])
<commit_msg>Fix tests due to pdb update<commit_after>import unittest
from unittest import mock
from io import StringIO
from cref.sequence.alignment import Blast
class AlignmentTestCase(unittest.TestCase):
def test_blast_local(self):
blast = Blast('data/blastdb/pdbseqres')
results = blast.align('AASSF')
pdbs = {result.pdb_code for result in results}
self.assertIn('4eef', pdbs)
def test_blast_local_error(self):
blast = Blast('db')
with self.assertRaises(Exception) as cm:
blast.align('AASSF')
self.assertIn('Database error', cm.exception.args[-1])
|
0082f48347b5f75263687e59b8c000b66cad8b77
|
salt/_modules/caasp_orch.py
|
salt/_modules/caasp_orch.py
|
from __future__ import absolute_import
def __virtual__():
return "caasp_orch"
def sync_all():
'''
Syncronize everything before starting a new orchestration
'''
__utils__['caasp_log.debug']('orch: refreshing all')
__salt__['saltutil.sync_all'](refresh=True)
__utils__['caasp_log.debug']('orch: synchronizing the mine')
__salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
|
from __future__ import absolute_import
def __virtual__():
return "caasp_orch"
def sync_all():
'''
Syncronize everything before starting a new orchestration
'''
__utils__['caasp_log.debug']('orch: refreshing all')
__salt__['saltutil.sync_all'](refresh=True)
# make sure we refresh modules synchronously
__salt__['saltutil.refresh_modules'](async=False) # noqa: W606
__utils__['caasp_log.debug']('orch: synchronizing the mine')
__salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
|
Make sure we refresh modules synchronously
|
Make sure we refresh modules synchronously
bsc#1124784
Signed-off-by: Alvaro Saurin <95275619247e0a0a4a5f7a8d5144a85ee8f50dbb@gmail.com>
|
Python
|
apache-2.0
|
kubic-project/salt,kubic-project/salt,kubic-project/salt
|
from __future__ import absolute_import
def __virtual__():
return "caasp_orch"
def sync_all():
'''
Syncronize everything before starting a new orchestration
'''
__utils__['caasp_log.debug']('orch: refreshing all')
__salt__['saltutil.sync_all'](refresh=True)
__utils__['caasp_log.debug']('orch: synchronizing the mine')
__salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
Make sure we refresh modules synchronously
bsc#1124784
Signed-off-by: Alvaro Saurin <95275619247e0a0a4a5f7a8d5144a85ee8f50dbb@gmail.com>
|
from __future__ import absolute_import
def __virtual__():
return "caasp_orch"
def sync_all():
'''
Syncronize everything before starting a new orchestration
'''
__utils__['caasp_log.debug']('orch: refreshing all')
__salt__['saltutil.sync_all'](refresh=True)
# make sure we refresh modules synchronously
__salt__['saltutil.refresh_modules'](async=False) # noqa: W606
__utils__['caasp_log.debug']('orch: synchronizing the mine')
__salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
|
<commit_before>from __future__ import absolute_import
def __virtual__():
return "caasp_orch"
def sync_all():
'''
Syncronize everything before starting a new orchestration
'''
__utils__['caasp_log.debug']('orch: refreshing all')
__salt__['saltutil.sync_all'](refresh=True)
__utils__['caasp_log.debug']('orch: synchronizing the mine')
__salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
<commit_msg>Make sure we refresh modules synchronously
bsc#1124784
Signed-off-by: Alvaro Saurin <95275619247e0a0a4a5f7a8d5144a85ee8f50dbb@gmail.com><commit_after>
|
from __future__ import absolute_import
def __virtual__():
return "caasp_orch"
def sync_all():
'''
Syncronize everything before starting a new orchestration
'''
__utils__['caasp_log.debug']('orch: refreshing all')
__salt__['saltutil.sync_all'](refresh=True)
# make sure we refresh modules synchronously
__salt__['saltutil.refresh_modules'](async=False) # noqa: W606
__utils__['caasp_log.debug']('orch: synchronizing the mine')
__salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
|
from __future__ import absolute_import
def __virtual__():
return "caasp_orch"
def sync_all():
'''
Syncronize everything before starting a new orchestration
'''
__utils__['caasp_log.debug']('orch: refreshing all')
__salt__['saltutil.sync_all'](refresh=True)
__utils__['caasp_log.debug']('orch: synchronizing the mine')
__salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
Make sure we refresh modules synchronously
bsc#1124784
Signed-off-by: Alvaro Saurin <95275619247e0a0a4a5f7a8d5144a85ee8f50dbb@gmail.com>from __future__ import absolute_import
def __virtual__():
return "caasp_orch"
def sync_all():
'''
Syncronize everything before starting a new orchestration
'''
__utils__['caasp_log.debug']('orch: refreshing all')
__salt__['saltutil.sync_all'](refresh=True)
# make sure we refresh modules synchronously
__salt__['saltutil.refresh_modules'](async=False) # noqa: W606
__utils__['caasp_log.debug']('orch: synchronizing the mine')
__salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
|
<commit_before>from __future__ import absolute_import
def __virtual__():
return "caasp_orch"
def sync_all():
'''
Syncronize everything before starting a new orchestration
'''
__utils__['caasp_log.debug']('orch: refreshing all')
__salt__['saltutil.sync_all'](refresh=True)
__utils__['caasp_log.debug']('orch: synchronizing the mine')
__salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
<commit_msg>Make sure we refresh modules synchronously
bsc#1124784
Signed-off-by: Alvaro Saurin <95275619247e0a0a4a5f7a8d5144a85ee8f50dbb@gmail.com><commit_after>from __future__ import absolute_import
def __virtual__():
return "caasp_orch"
def sync_all():
'''
Syncronize everything before starting a new orchestration
'''
__utils__['caasp_log.debug']('orch: refreshing all')
__salt__['saltutil.sync_all'](refresh=True)
# make sure we refresh modules synchronously
__salt__['saltutil.refresh_modules'](async=False) # noqa: W606
__utils__['caasp_log.debug']('orch: synchronizing the mine')
__salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
|
b764e2ed3edbc089c1fa51659ddab30b5091a6a5
|
salt/utils/validate/user.py
|
salt/utils/validate/user.py
|
# -*- coding: utf-8 -*-
'''
Various user validation utilities
'''
# Import python libs
import re
import logging
log = logging.getLogger(__name__)
def valid_username(user):
'''
Validates a username based on the guidelines in `useradd(8)`
'''
if type(user) not str:
return False
if len(user) > 32:
return False
valid = re.compile(r'[a-z_][a-z0-9_-]*[$]?', re.IGNORECASE)
return valid.match(user) is not None
|
# -*- coding: utf-8 -*-
'''
Various user validation utilities
'''
# Import python libs
import re
import logging
log = logging.getLogger(__name__)
VALID_USERNAME= re.compile(r'[a-z_][a-z0-9_-]*[$]?', re.IGNORECASE)
def valid_username(user):
'''
Validates a username based on the guidelines in `useradd(8)`
'''
if type(user) not str:
return False
if len(user) > 32:
return False
return VALID_USERNAME.match(user) is not None
|
Move re compilation to the module load
|
Move re compilation to the module load
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
Various user validation utilities
'''
# Import python libs
import re
import logging
log = logging.getLogger(__name__)
def valid_username(user):
'''
Validates a username based on the guidelines in `useradd(8)`
'''
if type(user) not str:
return False
if len(user) > 32:
return False
valid = re.compile(r'[a-z_][a-z0-9_-]*[$]?', re.IGNORECASE)
return valid.match(user) is not None
Move re compilation to the module load
|
# -*- coding: utf-8 -*-
'''
Various user validation utilities
'''
# Import python libs
import re
import logging
log = logging.getLogger(__name__)
VALID_USERNAME= re.compile(r'[a-z_][a-z0-9_-]*[$]?', re.IGNORECASE)
def valid_username(user):
'''
Validates a username based on the guidelines in `useradd(8)`
'''
if type(user) not str:
return False
if len(user) > 32:
return False
return VALID_USERNAME.match(user) is not None
|
<commit_before># -*- coding: utf-8 -*-
'''
Various user validation utilities
'''
# Import python libs
import re
import logging
log = logging.getLogger(__name__)
def valid_username(user):
'''
Validates a username based on the guidelines in `useradd(8)`
'''
if type(user) not str:
return False
if len(user) > 32:
return False
valid = re.compile(r'[a-z_][a-z0-9_-]*[$]?', re.IGNORECASE)
return valid.match(user) is not None
<commit_msg>Move re compilation to the module load<commit_after>
|
# -*- coding: utf-8 -*-
'''
Various user validation utilities
'''
# Import python libs
import re
import logging
log = logging.getLogger(__name__)
VALID_USERNAME= re.compile(r'[a-z_][a-z0-9_-]*[$]?', re.IGNORECASE)
def valid_username(user):
'''
Validates a username based on the guidelines in `useradd(8)`
'''
if type(user) not str:
return False
if len(user) > 32:
return False
return VALID_USERNAME.match(user) is not None
|
# -*- coding: utf-8 -*-
'''
Various user validation utilities
'''
# Import python libs
import re
import logging
log = logging.getLogger(__name__)
def valid_username(user):
'''
Validates a username based on the guidelines in `useradd(8)`
'''
if type(user) not str:
return False
if len(user) > 32:
return False
valid = re.compile(r'[a-z_][a-z0-9_-]*[$]?', re.IGNORECASE)
return valid.match(user) is not None
Move re compilation to the module load# -*- coding: utf-8 -*-
'''
Various user validation utilities
'''
# Import python libs
import re
import logging
log = logging.getLogger(__name__)
VALID_USERNAME= re.compile(r'[a-z_][a-z0-9_-]*[$]?', re.IGNORECASE)
def valid_username(user):
'''
Validates a username based on the guidelines in `useradd(8)`
'''
if type(user) not str:
return False
if len(user) > 32:
return False
return VALID_USERNAME.match(user) is not None
|
<commit_before># -*- coding: utf-8 -*-
'''
Various user validation utilities
'''
# Import python libs
import re
import logging
log = logging.getLogger(__name__)
def valid_username(user):
'''
Validates a username based on the guidelines in `useradd(8)`
'''
if type(user) not str:
return False
if len(user) > 32:
return False
valid = re.compile(r'[a-z_][a-z0-9_-]*[$]?', re.IGNORECASE)
return valid.match(user) is not None
<commit_msg>Move re compilation to the module load<commit_after># -*- coding: utf-8 -*-
'''
Various user validation utilities
'''
# Import python libs
import re
import logging
log = logging.getLogger(__name__)
VALID_USERNAME= re.compile(r'[a-z_][a-z0-9_-]*[$]?', re.IGNORECASE)
def valid_username(user):
'''
Validates a username based on the guidelines in `useradd(8)`
'''
if type(user) not str:
return False
if len(user) > 32:
return False
return VALID_USERNAME.match(user) is not None
|
600f809805e5ade5ff2a4f0db27f2d274b46e08e
|
pystock_crawler/settings.py
|
pystock_crawler/settings.py
|
# Scrapy settings for pystock-crawler project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'pystock-crawler'
EXPORT_FIELDS = (
# Price columns
'symbol', 'date', 'open', 'high', 'low', 'close', 'volume', 'adj_close',
# Report columns
'end_date', 'amend', 'period_focus', 'doc_type', 'revenues', 'net_income',
'eps_basic', 'eps_diluted', 'dividend', 'assets', 'cash', 'equity',
)
FEED_EXPORTERS = {
'csv': 'pystock_crawler.exporters.CsvItemExporter2'
}
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.RFC2616Policy'
LOG_LEVEL = 'INFO'
NEWSPIDER_MODULE = 'pystock_crawler.spiders'
SPIDER_MODULES = ['pystock_crawler.spiders']
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'pystock-crawler (+http://www.yourdomain.com)'
|
# Scrapy settings for pystock-crawler project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'pystock-crawler'
EXPORT_FIELDS = (
# Price columns
'symbol', 'date', 'open', 'high', 'low', 'close', 'volume', 'adj_close',
# Report columns
'end_date', 'amend', 'period_focus', 'doc_type', 'revenues', 'net_income',
'eps_basic', 'eps_diluted', 'dividend', 'assets', 'cash', 'equity',
)
FEED_EXPORTERS = {
'csv': 'pystock_crawler.exporters.CsvItemExporter2'
}
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.RFC2616Policy'
HTTPCACHE_STORAGE = 'scrapy.contrib.httpcache.DbmCacheStorage'
LOG_LEVEL = 'INFO'
NEWSPIDER_MODULE = 'pystock_crawler.spiders'
SPIDER_MODULES = ['pystock_crawler.spiders']
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'pystock-crawler (+http://www.yourdomain.com)'
|
Use dbm cache instead of file system
|
Use dbm cache instead of file system
|
Python
|
mit
|
hsd315/pystock-crawler,eliangcs/pystock-crawler
|
# Scrapy settings for pystock-crawler project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'pystock-crawler'
EXPORT_FIELDS = (
# Price columns
'symbol', 'date', 'open', 'high', 'low', 'close', 'volume', 'adj_close',
# Report columns
'end_date', 'amend', 'period_focus', 'doc_type', 'revenues', 'net_income',
'eps_basic', 'eps_diluted', 'dividend', 'assets', 'cash', 'equity',
)
FEED_EXPORTERS = {
'csv': 'pystock_crawler.exporters.CsvItemExporter2'
}
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.RFC2616Policy'
LOG_LEVEL = 'INFO'
NEWSPIDER_MODULE = 'pystock_crawler.spiders'
SPIDER_MODULES = ['pystock_crawler.spiders']
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'pystock-crawler (+http://www.yourdomain.com)'
Use dbm cache instead of file system
|
# Scrapy settings for pystock-crawler project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'pystock-crawler'
EXPORT_FIELDS = (
# Price columns
'symbol', 'date', 'open', 'high', 'low', 'close', 'volume', 'adj_close',
# Report columns
'end_date', 'amend', 'period_focus', 'doc_type', 'revenues', 'net_income',
'eps_basic', 'eps_diluted', 'dividend', 'assets', 'cash', 'equity',
)
FEED_EXPORTERS = {
'csv': 'pystock_crawler.exporters.CsvItemExporter2'
}
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.RFC2616Policy'
HTTPCACHE_STORAGE = 'scrapy.contrib.httpcache.DbmCacheStorage'
LOG_LEVEL = 'INFO'
NEWSPIDER_MODULE = 'pystock_crawler.spiders'
SPIDER_MODULES = ['pystock_crawler.spiders']
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'pystock-crawler (+http://www.yourdomain.com)'
|
<commit_before># Scrapy settings for pystock-crawler project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'pystock-crawler'
EXPORT_FIELDS = (
# Price columns
'symbol', 'date', 'open', 'high', 'low', 'close', 'volume', 'adj_close',
# Report columns
'end_date', 'amend', 'period_focus', 'doc_type', 'revenues', 'net_income',
'eps_basic', 'eps_diluted', 'dividend', 'assets', 'cash', 'equity',
)
FEED_EXPORTERS = {
'csv': 'pystock_crawler.exporters.CsvItemExporter2'
}
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.RFC2616Policy'
LOG_LEVEL = 'INFO'
NEWSPIDER_MODULE = 'pystock_crawler.spiders'
SPIDER_MODULES = ['pystock_crawler.spiders']
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'pystock-crawler (+http://www.yourdomain.com)'
<commit_msg>Use dbm cache instead of file system<commit_after>
|
# Scrapy settings for pystock-crawler project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'pystock-crawler'
EXPORT_FIELDS = (
# Price columns
'symbol', 'date', 'open', 'high', 'low', 'close', 'volume', 'adj_close',
# Report columns
'end_date', 'amend', 'period_focus', 'doc_type', 'revenues', 'net_income',
'eps_basic', 'eps_diluted', 'dividend', 'assets', 'cash', 'equity',
)
FEED_EXPORTERS = {
'csv': 'pystock_crawler.exporters.CsvItemExporter2'
}
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.RFC2616Policy'
HTTPCACHE_STORAGE = 'scrapy.contrib.httpcache.DbmCacheStorage'
LOG_LEVEL = 'INFO'
NEWSPIDER_MODULE = 'pystock_crawler.spiders'
SPIDER_MODULES = ['pystock_crawler.spiders']
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'pystock-crawler (+http://www.yourdomain.com)'
|
# Scrapy settings for pystock-crawler project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'pystock-crawler'
EXPORT_FIELDS = (
# Price columns
'symbol', 'date', 'open', 'high', 'low', 'close', 'volume', 'adj_close',
# Report columns
'end_date', 'amend', 'period_focus', 'doc_type', 'revenues', 'net_income',
'eps_basic', 'eps_diluted', 'dividend', 'assets', 'cash', 'equity',
)
FEED_EXPORTERS = {
'csv': 'pystock_crawler.exporters.CsvItemExporter2'
}
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.RFC2616Policy'
LOG_LEVEL = 'INFO'
NEWSPIDER_MODULE = 'pystock_crawler.spiders'
SPIDER_MODULES = ['pystock_crawler.spiders']
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'pystock-crawler (+http://www.yourdomain.com)'
Use dbm cache instead of file system# Scrapy settings for pystock-crawler project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'pystock-crawler'
EXPORT_FIELDS = (
# Price columns
'symbol', 'date', 'open', 'high', 'low', 'close', 'volume', 'adj_close',
# Report columns
'end_date', 'amend', 'period_focus', 'doc_type', 'revenues', 'net_income',
'eps_basic', 'eps_diluted', 'dividend', 'assets', 'cash', 'equity',
)
FEED_EXPORTERS = {
'csv': 'pystock_crawler.exporters.CsvItemExporter2'
}
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.RFC2616Policy'
HTTPCACHE_STORAGE = 'scrapy.contrib.httpcache.DbmCacheStorage'
LOG_LEVEL = 'INFO'
NEWSPIDER_MODULE = 'pystock_crawler.spiders'
SPIDER_MODULES = ['pystock_crawler.spiders']
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'pystock-crawler (+http://www.yourdomain.com)'
|
<commit_before># Scrapy settings for pystock-crawler project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'pystock-crawler'
EXPORT_FIELDS = (
# Price columns
'symbol', 'date', 'open', 'high', 'low', 'close', 'volume', 'adj_close',
# Report columns
'end_date', 'amend', 'period_focus', 'doc_type', 'revenues', 'net_income',
'eps_basic', 'eps_diluted', 'dividend', 'assets', 'cash', 'equity',
)
FEED_EXPORTERS = {
'csv': 'pystock_crawler.exporters.CsvItemExporter2'
}
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.RFC2616Policy'
LOG_LEVEL = 'INFO'
NEWSPIDER_MODULE = 'pystock_crawler.spiders'
SPIDER_MODULES = ['pystock_crawler.spiders']
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'pystock-crawler (+http://www.yourdomain.com)'
<commit_msg>Use dbm cache instead of file system<commit_after># Scrapy settings for pystock-crawler project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'pystock-crawler'
EXPORT_FIELDS = (
# Price columns
'symbol', 'date', 'open', 'high', 'low', 'close', 'volume', 'adj_close',
# Report columns
'end_date', 'amend', 'period_focus', 'doc_type', 'revenues', 'net_income',
'eps_basic', 'eps_diluted', 'dividend', 'assets', 'cash', 'equity',
)
FEED_EXPORTERS = {
'csv': 'pystock_crawler.exporters.CsvItemExporter2'
}
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.RFC2616Policy'
HTTPCACHE_STORAGE = 'scrapy.contrib.httpcache.DbmCacheStorage'
LOG_LEVEL = 'INFO'
NEWSPIDER_MODULE = 'pystock_crawler.spiders'
SPIDER_MODULES = ['pystock_crawler.spiders']
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'pystock-crawler (+http://www.yourdomain.com)'
|
f630a15fff5b0a6af3d3d7a9ee908f8159e2d4b4
|
tests/unit/test_spec_set.py
|
tests/unit/test_spec_set.py
|
import unittest
from piptools.datastructures import SpecSet
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
assert 'Django>=1.3' in map(str, specset)
specset.add_spec('django-pipeline')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec('Django<1.4')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
specset.add_spec('Django<1.4')
specset.add_spec('Django>=1.3.2')
specset.add_spec('Django<1.3.99')
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec('Django<=1.3.2')
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
|
import unittest
from piptools.datastructures import SpecSet
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
assert 'Django>=1.3' in map(str, specset)
specset.add_spec('django-pipeline')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec('Django<1.4')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
specset.add_spec('Django<1.4')
specset.add_spec('Django>=1.3.2')
specset.add_spec('Django<1.3.99')
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec('Django<=1.3.2')
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
def test_normalizing_2(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django')
specset.add_spec('Django<1.4')
normalized = specset.normalize()
assert 'Django<1.4' in map(str, normalized)
|
Add a simple test case.
|
Add a simple test case.
|
Python
|
bsd-2-clause
|
suutari/prequ,suutari/prequ,suutari-ai/prequ
|
import unittest
from piptools.datastructures import SpecSet
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
assert 'Django>=1.3' in map(str, specset)
specset.add_spec('django-pipeline')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec('Django<1.4')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
specset.add_spec('Django<1.4')
specset.add_spec('Django>=1.3.2')
specset.add_spec('Django<1.3.99')
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec('Django<=1.3.2')
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
Add a simple test case.
|
import unittest
from piptools.datastructures import SpecSet
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
assert 'Django>=1.3' in map(str, specset)
specset.add_spec('django-pipeline')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec('Django<1.4')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
specset.add_spec('Django<1.4')
specset.add_spec('Django>=1.3.2')
specset.add_spec('Django<1.3.99')
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec('Django<=1.3.2')
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
def test_normalizing_2(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django')
specset.add_spec('Django<1.4')
normalized = specset.normalize()
assert 'Django<1.4' in map(str, normalized)
|
<commit_before>import unittest
from piptools.datastructures import SpecSet
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
assert 'Django>=1.3' in map(str, specset)
specset.add_spec('django-pipeline')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec('Django<1.4')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
specset.add_spec('Django<1.4')
specset.add_spec('Django>=1.3.2')
specset.add_spec('Django<1.3.99')
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec('Django<=1.3.2')
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
<commit_msg>Add a simple test case.<commit_after>
|
import unittest
from piptools.datastructures import SpecSet
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
assert 'Django>=1.3' in map(str, specset)
specset.add_spec('django-pipeline')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec('Django<1.4')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
specset.add_spec('Django<1.4')
specset.add_spec('Django>=1.3.2')
specset.add_spec('Django<1.3.99')
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec('Django<=1.3.2')
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
def test_normalizing_2(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django')
specset.add_spec('Django<1.4')
normalized = specset.normalize()
assert 'Django<1.4' in map(str, normalized)
|
import unittest
from piptools.datastructures import SpecSet
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
assert 'Django>=1.3' in map(str, specset)
specset.add_spec('django-pipeline')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec('Django<1.4')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
specset.add_spec('Django<1.4')
specset.add_spec('Django>=1.3.2')
specset.add_spec('Django<1.3.99')
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec('Django<=1.3.2')
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
Add a simple test case.import unittest
from piptools.datastructures import SpecSet
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
assert 'Django>=1.3' in map(str, specset)
specset.add_spec('django-pipeline')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec('Django<1.4')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
specset.add_spec('Django<1.4')
specset.add_spec('Django>=1.3.2')
specset.add_spec('Django<1.3.99')
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec('Django<=1.3.2')
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
def test_normalizing_2(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django')
specset.add_spec('Django<1.4')
normalized = specset.normalize()
assert 'Django<1.4' in map(str, normalized)
|
<commit_before>import unittest
from piptools.datastructures import SpecSet
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
assert 'Django>=1.3' in map(str, specset)
specset.add_spec('django-pipeline')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec('Django<1.4')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
specset.add_spec('Django<1.4')
specset.add_spec('Django>=1.3.2')
specset.add_spec('Django<1.3.99')
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec('Django<=1.3.2')
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
<commit_msg>Add a simple test case.<commit_after>import unittest
from piptools.datastructures import SpecSet
class TestSpecSet(unittest.TestCase):
def test_adding_specs(self):
"""Adding specs to a set."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
assert 'Django>=1.3' in map(str, specset)
specset.add_spec('django-pipeline')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline'], map(str, specset))
specset.add_spec('Django<1.4')
self.assertItemsEqual(['Django>=1.3', 'django-pipeline', 'Django<1.4'], map(str, specset))
def test_normalizing(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django>=1.3')
specset.add_spec('Django<1.4')
specset.add_spec('Django>=1.3.2')
specset.add_spec('Django<1.3.99')
normalized = specset.normalize()
assert 'Django>=1.3.2,<1.3.99' in map(str, normalized)
specset.add_spec('Django<=1.3.2')
normalized = specset.normalize()
assert 'Django==1.3.2' in map(str, normalized)
def test_normalizing_2(self):
"""Normalizing combines predicates to a single Spec."""
specset = SpecSet()
specset.add_spec('Django')
specset.add_spec('Django<1.4')
normalized = specset.normalize()
assert 'Django<1.4' in map(str, normalized)
|
4d4a639ba46cf72454497bc100b3e811e66af4b2
|
tests/test_deprecations.py
|
tests/test_deprecations.py
|
# -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
|
# -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import flask
class TestRequestDeprecation(object):
def test_request_json(self, catch_deprecation_warnings):
"""Request.json is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/', methods=['POST'])
def index():
assert flask.request.json == {'spam': 42}
print(flask.request.json)
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.post('/', data='{"spam": 42}', content_type='application/json')
assert len(captured) == 1
def test_request_module(self, catch_deprecation_warnings):
"""Request.module is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
assert flask.request.module is None
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.get('/')
assert len(captured) == 1
|
Add test for deprecated flask.Request properties.
|
Add test for deprecated flask.Request properties.
|
Python
|
bsd-3-clause
|
moluzhang/flask,karen-wang/flask,happyspace/flask,alanhamlett/flask,kuhli/flask,auready/flask,margguo/flask,tcnoviembre2013/flask,rollingstone/flask,wudafucode/flask,mysweet/flask,drewja/flask,tcnoviembre2013/flask,karen-wang/flask,horica-ionescu/flask,nwags/flask,cgvarela/flask,sam-tsai/flask,postelin/flask,jiimaho/flask,jenalgit/flask,bluedazzle/flask,ghhong1986/flask,ffsdmad/flask,raven47git/flask,kuhli/flask,lynget/flask,jawed123/flask,pallets/flask,kuku940/flask,alexwidener/flask,lord63-forks/flask,iamantee/flask,edcomstock/flask,Endika/flask,felipefuka/flask,flamelizard/flask,s7v7nislands/flask,postelin/flask,algoStruct/flask,WSDC-NITWarangal/flask,zailushangde/flask,gbolahanalade/flask,SsangYoon/Flask,arvinls/flask,WSDC-NITWarangal/flask,theo-l/flask,warm200/flask,rawrgulmuffins/flask,liuyi1112/flask,antsar/flask,Freebird2014/flask,wtorcasoGB/flask,GavinCruise/flask,szymonm/flask,Faiz7412/flask,vivekanand1101/flask,AvivC/flask,godfreyy/flask,JingZhou0404/flask,suppandi/flask,zailushangde/flask,mitsuhiko/flask,Faiz7412/flask,luzzyzhang/flask,ffsdmad/flask,s7v7nislands/flask,anujk3/flask,blankme/flask,dominjune/flask,grayswand1r/flask,skmezanul/flask,artleaf/flask,mAzurkovic/flask,Eagles2F/flask,oculardexterity/flask,moluzhang/flask,onetinyr/flask,drewja/flask,TylerB24890/flask,anujk3/flask,hyunchel/flask,dawran6/flask,nZac/flask,szymonm/flask,dhruvsrivastava/flask,gbolahanalade/flask,nju520/flask,Papa2k15/flask,dawran6/flask,treedledee/flask,szymonm/flask,mujiatong/flask,abdulbaqi/flask,happyspace/flask,ryandvill/flask,jonieli/flask,tjxiter/flask,ankurpandey/flask,ghhong1986/flask,bodefuwa/flask,Ricardo666666/flask,wangjun/flask,garaden/flask,SsangYoon/Flask,chenke91/flask,SsangYoon/Flask,rollingstone/flask,hoatle/flask,tripathi62674/flask,AndreaEdwards/flask,fevxie/flask,tangfeng1/flask,xiaoshaozi52/flask,luzzyzhang/flask,jeezybrick/flask,chenke91/flask,treedledee/flask,mAzurkovic/flask,lyn233/flask,grayswand1r/flask,gangeshwark/flask,q1ang/flask,mitchfriedma
n/flask,artleaf/flask,DesQi/flask,jorpramo/flask,wtorcasoGB/flask,oculardexterity/flask,brianbeale/flask,LonglyCode/flask,mujiatong/flask,homeworkprod/flask,s7v7nislands/flask,rawrgulmuffins/flask,aniketshukla/flask,suppandi/flask,nZac/flask,cgvarela/flask,theo-l/flask,gangeshwark/flask,sage417/flask,jonieli/flask,beni55/flask,ezequielo/flask,EasonYi/flask,ezequielo/flask,JamesTFarrington/flask,sam-tsai/flask,blankme/flask,kyle-sorensen/flask,justanr/flask,liaoqingwei/flask,EasonYi/flask,Faiz7412/flask,mitsuhiko/flask,huwenhui/flask,jorpramo/flask,liuyi1112/flask,fevxie/flask,jeezybrick/flask,lalinsky/flask,pledo/flask,iamantee/flask,brianbeale/flask,jstacoder/flask,lyn233/flask,dominjune/flask,Parkayun/flask,cheesecake4392/flask,adrianmoisey/cptdevops,flabe81/flask,JingZhou0404/flask,tangfeng1/flask,fanshaohua-fan/flask,stephaniemertz/flask,GavinCruise/flask,liukaijv/flask,HeyItsAlan/flask,xujianhai/flask,homeworkprod/flask,liukaijv/flask,tjxiter/flask,nju520/flask,TylerB24890/flask,edcomstock/flask,pxzhenren/flask,tristanfisher/flask,elelianghh/flask,tripathi62674/flask,hyunchel/flask,raven47git/flask,RohithKP/flask,liaoqingwei/flask,vishnugonela/flask,cliu-aa/flask,Papa2k15/flask,VShangxiao/flask,arvinls/flask,zoyanhui/flask,TylerB24890/flask,antsar/flask,godfreyy/flask,mysweet/flask,tony/flask,felipefuka/flask,beni55/flask,jstacoder/flask,kyle-sorensen/flask,aniketshukla/flask,nju520/flask,sonnyhu/flask,lalinsky/flask,wtl-zju/flask,chenrenyi/flask,bodefuwa/flask,alexwidener/flask,Parkayun/flask,gangeshwark/flask,alanhamlett/flask,artleaf/flask,gaomingnudt/gm-flask2.0,robertglen/flask,AndreaEdwards/flask,sixpi/flask,cliu-aa/flask,horica-ionescu/flask,raven47git/flask,karen-wang/flask,ankravch/flask,ZacariasBendeck/flask,robertglen/flask,adrianmoisey/cptdevops,kyle-sorensen/flask,huwenhui/flask,LonglyCode/flask,xiaoshaozi52/flask,anujk3/flask,sonnyhu/flask,niima/flask,godfreyy/flask,luvten/flask,karlw00t/flask,XGiton/flask,sixpi/flask,dhruvsrivastava/flask,alanham
lett/flask,xujianhai/flask,ankurpandey/flask,mitchfriedman/flask,ankravch/flask,pxzhenren/flask,kuku940/flask,margguo/flask,bluedazzle/flask,skmezanul/flask,flamelizard/flask,qinfengsuiyu/flask,VShangxiao/flask,ABaldwinHunter/flask-clone-classic,auready/flask,fkazimierczak/flask,fkazimierczak/flask,AvivC/flask,tangfeng1/flask,wangjun/flask,lord63-forks/flask,jstacoder/flask,wilbert-abreu/flask,ZacariasBendeck/flask,RohithKP/flask,VShangxiao/flask,luvten/flask,Ricardo666666/flask,sage417/flask,gaomingnudt/gm-flask2.0,wtl-zju/flask,EasonYi/flask,XGiton/flask,pinkpet/flask,wldtyp/flask,tony/flask,oculardexterity/flask,justanr/flask,hoatle/flask,cheesecake4392/flask,visaxin/flask,Endika/flask,gilesburnfield/flask,onetinyr/flask,ryandvill/flask,warm200/flask,liaoqingwei/flask,jenalgit/flask,ezequielo/flask,wilbert-abreu/flask,zoyanhui/flask,vishnugonela/flask,jiimaho/flask,HeyItsAlan/flask,Ricardo666666/flask,AndreaEdwards/flask,jonatanblue/flask,fanshaohua-fan/flask,chuijiaolianying/flask,pledo/flask,qinfengsuiyu/flask,JamesTFarrington/flask,jenalgit/flask,ryandvill/flask,liuyi1112/flask,happyspace/flask,algoStruct/flask,vishnugonela/flask,flabe81/flask,wldtyp/flask,warm200/flask,jonieli/flask,chuijiaolianying/flask,vivekanand1101/flask,visaxin/flask,pallets/flask,fengsp/flask,ABaldwinHunter/flask-clone-classic,jonatanblue/flask,GavinCruise/flask,gilesburnfield/flask,niima/flask,jorpramo/flask,Freebird2014/flask,Eagles2F/flask,ankurpandey/flask,suppandi/flask,lynget/flask,fengsp/flask,garaden/flask,visaxin/flask,jawed123/flask,chenrenyi/flask,DesQi/flask,wudafucode/flask,onetinyr/flask,pinkpet/flask,abdulbaqi/flask,elelianghh/flask,stephaniemertz/flask,pallets/flask,nwags/flask,justanr/flask,chenke91/flask,tristanfisher/flask,postelin/flask,chuijiaolianying/flask,karlw00t/flask,algoStruct/flask,q1ang/flask,fkazimierczak/flask,drewja/flask
|
# -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
Add test for deprecated flask.Request properties.
|
# -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import flask
class TestRequestDeprecation(object):
def test_request_json(self, catch_deprecation_warnings):
"""Request.json is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/', methods=['POST'])
def index():
assert flask.request.json == {'spam': 42}
print(flask.request.json)
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.post('/', data='{"spam": 42}', content_type='application/json')
assert len(captured) == 1
def test_request_module(self, catch_deprecation_warnings):
"""Request.module is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
assert flask.request.module is None
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.get('/')
assert len(captured) == 1
|
<commit_before># -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
<commit_msg>Add test for deprecated flask.Request properties.<commit_after>
|
# -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import flask
class TestRequestDeprecation(object):
def test_request_json(self, catch_deprecation_warnings):
"""Request.json is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/', methods=['POST'])
def index():
assert flask.request.json == {'spam': 42}
print(flask.request.json)
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.post('/', data='{"spam": 42}', content_type='application/json')
assert len(captured) == 1
def test_request_module(self, catch_deprecation_warnings):
"""Request.module is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
assert flask.request.module is None
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.get('/')
assert len(captured) == 1
|
# -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
Add test for deprecated flask.Request properties.# -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import flask
class TestRequestDeprecation(object):
def test_request_json(self, catch_deprecation_warnings):
"""Request.json is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/', methods=['POST'])
def index():
assert flask.request.json == {'spam': 42}
print(flask.request.json)
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.post('/', data='{"spam": 42}', content_type='application/json')
assert len(captured) == 1
def test_request_module(self, catch_deprecation_warnings):
"""Request.module is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
assert flask.request.module is None
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.get('/')
assert len(captured) == 1
|
<commit_before># -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
<commit_msg>Add test for deprecated flask.Request properties.<commit_after># -*- coding: utf-8 -*-
"""
tests.deprecations
~~~~~~~~~~~~~~~~~~
Tests deprecation support. Not used currently.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import flask
class TestRequestDeprecation(object):
def test_request_json(self, catch_deprecation_warnings):
"""Request.json is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/', methods=['POST'])
def index():
assert flask.request.json == {'spam': 42}
print(flask.request.json)
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.post('/', data='{"spam": 42}', content_type='application/json')
assert len(captured) == 1
def test_request_module(self, catch_deprecation_warnings):
"""Request.module is deprecated"""
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
assert flask.request.module is None
return 'OK'
with catch_deprecation_warnings() as captured:
c = app.test_client()
c.get('/')
assert len(captured) == 1
|
83eef98bd8cf36e62718c60f2bba71337a9a9ea0
|
kolibri/plugins/coach/kolibri_plugin.py
|
kolibri/plugins/coach/kolibri_plugin.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from . import hooks
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
class CoachInclusionHook(hooks.CoachSyncHook):
bundle_class = CoachAsset
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
|
Remove undefined import in coach plugin.
|
Remove undefined import in coach plugin.
|
Python
|
mit
|
indirectlylit/kolibri,learningequality/kolibri,mrpau/kolibri,mrpau/kolibri,mrpau/kolibri,learningequality/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,learningequality/kolibri,mrpau/kolibri,indirectlylit/kolibri,learningequality/kolibri
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from . import hooks
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
class CoachInclusionHook(hooks.CoachSyncHook):
bundle_class = CoachAsset
Remove undefined import in coach plugin.
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from . import hooks
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
class CoachInclusionHook(hooks.CoachSyncHook):
bundle_class = CoachAsset
<commit_msg>Remove undefined import in coach plugin.<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from . import hooks
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
class CoachInclusionHook(hooks.CoachSyncHook):
bundle_class = CoachAsset
Remove undefined import in coach plugin.from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from . import hooks
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
class CoachInclusionHook(hooks.CoachSyncHook):
bundle_class = CoachAsset
<commit_msg>Remove undefined import in coach plugin.<commit_after>from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from kolibri.core.auth.constants.user_kinds import COACH
from kolibri.core.hooks import NavigationHook
from kolibri.core.hooks import RoleBasedRedirectHook
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class Coach(KolibriPluginBase):
untranslated_view_urls = "api_urls"
translated_view_urls = "urls"
class CoachRedirect(RoleBasedRedirectHook):
role = COACH
@property
def url(self):
return self.plugin_url(Coach, "coach")
class CoachNavItem(NavigationHook, webpack_hooks.WebpackBundleHook):
unique_slug = "coach_side_nav"
class CoachAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "coach_module"
|
a254fa1e3b4b0c6d491fdd503fc543674773f43f
|
bikeshop_project/bikeshop/urls.py
|
bikeshop_project/bikeshop/urls.py
|
"""bikeshop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from core import urls as core_urls
from registration import urls as member_urls
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^login/', login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/', logout_then_login),
url(r'^member/', include(member_urls)),
url(r'^admin/', admin.site.urls),
]
if getattr(settings, 'DEBUG'):
urlpatterns += staticfiles_urlpatterns()
|
"""bikeshop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from core import urls as core_urls
from registration import urls as member_urls
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^login/', login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/', logout_then_login, name='logout'),
url(r'^member/', include(member_urls)),
url(r'^admin/', admin.site.urls),
]
if getattr(settings, 'DEBUG'):
urlpatterns += staticfiles_urlpatterns()
|
Add name to logout url.
|
Add name to logout url.
|
Python
|
mit
|
BridgeCityBicycleCoop/workstand,BridgeCityBicycleCoop/workstand,BridgeCityBicycleCoop/workstand,BridgeCityBicycleCoop/workstand
|
"""bikeshop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from core import urls as core_urls
from registration import urls as member_urls
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^login/', login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/', logout_then_login),
url(r'^member/', include(member_urls)),
url(r'^admin/', admin.site.urls),
]
if getattr(settings, 'DEBUG'):
urlpatterns += staticfiles_urlpatterns()
Add name to logout url.
|
"""bikeshop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from core import urls as core_urls
from registration import urls as member_urls
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^login/', login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/', logout_then_login, name='logout'),
url(r'^member/', include(member_urls)),
url(r'^admin/', admin.site.urls),
]
if getattr(settings, 'DEBUG'):
urlpatterns += staticfiles_urlpatterns()
|
<commit_before>"""bikeshop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from core import urls as core_urls
from registration import urls as member_urls
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^login/', login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/', logout_then_login),
url(r'^member/', include(member_urls)),
url(r'^admin/', admin.site.urls),
]
if getattr(settings, 'DEBUG'):
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Add name to logout url.<commit_after>
|
"""bikeshop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from core import urls as core_urls
from registration import urls as member_urls
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^login/', login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/', logout_then_login, name='logout'),
url(r'^member/', include(member_urls)),
url(r'^admin/', admin.site.urls),
]
if getattr(settings, 'DEBUG'):
urlpatterns += staticfiles_urlpatterns()
|
"""bikeshop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from core import urls as core_urls
from registration import urls as member_urls
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^login/', login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/', logout_then_login),
url(r'^member/', include(member_urls)),
url(r'^admin/', admin.site.urls),
]
if getattr(settings, 'DEBUG'):
urlpatterns += staticfiles_urlpatterns()
Add name to logout url."""bikeshop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from core import urls as core_urls
from registration import urls as member_urls
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^login/', login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/', logout_then_login, name='logout'),
url(r'^member/', include(member_urls)),
url(r'^admin/', admin.site.urls),
]
if getattr(settings, 'DEBUG'):
urlpatterns += staticfiles_urlpatterns()
|
<commit_before>"""bikeshop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from core import urls as core_urls
from registration import urls as member_urls
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^login/', login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/', logout_then_login),
url(r'^member/', include(member_urls)),
url(r'^admin/', admin.site.urls),
]
if getattr(settings, 'DEBUG'):
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Add name to logout url.<commit_after>"""bikeshop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from core import urls as core_urls
from registration import urls as member_urls
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^login/', login, {'template_name': 'login.html'}, name='login'),
url(r'^logout/', logout_then_login, name='logout'),
url(r'^member/', include(member_urls)),
url(r'^admin/', admin.site.urls),
]
if getattr(settings, 'DEBUG'):
urlpatterns += staticfiles_urlpatterns()
|
d18e4681ac4fb8465cc100f600a86063c71b96f3
|
txircd/modules/cmd_names.py
|
txircd/modules/cmd_names.py
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = filter(lambda x: x in user.channels and x in self.ircd.channels, params[0].split(","))
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = params[0].split(",")
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
else:
user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel")
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]
|
Send "no such channel" message on NAMES with a nonexistent channel
|
Send "no such channel" message on NAMES with a nonexistent channel
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,ElementalAlchemist/txircd,DesertBus/txircd
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = filter(lambda x: x in user.channels and x in self.ircd.channels, params[0].split(","))
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]Send "no such channel" message on NAMES with a nonexistent channel
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = params[0].split(",")
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
else:
user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel")
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]
|
<commit_before>from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = filter(lambda x: x in user.channels and x in self.ircd.channels, params[0].split(","))
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]<commit_msg>Send "no such channel" message on NAMES with a nonexistent channel<commit_after>
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = params[0].split(",")
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
else:
user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel")
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = filter(lambda x: x in user.channels and x in self.ircd.channels, params[0].split(","))
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]Send "no such channel" message on NAMES with a nonexistent channelfrom twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = params[0].split(",")
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
else:
user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel")
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]
|
<commit_before>from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = filter(lambda x: x in user.channels and x in self.ircd.channels, params[0].split(","))
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]<commit_msg>Send "no such channel" message on NAMES with a nonexistent channel<commit_after>from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = params[0].split(",")
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
else:
user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel")
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]
|
b0edd701f2adbe5b62a1f87ae17474b1ba91d674
|
hxl/commands/hxlvalidate.py
|
hxl/commands/hxlvalidate.py
|
"""
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
return schema.validate(parser)
# end
|
"""
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
def callback(error):
print >>output, error
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
schema.callback = callback
return schema.validate(parser)
# end
|
Add callback to control output.
|
Add callback to control output.
|
Python
|
unlicense
|
HXLStandard/libhxl-python,HXLStandard/libhxl-python
|
"""
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
return schema.validate(parser)
# end
Add callback to control output.
|
"""
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
def callback(error):
print >>output, error
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
schema.callback = callback
return schema.validate(parser)
# end
|
<commit_before>"""
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
return schema.validate(parser)
# end
<commit_msg>Add callback to control output.<commit_after>
|
"""
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
def callback(error):
print >>output, error
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
schema.callback = callback
return schema.validate(parser)
# end
|
"""
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
return schema.validate(parser)
# end
Add callback to control output."""
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
def callback(error):
print >>output, error
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
schema.callback = callback
return schema.validate(parser)
# end
|
<commit_before>"""
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
return schema.validate(parser)
# end
<commit_msg>Add callback to control output.<commit_after>"""
Command function to schema-validate a HXL dataset.
David Megginson
November 2014
Can use a whitelist of HXL tags, a blacklist, or both.
Usage:
import sys
from hxl.scripts.hxlvalidate import hxlvalidate
hxlvalidate(sys.stdin, sys.stdout, open('MySchema.csv', 'r'))
License: Public Domain
Documentation: http://hxlstandard.org
"""
import sys
from hxl.parser import HXLReader
from hxl.schema import loadHXLSchema
def hxlvalidate(input=sys.stdin, output=sys.stderr, schema_input=None):
def callback(error):
print >>output, error
parser = HXLReader(input)
schema = loadHXLSchema(schema_input)
schema.callback = callback
return schema.validate(parser)
# end
|
962bd7b1f3e8a8e827075e5b37f043eab7f4a1d0
|
backend/management/commands/contributions.py
|
backend/management/commands/contributions.py
|
from django.core.management.base import BaseCommand, CommandError
from backend.models import Account
from backend.tasks import get_all_contributions
class Command(BaseCommand):
help = 'Closes the specified poll for voting'
def add_arguments(self, parser):
parser.add_argument('--username', dest='username', default=None)
def handle(self, *args, **options):
if options.get('username'):
username = options.get('username')
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise CommandError('Account "%s" does not exist' % username)
get_all_contributions(account)
self.stdout.write(
'Successfully fetched all user "%s" contributions' % username)
else:
get_all_contributions()
self.stdout.write('Successfully fetched all users contributions')
|
from django.core.management.base import BaseCommand, CommandError
from backend.models.account import Account
from backend.tasks import get_all_contributions
class Command(BaseCommand):
help = 'Closes the specified poll for voting'
def add_arguments(self, parser):
parser.add_argument('--username', dest='username', default=None)
def handle(self, *args, **options):
if options.get('username'):
username = options.get('username')
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise CommandError('Account "%s" does not exist' % username)
get_all_contributions(account)
self.stdout.write(
'Successfully fetched all user "%s" contributions' % username)
else:
get_all_contributions()
self.stdout.write('Successfully fetched all users contributions')
|
Fix Account import in management command
|
Fix Account import in management command
|
Python
|
agpl-3.0
|
maria/cub,maria/cub,maria/cub,maria/cub
|
from django.core.management.base import BaseCommand, CommandError
from backend.models import Account
from backend.tasks import get_all_contributions
class Command(BaseCommand):
help = 'Closes the specified poll for voting'
def add_arguments(self, parser):
parser.add_argument('--username', dest='username', default=None)
def handle(self, *args, **options):
if options.get('username'):
username = options.get('username')
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise CommandError('Account "%s" does not exist' % username)
get_all_contributions(account)
self.stdout.write(
'Successfully fetched all user "%s" contributions' % username)
else:
get_all_contributions()
self.stdout.write('Successfully fetched all users contributions')
Fix Account import in management command
|
from django.core.management.base import BaseCommand, CommandError
from backend.models.account import Account
from backend.tasks import get_all_contributions
class Command(BaseCommand):
help = 'Closes the specified poll for voting'
def add_arguments(self, parser):
parser.add_argument('--username', dest='username', default=None)
def handle(self, *args, **options):
if options.get('username'):
username = options.get('username')
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise CommandError('Account "%s" does not exist' % username)
get_all_contributions(account)
self.stdout.write(
'Successfully fetched all user "%s" contributions' % username)
else:
get_all_contributions()
self.stdout.write('Successfully fetched all users contributions')
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from backend.models import Account
from backend.tasks import get_all_contributions
class Command(BaseCommand):
help = 'Closes the specified poll for voting'
def add_arguments(self, parser):
parser.add_argument('--username', dest='username', default=None)
def handle(self, *args, **options):
if options.get('username'):
username = options.get('username')
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise CommandError('Account "%s" does not exist' % username)
get_all_contributions(account)
self.stdout.write(
'Successfully fetched all user "%s" contributions' % username)
else:
get_all_contributions()
self.stdout.write('Successfully fetched all users contributions')
<commit_msg>Fix Account import in management command<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from backend.models.account import Account
from backend.tasks import get_all_contributions
class Command(BaseCommand):
help = 'Closes the specified poll for voting'
def add_arguments(self, parser):
parser.add_argument('--username', dest='username', default=None)
def handle(self, *args, **options):
if options.get('username'):
username = options.get('username')
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise CommandError('Account "%s" does not exist' % username)
get_all_contributions(account)
self.stdout.write(
'Successfully fetched all user "%s" contributions' % username)
else:
get_all_contributions()
self.stdout.write('Successfully fetched all users contributions')
|
from django.core.management.base import BaseCommand, CommandError
from backend.models import Account
from backend.tasks import get_all_contributions
class Command(BaseCommand):
help = 'Closes the specified poll for voting'
def add_arguments(self, parser):
parser.add_argument('--username', dest='username', default=None)
def handle(self, *args, **options):
if options.get('username'):
username = options.get('username')
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise CommandError('Account "%s" does not exist' % username)
get_all_contributions(account)
self.stdout.write(
'Successfully fetched all user "%s" contributions' % username)
else:
get_all_contributions()
self.stdout.write('Successfully fetched all users contributions')
Fix Account import in management commandfrom django.core.management.base import BaseCommand, CommandError
from backend.models.account import Account
from backend.tasks import get_all_contributions
class Command(BaseCommand):
help = 'Closes the specified poll for voting'
def add_arguments(self, parser):
parser.add_argument('--username', dest='username', default=None)
def handle(self, *args, **options):
if options.get('username'):
username = options.get('username')
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise CommandError('Account "%s" does not exist' % username)
get_all_contributions(account)
self.stdout.write(
'Successfully fetched all user "%s" contributions' % username)
else:
get_all_contributions()
self.stdout.write('Successfully fetched all users contributions')
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from backend.models import Account
from backend.tasks import get_all_contributions
class Command(BaseCommand):
help = 'Closes the specified poll for voting'
def add_arguments(self, parser):
parser.add_argument('--username', dest='username', default=None)
def handle(self, *args, **options):
if options.get('username'):
username = options.get('username')
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise CommandError('Account "%s" does not exist' % username)
get_all_contributions(account)
self.stdout.write(
'Successfully fetched all user "%s" contributions' % username)
else:
get_all_contributions()
self.stdout.write('Successfully fetched all users contributions')
<commit_msg>Fix Account import in management command<commit_after>from django.core.management.base import BaseCommand, CommandError
from backend.models.account import Account
from backend.tasks import get_all_contributions
class Command(BaseCommand):
help = 'Closes the specified poll for voting'
def add_arguments(self, parser):
parser.add_argument('--username', dest='username', default=None)
def handle(self, *args, **options):
if options.get('username'):
username = options.get('username')
try:
account = Account.objects.get(username=username)
except Account.DoesNotExist:
raise CommandError('Account "%s" does not exist' % username)
get_all_contributions(account)
self.stdout.write(
'Successfully fetched all user "%s" contributions' % username)
else:
get_all_contributions()
self.stdout.write('Successfully fetched all users contributions')
|
563aba155f5df0465bd7f96734ae8b598e693465
|
ghettoq/backends/pyredis.py
|
ghettoq/backends/pyredis.py
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
timeout=self.timeout)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
Make redis backend work with redis-py 1.34.1
|
Make redis backend work with redis-py 1.34.1
|
Python
|
bsd-3-clause
|
ask/ghettoq
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
timeout=self.timeout)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
Make redis backend work with redis-py 1.34.1
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
<commit_before>from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
timeout=self.timeout)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
<commit_msg>Make redis backend work with redis-py 1.34.1<commit_after>
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
timeout=self.timeout)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
Make redis backend work with redis-py 1.34.1from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
<commit_before>from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
timeout=self.timeout)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
<commit_msg>Make redis backend work with redis-py 1.34.1<commit_after>from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
cc9084c744b3ba3525f464adeaa7edaea64abf41
|
torchtext/data/pipeline.py
|
torchtext/data/pipeline.py
|
class Pipeline(object):
"""Defines a pipeline for transforming sequence data."""
def __init__(self, convert_token=None):
if convert_token is None:
self.convert_token = lambda x: x
elif callable(convert_token):
self.convert_token = convert_token
else:
raise ValueError("Pipeline input convert_token {} is not None "
"or callable".format(convert_token))
self.pipes = [self]
def __call__(self, x, *args):
for pipe in self.pipes:
x = pipe.call(x, *args)
return x
def call(self, x, *args):
if isinstance(x, list):
return [self(tok, *args) for tok in x]
return self.convert_token(x, *args)
def add_before(self, pipeline):
"""Add `pipeline` before this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = pipeline.pipes[:] + self.pipes[:]
return self
def add_after(self, pipeline):
"""Add `pipeline` after this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = self.pipes[:] + pipeline.pipes[:]
return self
|
class Pipeline(object):
"""Defines a pipeline for transforming sequence data."""
def __init__(self, convert_token=None):
if convert_token is None:
self.convert_token = lambda x: x
elif callable(convert_token):
self.convert_token = convert_token
else:
raise ValueError("Pipeline input convert_token {} is not None "
"or callable".format(convert_token))
self.pipes = [self]
def __call__(self, x, *args):
for pipe in self.pipes:
x = pipe.call(x, *args)
return x
def call(self, x, *args):
if isinstance(x, list):
return [self.convert_token(tok, *args) for tok in x]
return self.convert_token(x, *args)
def add_before(self, pipeline):
"""Add `pipeline` before this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = pipeline.pipes[:] + self.pipes[:]
return self
def add_after(self, pipeline):
"""Add `pipeline` after this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = self.pipes[:] + pipeline.pipes[:]
return self
|
Fix bug in Pipeline call where we run the whole Pipeline on list input
|
Fix bug in Pipeline call where we run the whole Pipeline on list input
|
Python
|
bsd-3-clause
|
pytorch/text,pytorch/text,pytorch/text,pytorch/text
|
class Pipeline(object):
"""Defines a pipeline for transforming sequence data."""
def __init__(self, convert_token=None):
if convert_token is None:
self.convert_token = lambda x: x
elif callable(convert_token):
self.convert_token = convert_token
else:
raise ValueError("Pipeline input convert_token {} is not None "
"or callable".format(convert_token))
self.pipes = [self]
def __call__(self, x, *args):
for pipe in self.pipes:
x = pipe.call(x, *args)
return x
def call(self, x, *args):
if isinstance(x, list):
return [self(tok, *args) for tok in x]
return self.convert_token(x, *args)
def add_before(self, pipeline):
"""Add `pipeline` before this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = pipeline.pipes[:] + self.pipes[:]
return self
def add_after(self, pipeline):
"""Add `pipeline` after this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = self.pipes[:] + pipeline.pipes[:]
return self
Fix bug in Pipeline call where we run the whole Pipeline on list input
|
class Pipeline(object):
"""Defines a pipeline for transforming sequence data."""
def __init__(self, convert_token=None):
if convert_token is None:
self.convert_token = lambda x: x
elif callable(convert_token):
self.convert_token = convert_token
else:
raise ValueError("Pipeline input convert_token {} is not None "
"or callable".format(convert_token))
self.pipes = [self]
def __call__(self, x, *args):
for pipe in self.pipes:
x = pipe.call(x, *args)
return x
def call(self, x, *args):
if isinstance(x, list):
return [self.convert_token(tok, *args) for tok in x]
return self.convert_token(x, *args)
def add_before(self, pipeline):
"""Add `pipeline` before this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = pipeline.pipes[:] + self.pipes[:]
return self
def add_after(self, pipeline):
"""Add `pipeline` after this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = self.pipes[:] + pipeline.pipes[:]
return self
|
<commit_before>class Pipeline(object):
"""Defines a pipeline for transforming sequence data."""
def __init__(self, convert_token=None):
if convert_token is None:
self.convert_token = lambda x: x
elif callable(convert_token):
self.convert_token = convert_token
else:
raise ValueError("Pipeline input convert_token {} is not None "
"or callable".format(convert_token))
self.pipes = [self]
def __call__(self, x, *args):
for pipe in self.pipes:
x = pipe.call(x, *args)
return x
def call(self, x, *args):
if isinstance(x, list):
return [self(tok, *args) for tok in x]
return self.convert_token(x, *args)
def add_before(self, pipeline):
"""Add `pipeline` before this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = pipeline.pipes[:] + self.pipes[:]
return self
def add_after(self, pipeline):
"""Add `pipeline` after this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = self.pipes[:] + pipeline.pipes[:]
return self
<commit_msg>Fix bug in Pipeline call where we run the whole Pipeline on list input<commit_after>
|
class Pipeline(object):
"""Defines a pipeline for transforming sequence data."""
def __init__(self, convert_token=None):
if convert_token is None:
self.convert_token = lambda x: x
elif callable(convert_token):
self.convert_token = convert_token
else:
raise ValueError("Pipeline input convert_token {} is not None "
"or callable".format(convert_token))
self.pipes = [self]
def __call__(self, x, *args):
for pipe in self.pipes:
x = pipe.call(x, *args)
return x
def call(self, x, *args):
if isinstance(x, list):
return [self.convert_token(tok, *args) for tok in x]
return self.convert_token(x, *args)
def add_before(self, pipeline):
"""Add `pipeline` before this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = pipeline.pipes[:] + self.pipes[:]
return self
def add_after(self, pipeline):
"""Add `pipeline` after this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = self.pipes[:] + pipeline.pipes[:]
return self
|
class Pipeline(object):
"""Defines a pipeline for transforming sequence data."""
def __init__(self, convert_token=None):
if convert_token is None:
self.convert_token = lambda x: x
elif callable(convert_token):
self.convert_token = convert_token
else:
raise ValueError("Pipeline input convert_token {} is not None "
"or callable".format(convert_token))
self.pipes = [self]
def __call__(self, x, *args):
for pipe in self.pipes:
x = pipe.call(x, *args)
return x
def call(self, x, *args):
if isinstance(x, list):
return [self(tok, *args) for tok in x]
return self.convert_token(x, *args)
def add_before(self, pipeline):
"""Add `pipeline` before this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = pipeline.pipes[:] + self.pipes[:]
return self
def add_after(self, pipeline):
"""Add `pipeline` after this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = self.pipes[:] + pipeline.pipes[:]
return self
Fix bug in Pipeline call where we run the whole Pipeline on list inputclass Pipeline(object):
"""Defines a pipeline for transforming sequence data."""
def __init__(self, convert_token=None):
if convert_token is None:
self.convert_token = lambda x: x
elif callable(convert_token):
self.convert_token = convert_token
else:
raise ValueError("Pipeline input convert_token {} is not None "
"or callable".format(convert_token))
self.pipes = [self]
def __call__(self, x, *args):
for pipe in self.pipes:
x = pipe.call(x, *args)
return x
def call(self, x, *args):
if isinstance(x, list):
return [self.convert_token(tok, *args) for tok in x]
return self.convert_token(x, *args)
def add_before(self, pipeline):
"""Add `pipeline` before this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = pipeline.pipes[:] + self.pipes[:]
return self
def add_after(self, pipeline):
"""Add `pipeline` after this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = self.pipes[:] + pipeline.pipes[:]
return self
|
<commit_before>class Pipeline(object):
"""Defines a pipeline for transforming sequence data."""
def __init__(self, convert_token=None):
if convert_token is None:
self.convert_token = lambda x: x
elif callable(convert_token):
self.convert_token = convert_token
else:
raise ValueError("Pipeline input convert_token {} is not None "
"or callable".format(convert_token))
self.pipes = [self]
def __call__(self, x, *args):
for pipe in self.pipes:
x = pipe.call(x, *args)
return x
def call(self, x, *args):
if isinstance(x, list):
return [self(tok, *args) for tok in x]
return self.convert_token(x, *args)
def add_before(self, pipeline):
"""Add `pipeline` before this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = pipeline.pipes[:] + self.pipes[:]
return self
def add_after(self, pipeline):
"""Add `pipeline` after this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = self.pipes[:] + pipeline.pipes[:]
return self
<commit_msg>Fix bug in Pipeline call where we run the whole Pipeline on list input<commit_after>class Pipeline(object):
"""Defines a pipeline for transforming sequence data."""
def __init__(self, convert_token=None):
if convert_token is None:
self.convert_token = lambda x: x
elif callable(convert_token):
self.convert_token = convert_token
else:
raise ValueError("Pipeline input convert_token {} is not None "
"or callable".format(convert_token))
self.pipes = [self]
def __call__(self, x, *args):
for pipe in self.pipes:
x = pipe.call(x, *args)
return x
def call(self, x, *args):
if isinstance(x, list):
return [self.convert_token(tok, *args) for tok in x]
return self.convert_token(x, *args)
def add_before(self, pipeline):
"""Add `pipeline` before this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = pipeline.pipes[:] + self.pipes[:]
return self
def add_after(self, pipeline):
"""Add `pipeline` after this processing pipeline."""
if not isinstance(pipeline, Pipeline):
pipeline = Pipeline(pipeline)
self.pipes = self.pipes[:] + pipeline.pipes[:]
return self
|
d7cb638c8a9505623dced1811b48b32ab73ebc71
|
hello/logo_grab.py
|
hello/logo_grab.py
|
import requests
from bs4 import BeautifulSoup
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
urlparse = urlparse.urlparse
try:
ConnectionError
except NameError:
ConnectionError = ValueError
YAHOO_ENDPOINT = "http://finance.yahoo.com/q/pr?s={}"
CLEARBIT_ENDPOINT = "https://logo.clearbit.com/{}?format=png&size=438"
def get_endpoint(ticker):
return YAHOO_ENDPOINT.format(ticker).lower()
def get_response(target):
return requests.get(target)
def handle_response(response):
if not response.ok:
raise ConnectionError("Yahoo didn't like that")
soup = BeautifulSoup(response.text, "html5lib")
pool = []
for anchor in soup.find_all("a", href=True):
link = anchor['href'].lower()
if link.startswith("http://") or link.startswith("https://"):
if link in pool:
return link
if "yahoo" not in link:
pool.append(link)
def get_domain(url):
return urlparse(url).netloc
def get_logo(domain):
return CLEARBIT_ENDPOINT.format(domain)
def main(ticker):
target = get_endpoint(ticker)
response = get_response(target)
url = handle_response(response)
domain = get_domain(url)
return get_logo(domain)
|
import requests
from bs4 import BeautifulSoup
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
urlparse = urlparse.urlparse
try:
ConnectionError
except NameError:
ConnectionError = ValueError
YAHOO_ENDPOINT = "http://finance.yahoo.com/q/pr?s={}"
CLEARBIT_ENDPOINT = "https://logo.clearbit.com/{}?format=png&size=438"
def get_endpoint(ticker):
return YAHOO_ENDPOINT.format(ticker).lower()
def get_response(target):
return requests.get(target)
def handle_response(response):
if not response.ok:
raise ConnectionError("Yahoo didn't like that")
soup = BeautifulSoup(response.text, "html5lib")
pool = []
for anchor in soup.find_all("a", href=True):
link = anchor['href'].lower()
if link.startswith("http://") or link.startswith("https://"):
if link in pool:
return link
if "yahoo" not in link:
pool.append(link)
raise ValueError("Invalid Ticker")
def get_domain(url):
return urlparse(url).netloc
def get_logo(domain):
return CLEARBIT_ENDPOINT.format(domain)
def main(ticker):
target = get_endpoint(ticker)
response = get_response(target)
url = handle_response(response)
domain = get_domain(url)
return get_logo(domain)
|
Raise exception on not found ticker
|
Raise exception on not found ticker
|
Python
|
mit
|
qwergram/GroupProject1,qwergram/GroupProject1,qwergram/GroupProject1
|
import requests
from bs4 import BeautifulSoup
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
urlparse = urlparse.urlparse
try:
ConnectionError
except NameError:
ConnectionError = ValueError
YAHOO_ENDPOINT = "http://finance.yahoo.com/q/pr?s={}"
CLEARBIT_ENDPOINT = "https://logo.clearbit.com/{}?format=png&size=438"
def get_endpoint(ticker):
return YAHOO_ENDPOINT.format(ticker).lower()
def get_response(target):
return requests.get(target)
def handle_response(response):
if not response.ok:
raise ConnectionError("Yahoo didn't like that")
soup = BeautifulSoup(response.text, "html5lib")
pool = []
for anchor in soup.find_all("a", href=True):
link = anchor['href'].lower()
if link.startswith("http://") or link.startswith("https://"):
if link in pool:
return link
if "yahoo" not in link:
pool.append(link)
def get_domain(url):
return urlparse(url).netloc
def get_logo(domain):
return CLEARBIT_ENDPOINT.format(domain)
def main(ticker):
target = get_endpoint(ticker)
response = get_response(target)
url = handle_response(response)
domain = get_domain(url)
return get_logo(domain)
Raise exception on not found ticker
|
import requests
from bs4 import BeautifulSoup
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
urlparse = urlparse.urlparse
try:
ConnectionError
except NameError:
ConnectionError = ValueError
YAHOO_ENDPOINT = "http://finance.yahoo.com/q/pr?s={}"
CLEARBIT_ENDPOINT = "https://logo.clearbit.com/{}?format=png&size=438"
def get_endpoint(ticker):
return YAHOO_ENDPOINT.format(ticker).lower()
def get_response(target):
return requests.get(target)
def handle_response(response):
if not response.ok:
raise ConnectionError("Yahoo didn't like that")
soup = BeautifulSoup(response.text, "html5lib")
pool = []
for anchor in soup.find_all("a", href=True):
link = anchor['href'].lower()
if link.startswith("http://") or link.startswith("https://"):
if link in pool:
return link
if "yahoo" not in link:
pool.append(link)
raise ValueError("Invalid Ticker")
def get_domain(url):
return urlparse(url).netloc
def get_logo(domain):
return CLEARBIT_ENDPOINT.format(domain)
def main(ticker):
target = get_endpoint(ticker)
response = get_response(target)
url = handle_response(response)
domain = get_domain(url)
return get_logo(domain)
|
<commit_before>import requests
from bs4 import BeautifulSoup
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
urlparse = urlparse.urlparse
try:
ConnectionError
except NameError:
ConnectionError = ValueError
YAHOO_ENDPOINT = "http://finance.yahoo.com/q/pr?s={}"
CLEARBIT_ENDPOINT = "https://logo.clearbit.com/{}?format=png&size=438"
def get_endpoint(ticker):
return YAHOO_ENDPOINT.format(ticker).lower()
def get_response(target):
return requests.get(target)
def handle_response(response):
if not response.ok:
raise ConnectionError("Yahoo didn't like that")
soup = BeautifulSoup(response.text, "html5lib")
pool = []
for anchor in soup.find_all("a", href=True):
link = anchor['href'].lower()
if link.startswith("http://") or link.startswith("https://"):
if link in pool:
return link
if "yahoo" not in link:
pool.append(link)
def get_domain(url):
return urlparse(url).netloc
def get_logo(domain):
return CLEARBIT_ENDPOINT.format(domain)
def main(ticker):
target = get_endpoint(ticker)
response = get_response(target)
url = handle_response(response)
domain = get_domain(url)
return get_logo(domain)
<commit_msg>Raise exception on not found ticker<commit_after>
|
import requests
from bs4 import BeautifulSoup
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
urlparse = urlparse.urlparse
try:
ConnectionError
except NameError:
ConnectionError = ValueError
YAHOO_ENDPOINT = "http://finance.yahoo.com/q/pr?s={}"
CLEARBIT_ENDPOINT = "https://logo.clearbit.com/{}?format=png&size=438"
def get_endpoint(ticker):
return YAHOO_ENDPOINT.format(ticker).lower()
def get_response(target):
return requests.get(target)
def handle_response(response):
if not response.ok:
raise ConnectionError("Yahoo didn't like that")
soup = BeautifulSoup(response.text, "html5lib")
pool = []
for anchor in soup.find_all("a", href=True):
link = anchor['href'].lower()
if link.startswith("http://") or link.startswith("https://"):
if link in pool:
return link
if "yahoo" not in link:
pool.append(link)
raise ValueError("Invalid Ticker")
def get_domain(url):
return urlparse(url).netloc
def get_logo(domain):
return CLEARBIT_ENDPOINT.format(domain)
def main(ticker):
target = get_endpoint(ticker)
response = get_response(target)
url = handle_response(response)
domain = get_domain(url)
return get_logo(domain)
|
import requests
from bs4 import BeautifulSoup
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
urlparse = urlparse.urlparse
try:
ConnectionError
except NameError:
ConnectionError = ValueError
YAHOO_ENDPOINT = "http://finance.yahoo.com/q/pr?s={}"
CLEARBIT_ENDPOINT = "https://logo.clearbit.com/{}?format=png&size=438"
def get_endpoint(ticker):
return YAHOO_ENDPOINT.format(ticker).lower()
def get_response(target):
return requests.get(target)
def handle_response(response):
if not response.ok:
raise ConnectionError("Yahoo didn't like that")
soup = BeautifulSoup(response.text, "html5lib")
pool = []
for anchor in soup.find_all("a", href=True):
link = anchor['href'].lower()
if link.startswith("http://") or link.startswith("https://"):
if link in pool:
return link
if "yahoo" not in link:
pool.append(link)
def get_domain(url):
return urlparse(url).netloc
def get_logo(domain):
return CLEARBIT_ENDPOINT.format(domain)
def main(ticker):
target = get_endpoint(ticker)
response = get_response(target)
url = handle_response(response)
domain = get_domain(url)
return get_logo(domain)
Raise exception on not found tickerimport requests
from bs4 import BeautifulSoup
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
urlparse = urlparse.urlparse
try:
ConnectionError
except NameError:
ConnectionError = ValueError
YAHOO_ENDPOINT = "http://finance.yahoo.com/q/pr?s={}"
CLEARBIT_ENDPOINT = "https://logo.clearbit.com/{}?format=png&size=438"
def get_endpoint(ticker):
return YAHOO_ENDPOINT.format(ticker).lower()
def get_response(target):
return requests.get(target)
def handle_response(response):
if not response.ok:
raise ConnectionError("Yahoo didn't like that")
soup = BeautifulSoup(response.text, "html5lib")
pool = []
for anchor in soup.find_all("a", href=True):
link = anchor['href'].lower()
if link.startswith("http://") or link.startswith("https://"):
if link in pool:
return link
if "yahoo" not in link:
pool.append(link)
raise ValueError("Invalid Ticker")
def get_domain(url):
return urlparse(url).netloc
def get_logo(domain):
return CLEARBIT_ENDPOINT.format(domain)
def main(ticker):
target = get_endpoint(ticker)
response = get_response(target)
url = handle_response(response)
domain = get_domain(url)
return get_logo(domain)
|
<commit_before>import requests
from bs4 import BeautifulSoup
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
urlparse = urlparse.urlparse
try:
ConnectionError
except NameError:
ConnectionError = ValueError
YAHOO_ENDPOINT = "http://finance.yahoo.com/q/pr?s={}"
CLEARBIT_ENDPOINT = "https://logo.clearbit.com/{}?format=png&size=438"
def get_endpoint(ticker):
return YAHOO_ENDPOINT.format(ticker).lower()
def get_response(target):
return requests.get(target)
def handle_response(response):
if not response.ok:
raise ConnectionError("Yahoo didn't like that")
soup = BeautifulSoup(response.text, "html5lib")
pool = []
for anchor in soup.find_all("a", href=True):
link = anchor['href'].lower()
if link.startswith("http://") or link.startswith("https://"):
if link in pool:
return link
if "yahoo" not in link:
pool.append(link)
def get_domain(url):
return urlparse(url).netloc
def get_logo(domain):
return CLEARBIT_ENDPOINT.format(domain)
def main(ticker):
target = get_endpoint(ticker)
response = get_response(target)
url = handle_response(response)
domain = get_domain(url)
return get_logo(domain)
<commit_msg>Raise exception on not found ticker<commit_after>import requests
from bs4 import BeautifulSoup
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
urlparse = urlparse.urlparse
try:
ConnectionError
except NameError:
ConnectionError = ValueError
YAHOO_ENDPOINT = "http://finance.yahoo.com/q/pr?s={}"
CLEARBIT_ENDPOINT = "https://logo.clearbit.com/{}?format=png&size=438"
def get_endpoint(ticker):
return YAHOO_ENDPOINT.format(ticker).lower()
def get_response(target):
return requests.get(target)
def handle_response(response):
if not response.ok:
raise ConnectionError("Yahoo didn't like that")
soup = BeautifulSoup(response.text, "html5lib")
pool = []
for anchor in soup.find_all("a", href=True):
link = anchor['href'].lower()
if link.startswith("http://") or link.startswith("https://"):
if link in pool:
return link
if "yahoo" not in link:
pool.append(link)
raise ValueError("Invalid Ticker")
def get_domain(url):
return urlparse(url).netloc
def get_logo(domain):
return CLEARBIT_ENDPOINT.format(domain)
def main(ticker):
target = get_endpoint(ticker)
response = get_response(target)
url = handle_response(response)
domain = get_domain(url)
return get_logo(domain)
|
f59870e66253f4a9f60875ae61f9e71037ae5b73
|
tests/test_channel_shim.py
|
tests/test_channel_shim.py
|
import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.'))
print('Version info:', gevent.__version__, v)
if v >= (0, 13) and v < (1, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.')[:2])
print('Version info:', gevent.__version__, v)
if v >= (0, 13) and v < (1, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
Fix extraction of gevent version in test
|
Fix extraction of gevent version in test
Current version of gevent installed in travis seems to have a ".post0" as the last part of the version, which fails the integer conversion. Since we are only interested in the first two parts of the version anyway we can just strip the rest away.
|
Python
|
bsd-3-clause
|
wal-e/wal-e
|
import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.'))
print('Version info:', gevent.__version__, v)
if v >= (0, 13) and v < (1, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
Fix extraction of gevent version in test
Current version of gevent installed in travis seems to have a ".post0" as the last part of the version, which fails the integer conversion. Since we are only interested in the first two parts of the version anyway we can just strip the rest away.
|
import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.')[:2])
print('Version info:', gevent.__version__, v)
if v >= (0, 13) and v < (1, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
<commit_before>import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.'))
print('Version info:', gevent.__version__, v)
if v >= (0, 13) and v < (1, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
<commit_msg>Fix extraction of gevent version in test
Current version of gevent installed in travis seems to have a ".post0" as the last part of the version, which fails the integer conversion. Since we are only interested in the first two parts of the version anyway we can just strip the rest away.<commit_after>
|
import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.')[:2])
print('Version info:', gevent.__version__, v)
if v >= (0, 13) and v < (1, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.'))
print('Version info:', gevent.__version__, v)
if v >= (0, 13) and v < (1, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
Fix extraction of gevent version in test
Current version of gevent installed in travis seems to have a ".post0" as the last part of the version, which fails the integer conversion. Since we are only interested in the first two parts of the version anyway we can just strip the rest away.import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.')[:2])
print('Version info:', gevent.__version__, v)
if v >= (0, 13) and v < (1, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
<commit_before>import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.'))
print('Version info:', gevent.__version__, v)
if v >= (0, 13) and v < (1, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
<commit_msg>Fix extraction of gevent version in test
Current version of gevent installed in travis seems to have a ".post0" as the last part of the version, which fails the integer conversion. Since we are only interested in the first two parts of the version anyway we can just strip the rest away.<commit_after>import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.')[:2])
print('Version info:', gevent.__version__, v)
if v >= (0, 13) and v < (1, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
dfd7a3aad5d3ae60559495a303f94abdd1327735
|
tssim/functions/wrapper.py
|
tssim/functions/wrapper.py
|
"""This module contains the main wrapper class."""
class BaseWrapper:
"""Define base template for function wrapper classes. """
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __call__(self, *args, **kwargs):
raise NotImplementedError
class NumpyWrapper(BaseWrapper):
"""Function wrapper for numpy's random functions. Allows easy usage
avoiding the creation anonymous lambda functions. In addition, the `size`
attribute is adjusted automatically.
For instance, instead of writing
'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])'
you may simply write
'ts.random.randint(low=1, high=10)'.
"""
def __init__(self, func, size="arg"):
super(NumpyWrapper, self).__init__(func)
self.size = size
def __call__(self, *args, **kwargs):
if self.size == "arg":
def wrapped(x):
return self.func(x.shape[0], *args, **kwargs)
elif self.size == "kwarg":
def wrapped(x):
return self.func(*args, size=x.shape[0], **kwargs)
else:
raise ValueError("Size argument must be 'arg' or 'kwarg'.")
wrapped.__doc__ = self.func.__doc__
return wrapped
|
"""This module contains the main wrapper class."""
class BaseWrapper(object):
"""Define base template for function wrapper classes. """
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __call__(self, *args, **kwargs):
raise NotImplementedError
class NumpyWrapper(BaseWrapper):
"""Function wrapper for numpy's random functions. Allows easy usage
avoiding the creation anonymous lambda functions. In addition, the `size`
attribute is adjusted automatically.
For instance, instead of writing
'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])'
you may simply write
'ts.random.randint(low=1, high=10)'.
"""
def __init__(self, func, size="arg"):
super(NumpyWrapper, self).__init__(func)
self.size = size
def __call__(self, *args, **kwargs):
if self.size == "arg":
def wrapped(x):
return self.func(x.shape[0], *args, **kwargs)
elif self.size == "kwarg":
def wrapped(x):
return self.func(*args, size=x.shape[0], **kwargs)
else:
raise ValueError("Size argument must be 'arg' or 'kwarg'.")
wrapped.__doc__ = self.func.__doc__
return wrapped
|
Add inheritance from object for BaseWrapper to force new style classes in python 2.7.
|
Add inheritance from object for BaseWrapper to force new style classes in python 2.7.
|
Python
|
mit
|
mansenfranzen/tssim
|
"""This module contains the main wrapper class."""
class BaseWrapper:
"""Define base template for function wrapper classes. """
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __call__(self, *args, **kwargs):
raise NotImplementedError
class NumpyWrapper(BaseWrapper):
"""Function wrapper for numpy's random functions. Allows easy usage
avoiding the creation anonymous lambda functions. In addition, the `size`
attribute is adjusted automatically.
For instance, instead of writing
'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])'
you may simply write
'ts.random.randint(low=1, high=10)'.
"""
def __init__(self, func, size="arg"):
super(NumpyWrapper, self).__init__(func)
self.size = size
def __call__(self, *args, **kwargs):
if self.size == "arg":
def wrapped(x):
return self.func(x.shape[0], *args, **kwargs)
elif self.size == "kwarg":
def wrapped(x):
return self.func(*args, size=x.shape[0], **kwargs)
else:
raise ValueError("Size argument must be 'arg' or 'kwarg'.")
wrapped.__doc__ = self.func.__doc__
return wrapped
Add inheritance from object for BaseWrapper to force new style classes in python 2.7.
|
"""This module contains the main wrapper class."""
class BaseWrapper(object):
"""Define base template for function wrapper classes. """
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __call__(self, *args, **kwargs):
raise NotImplementedError
class NumpyWrapper(BaseWrapper):
"""Function wrapper for numpy's random functions. Allows easy usage
avoiding the creation anonymous lambda functions. In addition, the `size`
attribute is adjusted automatically.
For instance, instead of writing
'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])'
you may simply write
'ts.random.randint(low=1, high=10)'.
"""
def __init__(self, func, size="arg"):
super(NumpyWrapper, self).__init__(func)
self.size = size
def __call__(self, *args, **kwargs):
if self.size == "arg":
def wrapped(x):
return self.func(x.shape[0], *args, **kwargs)
elif self.size == "kwarg":
def wrapped(x):
return self.func(*args, size=x.shape[0], **kwargs)
else:
raise ValueError("Size argument must be 'arg' or 'kwarg'.")
wrapped.__doc__ = self.func.__doc__
return wrapped
|
<commit_before>"""This module contains the main wrapper class."""
class BaseWrapper:
"""Define base template for function wrapper classes. """
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __call__(self, *args, **kwargs):
raise NotImplementedError
class NumpyWrapper(BaseWrapper):
"""Function wrapper for numpy's random functions. Allows easy usage
avoiding the creation anonymous lambda functions. In addition, the `size`
attribute is adjusted automatically.
For instance, instead of writing
'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])'
you may simply write
'ts.random.randint(low=1, high=10)'.
"""
def __init__(self, func, size="arg"):
super(NumpyWrapper, self).__init__(func)
self.size = size
def __call__(self, *args, **kwargs):
if self.size == "arg":
def wrapped(x):
return self.func(x.shape[0], *args, **kwargs)
elif self.size == "kwarg":
def wrapped(x):
return self.func(*args, size=x.shape[0], **kwargs)
else:
raise ValueError("Size argument must be 'arg' or 'kwarg'.")
wrapped.__doc__ = self.func.__doc__
return wrapped
<commit_msg>Add inheritance from object for BaseWrapper to force new style classes in python 2.7.<commit_after>
|
"""This module contains the main wrapper class."""
class BaseWrapper(object):
"""Define base template for function wrapper classes. """
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __call__(self, *args, **kwargs):
raise NotImplementedError
class NumpyWrapper(BaseWrapper):
"""Function wrapper for numpy's random functions. Allows easy usage
avoiding the creation anonymous lambda functions. In addition, the `size`
attribute is adjusted automatically.
For instance, instead of writing
'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])'
you may simply write
'ts.random.randint(low=1, high=10)'.
"""
def __init__(self, func, size="arg"):
super(NumpyWrapper, self).__init__(func)
self.size = size
def __call__(self, *args, **kwargs):
if self.size == "arg":
def wrapped(x):
return self.func(x.shape[0], *args, **kwargs)
elif self.size == "kwarg":
def wrapped(x):
return self.func(*args, size=x.shape[0], **kwargs)
else:
raise ValueError("Size argument must be 'arg' or 'kwarg'.")
wrapped.__doc__ = self.func.__doc__
return wrapped
|
"""This module contains the main wrapper class."""
class BaseWrapper:
"""Define base template for function wrapper classes. """
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __call__(self, *args, **kwargs):
raise NotImplementedError
class NumpyWrapper(BaseWrapper):
"""Function wrapper for numpy's random functions. Allows easy usage
avoiding the creation anonymous lambda functions. In addition, the `size`
attribute is adjusted automatically.
For instance, instead of writing
'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])'
you may simply write
'ts.random.randint(low=1, high=10)'.
"""
def __init__(self, func, size="arg"):
super(NumpyWrapper, self).__init__(func)
self.size = size
def __call__(self, *args, **kwargs):
if self.size == "arg":
def wrapped(x):
return self.func(x.shape[0], *args, **kwargs)
elif self.size == "kwarg":
def wrapped(x):
return self.func(*args, size=x.shape[0], **kwargs)
else:
raise ValueError("Size argument must be 'arg' or 'kwarg'.")
wrapped.__doc__ = self.func.__doc__
return wrapped
Add inheritance from object for BaseWrapper to force new style classes in python 2.7."""This module contains the main wrapper class."""
class BaseWrapper(object):
"""Define base template for function wrapper classes. """
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __call__(self, *args, **kwargs):
raise NotImplementedError
class NumpyWrapper(BaseWrapper):
"""Function wrapper for numpy's random functions. Allows easy usage
avoiding the creation anonymous lambda functions. In addition, the `size`
attribute is adjusted automatically.
For instance, instead of writing
'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])'
you may simply write
'ts.random.randint(low=1, high=10)'.
"""
def __init__(self, func, size="arg"):
super(NumpyWrapper, self).__init__(func)
self.size = size
def __call__(self, *args, **kwargs):
if self.size == "arg":
def wrapped(x):
return self.func(x.shape[0], *args, **kwargs)
elif self.size == "kwarg":
def wrapped(x):
return self.func(*args, size=x.shape[0], **kwargs)
else:
raise ValueError("Size argument must be 'arg' or 'kwarg'.")
wrapped.__doc__ = self.func.__doc__
return wrapped
|
<commit_before>"""This module contains the main wrapper class."""
class BaseWrapper:
"""Define base template for function wrapper classes. """
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __call__(self, *args, **kwargs):
raise NotImplementedError
class NumpyWrapper(BaseWrapper):
"""Function wrapper for numpy's random functions. Allows easy usage
avoiding the creation anonymous lambda functions. In addition, the `size`
attribute is adjusted automatically.
For instance, instead of writing
'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])'
you may simply write
'ts.random.randint(low=1, high=10)'.
"""
def __init__(self, func, size="arg"):
super(NumpyWrapper, self).__init__(func)
self.size = size
def __call__(self, *args, **kwargs):
if self.size == "arg":
def wrapped(x):
return self.func(x.shape[0], *args, **kwargs)
elif self.size == "kwarg":
def wrapped(x):
return self.func(*args, size=x.shape[0], **kwargs)
else:
raise ValueError("Size argument must be 'arg' or 'kwarg'.")
wrapped.__doc__ = self.func.__doc__
return wrapped
<commit_msg>Add inheritance from object for BaseWrapper to force new style classes in python 2.7.<commit_after>"""This module contains the main wrapper class."""
class BaseWrapper(object):
"""Define base template for function wrapper classes. """
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __call__(self, *args, **kwargs):
raise NotImplementedError
class NumpyWrapper(BaseWrapper):
"""Function wrapper for numpy's random functions. Allows easy usage
avoiding the creation anonymous lambda functions. In addition, the `size`
attribute is adjusted automatically.
For instance, instead of writing
'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])'
you may simply write
'ts.random.randint(low=1, high=10)'.
"""
def __init__(self, func, size="arg"):
super(NumpyWrapper, self).__init__(func)
self.size = size
def __call__(self, *args, **kwargs):
if self.size == "arg":
def wrapped(x):
return self.func(x.shape[0], *args, **kwargs)
elif self.size == "kwarg":
def wrapped(x):
return self.func(*args, size=x.shape[0], **kwargs)
else:
raise ValueError("Size argument must be 'arg' or 'kwarg'.")
wrapped.__doc__ = self.func.__doc__
return wrapped
|
d46d0b5a5392b6ca047b519a9d6280b5b0581e81
|
system_maintenance/tests/functional/tests.py
|
system_maintenance/tests/functional/tests.py
|
from django.test import LiveServerTestCase
from selenium import webdriver
class FunctionalTest(LiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
|
Switch to 'StaticLiveServerTestCase' to avoid having to set 'settings.STATIC_ROOT'
|
Switch to 'StaticLiveServerTestCase' to avoid having to set 'settings.STATIC_ROOT'
|
Python
|
bsd-3-clause
|
mfcovington/django-system-maintenance,mfcovington/django-system-maintenance,mfcovington/django-system-maintenance
|
from django.test import LiveServerTestCase
from selenium import webdriver
class FunctionalTest(LiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
Switch to 'StaticLiveServerTestCase' to avoid having to set 'settings.STATIC_ROOT'
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
|
<commit_before>from django.test import LiveServerTestCase
from selenium import webdriver
class FunctionalTest(LiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
<commit_msg>Switch to 'StaticLiveServerTestCase' to avoid having to set 'settings.STATIC_ROOT'<commit_after>
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
|
from django.test import LiveServerTestCase
from selenium import webdriver
class FunctionalTest(LiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
Switch to 'StaticLiveServerTestCase' to avoid having to set 'settings.STATIC_ROOT'from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
|
<commit_before>from django.test import LiveServerTestCase
from selenium import webdriver
class FunctionalTest(LiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
<commit_msg>Switch to 'StaticLiveServerTestCase' to avoid having to set 'settings.STATIC_ROOT'<commit_after>from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
|
f12ba3bd1c49f83de7585337518d1e9ac9fb98f7
|
waterbutler/s3/metadata.py
|
waterbutler/s3/metadata.py
|
import os
from waterbutler.core import metadata
class S3FileMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'file'
@property
def name(self):
return os.path.split(self.raw.Key.text)[1]
@property
def path(self):
return self.raw.Key.text
@property
def size(self):
return self.raw.Size.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
class S3FolderMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'folder'
@property
def name(self):
return self.raw.Prefix.text.split('/')[-2]
@property
def path(self):
return self.raw.Prefix.text
@property
def size(self):
return None
@property
def modified(self):
return None
|
import os
from waterbutler.core import metadata
class S3FileMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'file'
@property
def name(self):
return os.path.split(self.raw.Key.text)[1]
@property
def path(self):
return self.raw.Key.text
@property
def size(self):
return self.raw.Size.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
class S3FolderMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'folder'
@property
def name(self):
return self.raw.Prefix.text.split('/')[-2]
@property
def path(self):
return self.raw.Prefix.text
@property
def size(self):
return None
@property
def modified(self):
return None
# TODO dates!
class S3Revision(metadata.BaseRevision):
@property
def provider(self):
return 's3'
@property
def size(self):
return int(self.raw.Size.text)
@property
def revision(self):
return self.raw.VersionId.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
|
Add an s3 revision class
|
Add an s3 revision class
|
Python
|
apache-2.0
|
Johnetordoff/waterbutler,TomBaxter/waterbutler,cosenal/waterbutler,felliott/waterbutler,kwierman/waterbutler,chrisseto/waterbutler,rdhyee/waterbutler,RCOSDP/waterbutler,CenterForOpenScience/waterbutler,Ghalko/waterbutler,icereval/waterbutler,hmoco/waterbutler,rafaeldelucena/waterbutler
|
import os
from waterbutler.core import metadata
class S3FileMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'file'
@property
def name(self):
return os.path.split(self.raw.Key.text)[1]
@property
def path(self):
return self.raw.Key.text
@property
def size(self):
return self.raw.Size.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
class S3FolderMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'folder'
@property
def name(self):
return self.raw.Prefix.text.split('/')[-2]
@property
def path(self):
return self.raw.Prefix.text
@property
def size(self):
return None
@property
def modified(self):
return None
Add an s3 revision class
|
import os
from waterbutler.core import metadata
class S3FileMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'file'
@property
def name(self):
return os.path.split(self.raw.Key.text)[1]
@property
def path(self):
return self.raw.Key.text
@property
def size(self):
return self.raw.Size.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
class S3FolderMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'folder'
@property
def name(self):
return self.raw.Prefix.text.split('/')[-2]
@property
def path(self):
return self.raw.Prefix.text
@property
def size(self):
return None
@property
def modified(self):
return None
# TODO dates!
class S3Revision(metadata.BaseRevision):
@property
def provider(self):
return 's3'
@property
def size(self):
return int(self.raw.Size.text)
@property
def revision(self):
return self.raw.VersionId.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
|
<commit_before>import os
from waterbutler.core import metadata
class S3FileMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'file'
@property
def name(self):
return os.path.split(self.raw.Key.text)[1]
@property
def path(self):
return self.raw.Key.text
@property
def size(self):
return self.raw.Size.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
class S3FolderMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'folder'
@property
def name(self):
return self.raw.Prefix.text.split('/')[-2]
@property
def path(self):
return self.raw.Prefix.text
@property
def size(self):
return None
@property
def modified(self):
return None
<commit_msg>Add an s3 revision class<commit_after>
|
import os
from waterbutler.core import metadata
class S3FileMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'file'
@property
def name(self):
return os.path.split(self.raw.Key.text)[1]
@property
def path(self):
return self.raw.Key.text
@property
def size(self):
return self.raw.Size.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
class S3FolderMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'folder'
@property
def name(self):
return self.raw.Prefix.text.split('/')[-2]
@property
def path(self):
return self.raw.Prefix.text
@property
def size(self):
return None
@property
def modified(self):
return None
# TODO dates!
class S3Revision(metadata.BaseRevision):
@property
def provider(self):
return 's3'
@property
def size(self):
return int(self.raw.Size.text)
@property
def revision(self):
return self.raw.VersionId.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
|
import os
from waterbutler.core import metadata
class S3FileMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'file'
@property
def name(self):
return os.path.split(self.raw.Key.text)[1]
@property
def path(self):
return self.raw.Key.text
@property
def size(self):
return self.raw.Size.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
class S3FolderMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'folder'
@property
def name(self):
return self.raw.Prefix.text.split('/')[-2]
@property
def path(self):
return self.raw.Prefix.text
@property
def size(self):
return None
@property
def modified(self):
return None
Add an s3 revision classimport os
from waterbutler.core import metadata
class S3FileMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'file'
@property
def name(self):
return os.path.split(self.raw.Key.text)[1]
@property
def path(self):
return self.raw.Key.text
@property
def size(self):
return self.raw.Size.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
class S3FolderMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'folder'
@property
def name(self):
return self.raw.Prefix.text.split('/')[-2]
@property
def path(self):
return self.raw.Prefix.text
@property
def size(self):
return None
@property
def modified(self):
return None
# TODO dates!
class S3Revision(metadata.BaseRevision):
@property
def provider(self):
return 's3'
@property
def size(self):
return int(self.raw.Size.text)
@property
def revision(self):
return self.raw.VersionId.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
|
<commit_before>import os
from waterbutler.core import metadata
class S3FileMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'file'
@property
def name(self):
return os.path.split(self.raw.Key.text)[1]
@property
def path(self):
return self.raw.Key.text
@property
def size(self):
return self.raw.Size.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
class S3FolderMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'folder'
@property
def name(self):
return self.raw.Prefix.text.split('/')[-2]
@property
def path(self):
return self.raw.Prefix.text
@property
def size(self):
return None
@property
def modified(self):
return None
<commit_msg>Add an s3 revision class<commit_after>import os
from waterbutler.core import metadata
class S3FileMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'file'
@property
def name(self):
return os.path.split(self.raw.Key.text)[1]
@property
def path(self):
return self.raw.Key.text
@property
def size(self):
return self.raw.Size.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
class S3FolderMetadata(metadata.BaseMetadata):
@property
def provider(self):
return 's3'
@property
def kind(self):
return 'folder'
@property
def name(self):
return self.raw.Prefix.text.split('/')[-2]
@property
def path(self):
return self.raw.Prefix.text
@property
def size(self):
return None
@property
def modified(self):
return None
# TODO dates!
class S3Revision(metadata.BaseRevision):
@property
def provider(self):
return 's3'
@property
def size(self):
return int(self.raw.Size.text)
@property
def revision(self):
return self.raw.VersionId.text
@property
def modified(self):
return self.raw.LastModified.text
@property
def extra(self):
return {
'md5': self.raw.ETag.text.replace('"', '')
}
|
f7f23a85931a2fb3ba3decafb9b8ebf2c1ec4594
|
arxiv_vanity/sitemaps.py
|
arxiv_vanity/sitemaps.py
|
from django.contrib.sitemaps import Sitemap
from .papers.models import Paper
class PaperSitemap(Sitemap):
changefreq = "monthly"
priority = 0.5
limit = 2000
def items(self):
return Paper.objects.only("arxiv_id", "updated").all()
def lastmod(self, obj):
return obj.updated
sitemaps = {"papers": PaperSitemap}
|
import datetime
from django.contrib.sitemaps import Sitemap
from django.utils import timezone
from .papers.models import Paper
class PaperSitemap(Sitemap):
priority = 0.5
limit = 2000
def items(self):
return Paper.objects.only("arxiv_id", "updated").all()
def lastmod(self, obj):
return obj.updated
def changefreq(self, obj):
# greater than 5 years ago, assume it ain't gonna change
if obj.updated < timezone.now() - datetime.timedelta(days=5 * 365):
return "yearly"
return "monthly"
sitemaps = {"papers": PaperSitemap}
|
Index old papers less often
|
Index old papers less often
|
Python
|
apache-2.0
|
arxiv-vanity/arxiv-vanity,arxiv-vanity/arxiv-vanity,arxiv-vanity/arxiv-vanity,arxiv-vanity/arxiv-vanity
|
from django.contrib.sitemaps import Sitemap
from .papers.models import Paper
class PaperSitemap(Sitemap):
changefreq = "monthly"
priority = 0.5
limit = 2000
def items(self):
return Paper.objects.only("arxiv_id", "updated").all()
def lastmod(self, obj):
return obj.updated
sitemaps = {"papers": PaperSitemap}
Index old papers less often
|
import datetime
from django.contrib.sitemaps import Sitemap
from django.utils import timezone
from .papers.models import Paper
class PaperSitemap(Sitemap):
priority = 0.5
limit = 2000
def items(self):
return Paper.objects.only("arxiv_id", "updated").all()
def lastmod(self, obj):
return obj.updated
def changefreq(self, obj):
# greater than 5 years ago, assume it ain't gonna change
if obj.updated < timezone.now() - datetime.timedelta(days=5 * 365):
return "yearly"
return "monthly"
sitemaps = {"papers": PaperSitemap}
|
<commit_before>from django.contrib.sitemaps import Sitemap
from .papers.models import Paper
class PaperSitemap(Sitemap):
changefreq = "monthly"
priority = 0.5
limit = 2000
def items(self):
return Paper.objects.only("arxiv_id", "updated").all()
def lastmod(self, obj):
return obj.updated
sitemaps = {"papers": PaperSitemap}
<commit_msg>Index old papers less often<commit_after>
|
import datetime
from django.contrib.sitemaps import Sitemap
from django.utils import timezone
from .papers.models import Paper
class PaperSitemap(Sitemap):
priority = 0.5
limit = 2000
def items(self):
return Paper.objects.only("arxiv_id", "updated").all()
def lastmod(self, obj):
return obj.updated
def changefreq(self, obj):
# greater than 5 years ago, assume it ain't gonna change
if obj.updated < timezone.now() - datetime.timedelta(days=5 * 365):
return "yearly"
return "monthly"
sitemaps = {"papers": PaperSitemap}
|
from django.contrib.sitemaps import Sitemap
from .papers.models import Paper
class PaperSitemap(Sitemap):
changefreq = "monthly"
priority = 0.5
limit = 2000
def items(self):
return Paper.objects.only("arxiv_id", "updated").all()
def lastmod(self, obj):
return obj.updated
sitemaps = {"papers": PaperSitemap}
Index old papers less oftenimport datetime
from django.contrib.sitemaps import Sitemap
from django.utils import timezone
from .papers.models import Paper
class PaperSitemap(Sitemap):
priority = 0.5
limit = 2000
def items(self):
return Paper.objects.only("arxiv_id", "updated").all()
def lastmod(self, obj):
return obj.updated
def changefreq(self, obj):
# greater than 5 years ago, assume it ain't gonna change
if obj.updated < timezone.now() - datetime.timedelta(days=5 * 365):
return "yearly"
return "monthly"
sitemaps = {"papers": PaperSitemap}
|
<commit_before>from django.contrib.sitemaps import Sitemap
from .papers.models import Paper
class PaperSitemap(Sitemap):
changefreq = "monthly"
priority = 0.5
limit = 2000
def items(self):
return Paper.objects.only("arxiv_id", "updated").all()
def lastmod(self, obj):
return obj.updated
sitemaps = {"papers": PaperSitemap}
<commit_msg>Index old papers less often<commit_after>import datetime
from django.contrib.sitemaps import Sitemap
from django.utils import timezone
from .papers.models import Paper
class PaperSitemap(Sitemap):
priority = 0.5
limit = 2000
def items(self):
return Paper.objects.only("arxiv_id", "updated").all()
def lastmod(self, obj):
return obj.updated
def changefreq(self, obj):
# greater than 5 years ago, assume it ain't gonna change
if obj.updated < timezone.now() - datetime.timedelta(days=5 * 365):
return "yearly"
return "monthly"
sitemaps = {"papers": PaperSitemap}
|
ce28b359122475f544b9ae3bc9e05a5bc02377e4
|
conda_kapsel/internal/py2_compat.py
|
conda_kapsel/internal/py2_compat.py
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
assert isinstance(key, basestring) # noqa
assert isinstance(key, str)
if isinstance(value, unicode): # noqa
environ_copy[key] = value.encode()
assert isinstance(environ_copy[key], str)
elif not isinstance(value, str):
raise TypeError("Environment contains non-unicode non-str value %r" % value)
else:
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
if isinstance(key, unicode): # noqa
key = key.encode()
if isinstance(value, unicode): # noqa
value = value.encode()
assert isinstance(key, str)
assert isinstance(value, str)
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
|
Fix unicode keys in addition to values for windows/py2 environment
|
Fix unicode keys in addition to values for windows/py2 environment
|
Python
|
bsd-3-clause
|
conda/kapsel,conda/kapsel
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
assert isinstance(key, basestring) # noqa
assert isinstance(key, str)
if isinstance(value, unicode): # noqa
environ_copy[key] = value.encode()
assert isinstance(environ_copy[key], str)
elif not isinstance(value, str):
raise TypeError("Environment contains non-unicode non-str value %r" % value)
else:
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
Fix unicode keys in addition to values for windows/py2 environment
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
if isinstance(key, unicode): # noqa
key = key.encode()
if isinstance(value, unicode): # noqa
value = value.encode()
assert isinstance(key, str)
assert isinstance(value, str)
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
|
<commit_before># -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
assert isinstance(key, basestring) # noqa
assert isinstance(key, str)
if isinstance(value, unicode): # noqa
environ_copy[key] = value.encode()
assert isinstance(environ_copy[key], str)
elif not isinstance(value, str):
raise TypeError("Environment contains non-unicode non-str value %r" % value)
else:
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
<commit_msg>Fix unicode keys in addition to values for windows/py2 environment<commit_after>
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
if isinstance(key, unicode): # noqa
key = key.encode()
if isinstance(value, unicode): # noqa
value = value.encode()
assert isinstance(key, str)
assert isinstance(value, str)
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
assert isinstance(key, basestring) # noqa
assert isinstance(key, str)
if isinstance(value, unicode): # noqa
environ_copy[key] = value.encode()
assert isinstance(environ_copy[key], str)
elif not isinstance(value, str):
raise TypeError("Environment contains non-unicode non-str value %r" % value)
else:
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
Fix unicode keys in addition to values for windows/py2 environment# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
if isinstance(key, unicode): # noqa
key = key.encode()
if isinstance(value, unicode): # noqa
value = value.encode()
assert isinstance(key, str)
assert isinstance(value, str)
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
|
<commit_before># -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
assert isinstance(key, basestring) # noqa
assert isinstance(key, str)
if isinstance(value, unicode): # noqa
environ_copy[key] = value.encode()
assert isinstance(environ_copy[key], str)
elif not isinstance(value, str):
raise TypeError("Environment contains non-unicode non-str value %r" % value)
else:
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
<commit_msg>Fix unicode keys in addition to values for windows/py2 environment<commit_after># -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
if isinstance(key, unicode): # noqa
key = key.encode()
if isinstance(value, unicode): # noqa
value = value.encode()
assert isinstance(key, str)
assert isinstance(value, str)
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
|
b5980bfef36d93413d2056e2bde74958fc1db6b4
|
checks.d/linux_vm_extras.py
|
checks.d/linux_vm_extras.py
|
# project
from checks import AgentCheck
VM_COUNTS = {
'pgpgin': 'pages.in',
'pgpgout': 'pages.out',
'pswpin': 'pages.swapped_in',
'pswpout': 'pages.swapped_out',
'pgfault': 'pages.faults',
'pgmajfault': 'pages.major_faults'
}
class MoreLinuxVMCheck(AgentCheck):
def check(self, instance):
tags = instance.get('tags', [])
enabled_metrics = instance.get('enabled_metrics', list(VM_COUNTS.keys()))
with open('/proc/vmstat', 'r') as vm_info:
content = [line.strip().split() for line in vm_info.readlines()]
for line in content:
if line[0] in VM_COUNTS:
self.monotonic_count("system.linux.vm.{0}".format(VM_COUNTS[line[0]]), int(line[1]), tags=tags)
|
# project
from checks import AgentCheck
VM_COUNTS = {
'pgpgin': 'pages.in',
'pgpgout': 'pages.out',
'pswpin': 'pages.swapped_in',
'pswpout': 'pages.swapped_out',
'pgfault': 'pages.faults',
'pgmajfault': 'pages.major_faults'
}
class MoreLinuxVMCheck(AgentCheck):
def check(self, instance):
tags = instance.get('tags', [])
enabled_metrics = instance.get('enabled_metrics', list(VM_COUNTS.keys()))
with open('/proc/vmstat', 'r') as vm_info:
content = [line.strip().split() for line in vm_info.readlines()]
for line in content:
if line[0] in enabled_metrics:
self.monotonic_count("system.linux.vm.{0}".format(VM_COUNTS[line[0]]), int(line[1]), tags=tags)
|
Fix bug that was causing this to not even work at all
|
Fix bug that was causing this to not even work at all
|
Python
|
mit
|
stripe/datadog-checks,stripe/stripe-datadog-checks
|
# project
from checks import AgentCheck
VM_COUNTS = {
'pgpgin': 'pages.in',
'pgpgout': 'pages.out',
'pswpin': 'pages.swapped_in',
'pswpout': 'pages.swapped_out',
'pgfault': 'pages.faults',
'pgmajfault': 'pages.major_faults'
}
class MoreLinuxVMCheck(AgentCheck):
def check(self, instance):
tags = instance.get('tags', [])
enabled_metrics = instance.get('enabled_metrics', list(VM_COUNTS.keys()))
with open('/proc/vmstat', 'r') as vm_info:
content = [line.strip().split() for line in vm_info.readlines()]
for line in content:
if line[0] in VM_COUNTS:
self.monotonic_count("system.linux.vm.{0}".format(VM_COUNTS[line[0]]), int(line[1]), tags=tags)
Fix bug that was causing this to not even work at all
|
# project
from checks import AgentCheck
VM_COUNTS = {
'pgpgin': 'pages.in',
'pgpgout': 'pages.out',
'pswpin': 'pages.swapped_in',
'pswpout': 'pages.swapped_out',
'pgfault': 'pages.faults',
'pgmajfault': 'pages.major_faults'
}
class MoreLinuxVMCheck(AgentCheck):
def check(self, instance):
tags = instance.get('tags', [])
enabled_metrics = instance.get('enabled_metrics', list(VM_COUNTS.keys()))
with open('/proc/vmstat', 'r') as vm_info:
content = [line.strip().split() for line in vm_info.readlines()]
for line in content:
if line[0] in enabled_metrics:
self.monotonic_count("system.linux.vm.{0}".format(VM_COUNTS[line[0]]), int(line[1]), tags=tags)
|
<commit_before># project
from checks import AgentCheck
VM_COUNTS = {
'pgpgin': 'pages.in',
'pgpgout': 'pages.out',
'pswpin': 'pages.swapped_in',
'pswpout': 'pages.swapped_out',
'pgfault': 'pages.faults',
'pgmajfault': 'pages.major_faults'
}
class MoreLinuxVMCheck(AgentCheck):
def check(self, instance):
tags = instance.get('tags', [])
enabled_metrics = instance.get('enabled_metrics', list(VM_COUNTS.keys()))
with open('/proc/vmstat', 'r') as vm_info:
content = [line.strip().split() for line in vm_info.readlines()]
for line in content:
if line[0] in VM_COUNTS:
self.monotonic_count("system.linux.vm.{0}".format(VM_COUNTS[line[0]]), int(line[1]), tags=tags)
<commit_msg>Fix bug that was causing this to not even work at all<commit_after>
|
# project
from checks import AgentCheck
VM_COUNTS = {
'pgpgin': 'pages.in',
'pgpgout': 'pages.out',
'pswpin': 'pages.swapped_in',
'pswpout': 'pages.swapped_out',
'pgfault': 'pages.faults',
'pgmajfault': 'pages.major_faults'
}
class MoreLinuxVMCheck(AgentCheck):
def check(self, instance):
tags = instance.get('tags', [])
enabled_metrics = instance.get('enabled_metrics', list(VM_COUNTS.keys()))
with open('/proc/vmstat', 'r') as vm_info:
content = [line.strip().split() for line in vm_info.readlines()]
for line in content:
if line[0] in enabled_metrics:
self.monotonic_count("system.linux.vm.{0}".format(VM_COUNTS[line[0]]), int(line[1]), tags=tags)
|
# project
from checks import AgentCheck
VM_COUNTS = {
'pgpgin': 'pages.in',
'pgpgout': 'pages.out',
'pswpin': 'pages.swapped_in',
'pswpout': 'pages.swapped_out',
'pgfault': 'pages.faults',
'pgmajfault': 'pages.major_faults'
}
class MoreLinuxVMCheck(AgentCheck):
def check(self, instance):
tags = instance.get('tags', [])
enabled_metrics = instance.get('enabled_metrics', list(VM_COUNTS.keys()))
with open('/proc/vmstat', 'r') as vm_info:
content = [line.strip().split() for line in vm_info.readlines()]
for line in content:
if line[0] in VM_COUNTS:
self.monotonic_count("system.linux.vm.{0}".format(VM_COUNTS[line[0]]), int(line[1]), tags=tags)
Fix bug that was causing this to not even work at all# project
from checks import AgentCheck
VM_COUNTS = {
'pgpgin': 'pages.in',
'pgpgout': 'pages.out',
'pswpin': 'pages.swapped_in',
'pswpout': 'pages.swapped_out',
'pgfault': 'pages.faults',
'pgmajfault': 'pages.major_faults'
}
class MoreLinuxVMCheck(AgentCheck):
def check(self, instance):
tags = instance.get('tags', [])
enabled_metrics = instance.get('enabled_metrics', list(VM_COUNTS.keys()))
with open('/proc/vmstat', 'r') as vm_info:
content = [line.strip().split() for line in vm_info.readlines()]
for line in content:
if line[0] in enabled_metrics:
self.monotonic_count("system.linux.vm.{0}".format(VM_COUNTS[line[0]]), int(line[1]), tags=tags)
|
<commit_before># project
from checks import AgentCheck
VM_COUNTS = {
'pgpgin': 'pages.in',
'pgpgout': 'pages.out',
'pswpin': 'pages.swapped_in',
'pswpout': 'pages.swapped_out',
'pgfault': 'pages.faults',
'pgmajfault': 'pages.major_faults'
}
class MoreLinuxVMCheck(AgentCheck):
def check(self, instance):
tags = instance.get('tags', [])
enabled_metrics = instance.get('enabled_metrics', list(VM_COUNTS.keys()))
with open('/proc/vmstat', 'r') as vm_info:
content = [line.strip().split() for line in vm_info.readlines()]
for line in content:
if line[0] in VM_COUNTS:
self.monotonic_count("system.linux.vm.{0}".format(VM_COUNTS[line[0]]), int(line[1]), tags=tags)
<commit_msg>Fix bug that was causing this to not even work at all<commit_after># project
from checks import AgentCheck
VM_COUNTS = {
'pgpgin': 'pages.in',
'pgpgout': 'pages.out',
'pswpin': 'pages.swapped_in',
'pswpout': 'pages.swapped_out',
'pgfault': 'pages.faults',
'pgmajfault': 'pages.major_faults'
}
class MoreLinuxVMCheck(AgentCheck):
def check(self, instance):
tags = instance.get('tags', [])
enabled_metrics = instance.get('enabled_metrics', list(VM_COUNTS.keys()))
with open('/proc/vmstat', 'r') as vm_info:
content = [line.strip().split() for line in vm_info.readlines()]
for line in content:
if line[0] in enabled_metrics:
self.monotonic_count("system.linux.vm.{0}".format(VM_COUNTS[line[0]]), int(line[1]), tags=tags)
|
3ec54fbabd6f17eabb90ead66a87bc2723a00aa0
|
mvw/generator.py
|
mvw/generator.py
|
import os
class Generator:
def run(self, sourcedir, outputdir):
sourcedir = os.path.normpath(sourcedir)
outputdir = os.path.normpath(outputdir)
prefix = len(sourcedir)+len(os.path.sep)
for root, dirs, files in os.walk(sourcedir):
destpath = os.path.join(outputdir, root[prefix:])
print()
print('-'*25)
print('Pages')
for f in files:
src = os.path.join(root, f)
base, ext = os.path.splitext(f)
if ext in ['.md', '.markdown']:
dest = os.path.join(destpath, "%s%s" % (base, '.html'))
self.parse(src, dest)
else:
dest = os.path.join(destpath, f)
self.copy(src, dest)
print('-'*25)
print('Dirs')
for d in dirs:
print(os.path.join(destpath, d))
def parse(self, source, destination):
print("Parse Source: %s Destination: %s" % (source, destination))
def copy(self, source, destination):
print("Copy Source: %s Destination: %s" % (source, destination))
|
import os
import shutil
class Generator:
def run(self, sourcedir, outputdir):
sourcedir = os.path.normpath(sourcedir)
outputdir = os.path.normpath(outputdir)
prefix = len(sourcedir)+len(os.path.sep)
for root, dirs, files in os.walk(sourcedir):
destpath = os.path.join(outputdir, root[prefix:])
if not os.path.exists(destpath):
os.makedirs(destpath)
print()
print('-'*25)
print('Pages')
for f in files:
src = os.path.join(root, f)
base, ext = os.path.splitext(f)
if ext in ['.md', '.markdown']:
dest = os.path.join(destpath, "%s%s" % (base, '.html'))
self.parse(src, dest)
else:
dest = os.path.join(destpath, f)
shutil.copy(src, dest)
print('-'*25)
print('Dirs')
for d in dirs:
print(os.path.join(destpath, d))
def parse(self, source, destination):
print("Parse Source: %s Destination: %s" % (source, destination))
|
Create destination path if it does no exist and copy file using shutil
|
Create destination path if it does no exist and copy file using shutil
|
Python
|
mit
|
kevinbeaty/mvw
|
import os
class Generator:
def run(self, sourcedir, outputdir):
sourcedir = os.path.normpath(sourcedir)
outputdir = os.path.normpath(outputdir)
prefix = len(sourcedir)+len(os.path.sep)
for root, dirs, files in os.walk(sourcedir):
destpath = os.path.join(outputdir, root[prefix:])
print()
print('-'*25)
print('Pages')
for f in files:
src = os.path.join(root, f)
base, ext = os.path.splitext(f)
if ext in ['.md', '.markdown']:
dest = os.path.join(destpath, "%s%s" % (base, '.html'))
self.parse(src, dest)
else:
dest = os.path.join(destpath, f)
self.copy(src, dest)
print('-'*25)
print('Dirs')
for d in dirs:
print(os.path.join(destpath, d))
def parse(self, source, destination):
print("Parse Source: %s Destination: %s" % (source, destination))
def copy(self, source, destination):
print("Copy Source: %s Destination: %s" % (source, destination))
Create destination path if it does no exist and copy file using shutil
|
import os
import shutil
class Generator:
def run(self, sourcedir, outputdir):
sourcedir = os.path.normpath(sourcedir)
outputdir = os.path.normpath(outputdir)
prefix = len(sourcedir)+len(os.path.sep)
for root, dirs, files in os.walk(sourcedir):
destpath = os.path.join(outputdir, root[prefix:])
if not os.path.exists(destpath):
os.makedirs(destpath)
print()
print('-'*25)
print('Pages')
for f in files:
src = os.path.join(root, f)
base, ext = os.path.splitext(f)
if ext in ['.md', '.markdown']:
dest = os.path.join(destpath, "%s%s" % (base, '.html'))
self.parse(src, dest)
else:
dest = os.path.join(destpath, f)
shutil.copy(src, dest)
print('-'*25)
print('Dirs')
for d in dirs:
print(os.path.join(destpath, d))
def parse(self, source, destination):
print("Parse Source: %s Destination: %s" % (source, destination))
|
<commit_before>import os
class Generator:
def run(self, sourcedir, outputdir):
sourcedir = os.path.normpath(sourcedir)
outputdir = os.path.normpath(outputdir)
prefix = len(sourcedir)+len(os.path.sep)
for root, dirs, files in os.walk(sourcedir):
destpath = os.path.join(outputdir, root[prefix:])
print()
print('-'*25)
print('Pages')
for f in files:
src = os.path.join(root, f)
base, ext = os.path.splitext(f)
if ext in ['.md', '.markdown']:
dest = os.path.join(destpath, "%s%s" % (base, '.html'))
self.parse(src, dest)
else:
dest = os.path.join(destpath, f)
self.copy(src, dest)
print('-'*25)
print('Dirs')
for d in dirs:
print(os.path.join(destpath, d))
def parse(self, source, destination):
print("Parse Source: %s Destination: %s" % (source, destination))
def copy(self, source, destination):
print("Copy Source: %s Destination: %s" % (source, destination))
<commit_msg>Create destination path if it does no exist and copy file using shutil<commit_after>
|
import os
import shutil
class Generator:
def run(self, sourcedir, outputdir):
sourcedir = os.path.normpath(sourcedir)
outputdir = os.path.normpath(outputdir)
prefix = len(sourcedir)+len(os.path.sep)
for root, dirs, files in os.walk(sourcedir):
destpath = os.path.join(outputdir, root[prefix:])
if not os.path.exists(destpath):
os.makedirs(destpath)
print()
print('-'*25)
print('Pages')
for f in files:
src = os.path.join(root, f)
base, ext = os.path.splitext(f)
if ext in ['.md', '.markdown']:
dest = os.path.join(destpath, "%s%s" % (base, '.html'))
self.parse(src, dest)
else:
dest = os.path.join(destpath, f)
shutil.copy(src, dest)
print('-'*25)
print('Dirs')
for d in dirs:
print(os.path.join(destpath, d))
def parse(self, source, destination):
print("Parse Source: %s Destination: %s" % (source, destination))
|
import os
class Generator:
def run(self, sourcedir, outputdir):
sourcedir = os.path.normpath(sourcedir)
outputdir = os.path.normpath(outputdir)
prefix = len(sourcedir)+len(os.path.sep)
for root, dirs, files in os.walk(sourcedir):
destpath = os.path.join(outputdir, root[prefix:])
print()
print('-'*25)
print('Pages')
for f in files:
src = os.path.join(root, f)
base, ext = os.path.splitext(f)
if ext in ['.md', '.markdown']:
dest = os.path.join(destpath, "%s%s" % (base, '.html'))
self.parse(src, dest)
else:
dest = os.path.join(destpath, f)
self.copy(src, dest)
print('-'*25)
print('Dirs')
for d in dirs:
print(os.path.join(destpath, d))
def parse(self, source, destination):
print("Parse Source: %s Destination: %s" % (source, destination))
def copy(self, source, destination):
print("Copy Source: %s Destination: %s" % (source, destination))
Create destination path if it does no exist and copy file using shutilimport os
import shutil
class Generator:
def run(self, sourcedir, outputdir):
sourcedir = os.path.normpath(sourcedir)
outputdir = os.path.normpath(outputdir)
prefix = len(sourcedir)+len(os.path.sep)
for root, dirs, files in os.walk(sourcedir):
destpath = os.path.join(outputdir, root[prefix:])
if not os.path.exists(destpath):
os.makedirs(destpath)
print()
print('-'*25)
print('Pages')
for f in files:
src = os.path.join(root, f)
base, ext = os.path.splitext(f)
if ext in ['.md', '.markdown']:
dest = os.path.join(destpath, "%s%s" % (base, '.html'))
self.parse(src, dest)
else:
dest = os.path.join(destpath, f)
shutil.copy(src, dest)
print('-'*25)
print('Dirs')
for d in dirs:
print(os.path.join(destpath, d))
def parse(self, source, destination):
print("Parse Source: %s Destination: %s" % (source, destination))
|
<commit_before>import os
class Generator:
def run(self, sourcedir, outputdir):
sourcedir = os.path.normpath(sourcedir)
outputdir = os.path.normpath(outputdir)
prefix = len(sourcedir)+len(os.path.sep)
for root, dirs, files in os.walk(sourcedir):
destpath = os.path.join(outputdir, root[prefix:])
print()
print('-'*25)
print('Pages')
for f in files:
src = os.path.join(root, f)
base, ext = os.path.splitext(f)
if ext in ['.md', '.markdown']:
dest = os.path.join(destpath, "%s%s" % (base, '.html'))
self.parse(src, dest)
else:
dest = os.path.join(destpath, f)
self.copy(src, dest)
print('-'*25)
print('Dirs')
for d in dirs:
print(os.path.join(destpath, d))
def parse(self, source, destination):
print("Parse Source: %s Destination: %s" % (source, destination))
def copy(self, source, destination):
print("Copy Source: %s Destination: %s" % (source, destination))
<commit_msg>Create destination path if it does no exist and copy file using shutil<commit_after>import os
import shutil
class Generator:
def run(self, sourcedir, outputdir):
sourcedir = os.path.normpath(sourcedir)
outputdir = os.path.normpath(outputdir)
prefix = len(sourcedir)+len(os.path.sep)
for root, dirs, files in os.walk(sourcedir):
destpath = os.path.join(outputdir, root[prefix:])
if not os.path.exists(destpath):
os.makedirs(destpath)
print()
print('-'*25)
print('Pages')
for f in files:
src = os.path.join(root, f)
base, ext = os.path.splitext(f)
if ext in ['.md', '.markdown']:
dest = os.path.join(destpath, "%s%s" % (base, '.html'))
self.parse(src, dest)
else:
dest = os.path.join(destpath, f)
shutil.copy(src, dest)
print('-'*25)
print('Dirs')
for d in dirs:
print(os.path.join(destpath, d))
def parse(self, source, destination):
print("Parse Source: %s Destination: %s" % (source, destination))
|
ebec31105582235c8aa74e9bbfd608b9bf103ad1
|
calico_containers/tests/st/utils.py
|
calico_containers/tests/st/utils.py
|
import sh
from sh import docker
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
intf = sh.ifconfig.eth0()
return sh.perl(intf, "-ne", 's/dr:(\S+)/print $1/e')
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
import sh
from sh import docker
import socket
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
Use socket connection to get own IP.
|
Use socket connection to get own IP.
|
Python
|
apache-2.0
|
L-MA/calico-docker,webwurst/calico-docker,TrimBiggs/calico-containers,quater/calico-containers,quater/calico-containers,robbrockbank/calicoctl,caseydavenport/calico-containers,caseydavenport/calico-docker,TrimBiggs/calico-containers,projectcalico/calico-containers,projectcalico/calico-containers,robbrockbank/calico-containers,robbrockbank/calico-docker,TeaBough/calico-docker,CiscoCloud/calico-docker,fasaxc/calicoctl,caseydavenport/calico-containers,insequent/calico-docker,tomdee/calico-docker,caseydavenport/calico-docker,Symmetric/calico-docker,alexhersh/calico-docker,projectcalico/calico-docker,tomdee/calico-containers,webwurst/calico-docker,projectcalico/calico-containers,TrimBiggs/calico-docker,fasaxc/calicoctl,fasaxc/calico-docker,Symmetric/calico-docker,tomdee/calico-containers,TeaBough/calico-docker,robbrockbank/calico-docker,CiscoCloud/calico-docker,Metaswitch/calico-docker,robbrockbank/calico-containers,johscheuer/calico-docker,TrimBiggs/calico-docker,alexhersh/calico-docker,L-MA/calico-docker,frostynova/calico-docker,caseydavenport/calico-containers,robbrockbank/calicoctl,projectcalico/calico-docker,frostynova/calico-docker,dalanlan/calico-docker,fasaxc/calico-docker,johscheuer/calico-docker,insequent/calico-docker,tomdee/calico-docker,dalanlan/calico-docker,Metaswitch/calico-docker
|
import sh
from sh import docker
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
intf = sh.ifconfig.eth0()
return sh.perl(intf, "-ne", 's/dr:(\S+)/print $1/e')
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
Use socket connection to get own IP.
|
import sh
from sh import docker
import socket
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
<commit_before>import sh
from sh import docker
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
intf = sh.ifconfig.eth0()
return sh.perl(intf, "-ne", 's/dr:(\S+)/print $1/e')
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
<commit_msg>Use socket connection to get own IP.<commit_after>
|
import sh
from sh import docker
import socket
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
import sh
from sh import docker
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
intf = sh.ifconfig.eth0()
return sh.perl(intf, "-ne", 's/dr:(\S+)/print $1/e')
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
Use socket connection to get own IP.import sh
from sh import docker
import socket
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
<commit_before>import sh
from sh import docker
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
intf = sh.ifconfig.eth0()
return sh.perl(intf, "-ne", 's/dr:(\S+)/print $1/e')
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
<commit_msg>Use socket connection to get own IP.<commit_after>import sh
from sh import docker
import socket
def get_ip():
"""Return a string of the IP of the hosts eth0 interface."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
def cleanup_inside(name):
"""
Clean the inside of a container by deleting the containers and images within it.
"""
docker("exec", "-t", name, "bash", "-c",
"docker rm -f $(docker ps -qa) ; docker rmi $(docker images -qa)",
_ok_code=[0, 1, 255]) # 255 is; "bash": executable file not found in $PATH
def delete_container(name):
"""
Cleanly delete a container.
"""
# We *must* remove all inner containers and images before removing the outer
# container. Otherwise the inner images will stick around and fill disk.
# https://github.com/jpetazzo/dind#important-warning-about-disk-usage
cleanup_inside(name)
sh.docker.rm("-f", name, _ok_code=[0, 1])
|
efa2feb1e6a4a67c4e6410cacb823f2110fd95a5
|
cla_public/config/docker.py
|
cla_public/config/docker.py
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
# TODO - change this to True when serving over HTTPS
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ['HOST_NAME']
BACKEND_API = {
'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/'
}
if DEBUG:
LOGGING['handlers']['debug_file'] = {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/debug.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'verbose'}
LOGGING['loggers'] = {
'': {
'handlers': ['debug_file'],
'level': 'DEBUG'
}
}
else:
LOGGING['handlers']['production_file'] = {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/app.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'logstash'}
LOGGING['loggers'] = {
'': {
'handlers': ['production_file'],
'level': 'DEBUG'
}
}
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
# TODO - change this to True when serving over HTTPS
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_API = {
'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/'
}
if DEBUG:
LOGGING['handlers']['debug_file'] = {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/debug.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'verbose'}
LOGGING['loggers'] = {
'': {
'handlers': ['debug_file'],
'level': 'DEBUG'
}
}
else:
LOGGING['handlers']['production_file'] = {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/app.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'logstash'}
LOGGING['loggers'] = {
'': {
'handlers': ['production_file'],
'level': 'DEBUG'
}
}
|
Test for HOST_NAME or HOSTNAME env var
|
Test for HOST_NAME or HOSTNAME env var
|
Python
|
mit
|
ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
# TODO - change this to True when serving over HTTPS
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ['HOST_NAME']
BACKEND_API = {
'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/'
}
if DEBUG:
LOGGING['handlers']['debug_file'] = {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/debug.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'verbose'}
LOGGING['loggers'] = {
'': {
'handlers': ['debug_file'],
'level': 'DEBUG'
}
}
else:
LOGGING['handlers']['production_file'] = {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/app.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'logstash'}
LOGGING['loggers'] = {
'': {
'handlers': ['production_file'],
'level': 'DEBUG'
}
}
Test for HOST_NAME or HOSTNAME env var
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
# TODO - change this to True when serving over HTTPS
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_API = {
'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/'
}
if DEBUG:
LOGGING['handlers']['debug_file'] = {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/debug.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'verbose'}
LOGGING['loggers'] = {
'': {
'handlers': ['debug_file'],
'level': 'DEBUG'
}
}
else:
LOGGING['handlers']['production_file'] = {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/app.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'logstash'}
LOGGING['loggers'] = {
'': {
'handlers': ['production_file'],
'level': 'DEBUG'
}
}
|
<commit_before>from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
# TODO - change this to True when serving over HTTPS
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ['HOST_NAME']
BACKEND_API = {
'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/'
}
if DEBUG:
LOGGING['handlers']['debug_file'] = {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/debug.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'verbose'}
LOGGING['loggers'] = {
'': {
'handlers': ['debug_file'],
'level': 'DEBUG'
}
}
else:
LOGGING['handlers']['production_file'] = {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/app.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'logstash'}
LOGGING['loggers'] = {
'': {
'handlers': ['production_file'],
'level': 'DEBUG'
}
}
<commit_msg>Test for HOST_NAME or HOSTNAME env var<commit_after>
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
# TODO - change this to True when serving over HTTPS
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_API = {
'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/'
}
if DEBUG:
LOGGING['handlers']['debug_file'] = {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/debug.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'verbose'}
LOGGING['loggers'] = {
'': {
'handlers': ['debug_file'],
'level': 'DEBUG'
}
}
else:
LOGGING['handlers']['production_file'] = {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/app.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'logstash'}
LOGGING['loggers'] = {
'': {
'handlers': ['production_file'],
'level': 'DEBUG'
}
}
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
# TODO - change this to True when serving over HTTPS
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ['HOST_NAME']
BACKEND_API = {
'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/'
}
if DEBUG:
LOGGING['handlers']['debug_file'] = {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/debug.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'verbose'}
LOGGING['loggers'] = {
'': {
'handlers': ['debug_file'],
'level': 'DEBUG'
}
}
else:
LOGGING['handlers']['production_file'] = {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/app.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'logstash'}
LOGGING['loggers'] = {
'': {
'handlers': ['production_file'],
'level': 'DEBUG'
}
}
Test for HOST_NAME or HOSTNAME env varfrom cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
# TODO - change this to True when serving over HTTPS
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_API = {
'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/'
}
if DEBUG:
LOGGING['handlers']['debug_file'] = {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/debug.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'verbose'}
LOGGING['loggers'] = {
'': {
'handlers': ['debug_file'],
'level': 'DEBUG'
}
}
else:
LOGGING['handlers']['production_file'] = {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/app.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'logstash'}
LOGGING['loggers'] = {
'': {
'handlers': ['production_file'],
'level': 'DEBUG'
}
}
|
<commit_before>from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
# TODO - change this to True when serving over HTTPS
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ['HOST_NAME']
BACKEND_API = {
'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/'
}
if DEBUG:
LOGGING['handlers']['debug_file'] = {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/debug.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'verbose'}
LOGGING['loggers'] = {
'': {
'handlers': ['debug_file'],
'level': 'DEBUG'
}
}
else:
LOGGING['handlers']['production_file'] = {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/app.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'logstash'}
LOGGING['loggers'] = {
'': {
'handlers': ['production_file'],
'level': 'DEBUG'
}
}
<commit_msg>Test for HOST_NAME or HOSTNAME env var<commit_after>from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
# TODO - change this to True when serving over HTTPS
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_API = {
'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/'
}
if DEBUG:
LOGGING['handlers']['debug_file'] = {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/debug.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'verbose'}
LOGGING['loggers'] = {
'': {
'handlers': ['debug_file'],
'level': 'DEBUG'
}
}
else:
LOGGING['handlers']['production_file'] = {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': '/var/log/wsgi/app.log',
'maxBytes': 1024 * 1024 * 5, # 5MB
'backupCount': 7,
'formatter': 'logstash'}
LOGGING['loggers'] = {
'': {
'handlers': ['production_file'],
'level': 'DEBUG'
}
}
|
3e3ead7d9eb05c2fd713d83510cf08b46cf21f15
|
skimage/viewer/qt/QtCore.py
|
skimage/viewer/qt/QtCore.py
|
from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
|
from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
pyqtSignal = None
|
Add mock pyqtSignal to try to get Travis to build
|
Add mock pyqtSignal to try to get Travis to build
|
Python
|
bsd-3-clause
|
Britefury/scikit-image,oew1v07/scikit-image,almarklein/scikit-image,Hiyorimi/scikit-image,GaZ3ll3/scikit-image,Hiyorimi/scikit-image,dpshelio/scikit-image,ofgulban/scikit-image,ClinicalGraphics/scikit-image,paalge/scikit-image,vighneshbirodkar/scikit-image,youprofit/scikit-image,jwiggins/scikit-image,chriscrosscutler/scikit-image,oew1v07/scikit-image,chintak/scikit-image,SamHames/scikit-image,rjeli/scikit-image,Britefury/scikit-image,michaelaye/scikit-image,emon10005/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,blink1073/scikit-image,rjeli/scikit-image,robintw/scikit-image,dpshelio/scikit-image,michaelpacer/scikit-image,michaelaye/scikit-image,michaelpacer/scikit-image,Midafi/scikit-image,ajaybhat/scikit-image,jwiggins/scikit-image,bsipocz/scikit-image,ClinicalGraphics/scikit-image,rjeli/scikit-image,almarklein/scikit-image,juliusbierk/scikit-image,youprofit/scikit-image,pratapvardhan/scikit-image,bennlich/scikit-image,bsipocz/scikit-image,emon10005/scikit-image,Midafi/scikit-image,GaZ3ll3/scikit-image,paalge/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,warmspringwinds/scikit-image,WarrenWeckesser/scikits-image,juliusbierk/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,warmspringwinds/scikit-image,robintw/scikit-image,chintak/scikit-image,SamHames/scikit-image,bennlich/scikit-image,keflavich/scikit-image,pratapvardhan/scikit-image,newville/scikit-image,almarklein/scikit-image,chintak/scikit-image,chintak/scikit-image,ajaybhat/scikit-image,newville/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,SamHames/scikit-image,blink1073/scikit-image,ofgulban/scikit-image
|
from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
Add mock pyqtSignal to try to get Travis to build
|
from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
pyqtSignal = None
|
<commit_before>from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
<commit_msg>Add mock pyqtSignal to try to get Travis to build<commit_after>
|
from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
pyqtSignal = None
|
from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
Add mock pyqtSignal to try to get Travis to buildfrom . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
pyqtSignal = None
|
<commit_before>from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
<commit_msg>Add mock pyqtSignal to try to get Travis to build<commit_after>from . import qt_api
if qt_api == 'pyside':
from PySide.QtCore import *
elif qt_api == 'pyqt':
from PyQt4.QtCore import *
else:
# Mock objects
Qt = None
pyqtSignal = None
|
aa974a2d12020e324db222b022594d9e489e559f
|
convert.py
|
convert.py
|
import argparse
import numpy as np
from PIL import Image
lookup = " .,:-?X#"
def image_to_ascii(image):
"""
PIL image object -> 2d array of values
"""
quantised = image.quantize(len(lookup))
quantised.show()
array = np.asarray(quantised.resize((128,64)))
return [[lookup[k] for k in i] for i in array]
def convert_file(fn):
converted = ""
try:
image = Image.open(fn)
converted = image_to_ascii(image)
except Exception as e:
print e.message
return converted
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename",
help="Convert this file to ASCII art",
required=True)
args = parser.parse_args()
print args
filename = args.filename
converted = convert_file(filename)
print '\n'.join(''.join(i) for i in converted)
|
import argparse
import numpy as np
from PIL import Image
lookup = " .,:-!?X#"
def image_to_ascii(image, width=128):
"""
PIL image object -> 2d array of values
"""
def scale_height(h, w, new_width):
print "original height: {}".format(h)
print "original width: {}".format(w)
print "new width: {}".format(new_width)
conversion_factor = float(1.0 * new_width / w)
print "conversion factor {}".format(conversion_factor)
new_height = (h * conversion_factor) / 2.0
print "new height: {}".format(new_height)
return int(new_height)
quantised = image.quantize(len(lookup))
#quantised.show()
array = np.asarray(quantised.resize((width, scale_height(image.height, image.width, width))))
return [[lookup[k] for k in i] for i in array]
def convert_file(fn):
converted = ""
try:
image = Image.open(fn)
converted = image_to_ascii(image)
except Exception as e:
print e.message
return converted
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename",
help="Convert this file to ASCII art",
required=True)
args = parser.parse_args()
print args
filename = args.filename
converted = convert_file(filename)
print '\n'.join(''.join(i) for i in converted)
|
CORRECT height/width conversion this time around
|
CORRECT height/width conversion this time around
|
Python
|
mit
|
machineperson/fantastic-doodle
|
import argparse
import numpy as np
from PIL import Image
lookup = " .,:-?X#"
def image_to_ascii(image):
"""
PIL image object -> 2d array of values
"""
quantised = image.quantize(len(lookup))
quantised.show()
array = np.asarray(quantised.resize((128,64)))
return [[lookup[k] for k in i] for i in array]
def convert_file(fn):
converted = ""
try:
image = Image.open(fn)
converted = image_to_ascii(image)
except Exception as e:
print e.message
return converted
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename",
help="Convert this file to ASCII art",
required=True)
args = parser.parse_args()
print args
filename = args.filename
converted = convert_file(filename)
print '\n'.join(''.join(i) for i in converted)
CORRECT height/width conversion this time around
|
import argparse
import numpy as np
from PIL import Image
lookup = " .,:-!?X#"
def image_to_ascii(image, width=128):
"""
PIL image object -> 2d array of values
"""
def scale_height(h, w, new_width):
print "original height: {}".format(h)
print "original width: {}".format(w)
print "new width: {}".format(new_width)
conversion_factor = float(1.0 * new_width / w)
print "conversion factor {}".format(conversion_factor)
new_height = (h * conversion_factor) / 2.0
print "new height: {}".format(new_height)
return int(new_height)
quantised = image.quantize(len(lookup))
#quantised.show()
array = np.asarray(quantised.resize((width, scale_height(image.height, image.width, width))))
return [[lookup[k] for k in i] for i in array]
def convert_file(fn):
converted = ""
try:
image = Image.open(fn)
converted = image_to_ascii(image)
except Exception as e:
print e.message
return converted
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename",
help="Convert this file to ASCII art",
required=True)
args = parser.parse_args()
print args
filename = args.filename
converted = convert_file(filename)
print '\n'.join(''.join(i) for i in converted)
|
<commit_before>import argparse
import numpy as np
from PIL import Image
lookup = " .,:-?X#"
def image_to_ascii(image):
"""
PIL image object -> 2d array of values
"""
quantised = image.quantize(len(lookup))
quantised.show()
array = np.asarray(quantised.resize((128,64)))
return [[lookup[k] for k in i] for i in array]
def convert_file(fn):
converted = ""
try:
image = Image.open(fn)
converted = image_to_ascii(image)
except Exception as e:
print e.message
return converted
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename",
help="Convert this file to ASCII art",
required=True)
args = parser.parse_args()
print args
filename = args.filename
converted = convert_file(filename)
print '\n'.join(''.join(i) for i in converted)
<commit_msg>CORRECT height/width conversion this time around<commit_after>
|
import argparse
import numpy as np
from PIL import Image
lookup = " .,:-!?X#"
def image_to_ascii(image, width=128):
"""
PIL image object -> 2d array of values
"""
def scale_height(h, w, new_width):
print "original height: {}".format(h)
print "original width: {}".format(w)
print "new width: {}".format(new_width)
conversion_factor = float(1.0 * new_width / w)
print "conversion factor {}".format(conversion_factor)
new_height = (h * conversion_factor) / 2.0
print "new height: {}".format(new_height)
return int(new_height)
quantised = image.quantize(len(lookup))
#quantised.show()
array = np.asarray(quantised.resize((width, scale_height(image.height, image.width, width))))
return [[lookup[k] for k in i] for i in array]
def convert_file(fn):
converted = ""
try:
image = Image.open(fn)
converted = image_to_ascii(image)
except Exception as e:
print e.message
return converted
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename",
help="Convert this file to ASCII art",
required=True)
args = parser.parse_args()
print args
filename = args.filename
converted = convert_file(filename)
print '\n'.join(''.join(i) for i in converted)
|
import argparse
import numpy as np
from PIL import Image
lookup = " .,:-?X#"
def image_to_ascii(image):
"""
PIL image object -> 2d array of values
"""
quantised = image.quantize(len(lookup))
quantised.show()
array = np.asarray(quantised.resize((128,64)))
return [[lookup[k] for k in i] for i in array]
def convert_file(fn):
converted = ""
try:
image = Image.open(fn)
converted = image_to_ascii(image)
except Exception as e:
print e.message
return converted
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename",
help="Convert this file to ASCII art",
required=True)
args = parser.parse_args()
print args
filename = args.filename
converted = convert_file(filename)
print '\n'.join(''.join(i) for i in converted)
CORRECT height/width conversion this time aroundimport argparse
import numpy as np
from PIL import Image
lookup = " .,:-!?X#"
def image_to_ascii(image, width=128):
"""
PIL image object -> 2d array of values
"""
def scale_height(h, w, new_width):
print "original height: {}".format(h)
print "original width: {}".format(w)
print "new width: {}".format(new_width)
conversion_factor = float(1.0 * new_width / w)
print "conversion factor {}".format(conversion_factor)
new_height = (h * conversion_factor) / 2.0
print "new height: {}".format(new_height)
return int(new_height)
quantised = image.quantize(len(lookup))
#quantised.show()
array = np.asarray(quantised.resize((width, scale_height(image.height, image.width, width))))
return [[lookup[k] for k in i] for i in array]
def convert_file(fn):
converted = ""
try:
image = Image.open(fn)
converted = image_to_ascii(image)
except Exception as e:
print e.message
return converted
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename",
help="Convert this file to ASCII art",
required=True)
args = parser.parse_args()
print args
filename = args.filename
converted = convert_file(filename)
print '\n'.join(''.join(i) for i in converted)
|
<commit_before>import argparse
import numpy as np
from PIL import Image
lookup = " .,:-?X#"
def image_to_ascii(image):
"""
PIL image object -> 2d array of values
"""
quantised = image.quantize(len(lookup))
quantised.show()
array = np.asarray(quantised.resize((128,64)))
return [[lookup[k] for k in i] for i in array]
def convert_file(fn):
converted = ""
try:
image = Image.open(fn)
converted = image_to_ascii(image)
except Exception as e:
print e.message
return converted
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename",
help="Convert this file to ASCII art",
required=True)
args = parser.parse_args()
print args
filename = args.filename
converted = convert_file(filename)
print '\n'.join(''.join(i) for i in converted)
<commit_msg>CORRECT height/width conversion this time around<commit_after>import argparse
import numpy as np
from PIL import Image
lookup = " .,:-!?X#"
def image_to_ascii(image, width=128):
"""
PIL image object -> 2d array of values
"""
def scale_height(h, w, new_width):
print "original height: {}".format(h)
print "original width: {}".format(w)
print "new width: {}".format(new_width)
conversion_factor = float(1.0 * new_width / w)
print "conversion factor {}".format(conversion_factor)
new_height = (h * conversion_factor) / 2.0
print "new height: {}".format(new_height)
return int(new_height)
quantised = image.quantize(len(lookup))
#quantised.show()
array = np.asarray(quantised.resize((width, scale_height(image.height, image.width, width))))
return [[lookup[k] for k in i] for i in array]
def convert_file(fn):
converted = ""
try:
image = Image.open(fn)
converted = image_to_ascii(image)
except Exception as e:
print e.message
return converted
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--filename",
help="Convert this file to ASCII art",
required=True)
args = parser.parse_args()
print args
filename = args.filename
converted = convert_file(filename)
print '\n'.join(''.join(i) for i in converted)
|
d4c8b0f15cd1694b84f8dab7936571d9f9bca42f
|
tests/people/test_managers.py
|
tests/people/test_managers.py
|
import pytest
from components.people.factories import IdolFactory
from components.people.models import Idol
from components.people.constants import STATUS
pytestmark = pytest.mark.django_db
class TestIdols:
@pytest.fixture
def idols(self):
idols = []
[idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
[idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
return idols
def test_active_manager(self, idols):
assert len(Idol.objects.active()) == 3
def test_inactive_manager(self, idols):
assert len(Idol.objects.inactive()) == 2
|
# -*- coding: utf-8 -*-
import datetime
import pytest
from components.people.constants import STATUS
from components.people.factories import GroupFactory, IdolFactory
from components.people.models import Group, Idol
pytestmark = pytest.mark.django_db
class TestIdols:
@pytest.fixture
def status(self):
idols = []
[idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
[idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
return idols
def test_active_manager(self, status):
assert len(Idol.objects.active()) == 3
def test_inactive_manager(self, status):
assert len(Idol.objects.inactive()) == 2
class TestGroups:
@pytest.fixture
def status(self):
groups = [
GroupFactory(),
GroupFactory(ended=datetime.date.today() + datetime.timedelta(days=30)),
GroupFactory(ended=datetime.date.today() - datetime.timedelta(days=30))
]
return groups
def test_active_manager(self, status):
assert len(Group.objects.active()) == 2
def test_inactive_manager(self, status):
assert len(Group.objects.inactive()) == 1
|
Test group managers. Rename idols() => status().
|
Test group managers. Rename idols() => status().
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
import pytest
from components.people.factories import IdolFactory
from components.people.models import Idol
from components.people.constants import STATUS
pytestmark = pytest.mark.django_db
class TestIdols:
@pytest.fixture
def idols(self):
idols = []
[idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
[idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
return idols
def test_active_manager(self, idols):
assert len(Idol.objects.active()) == 3
def test_inactive_manager(self, idols):
assert len(Idol.objects.inactive()) == 2
Test group managers. Rename idols() => status().
|
# -*- coding: utf-8 -*-
import datetime
import pytest
from components.people.constants import STATUS
from components.people.factories import GroupFactory, IdolFactory
from components.people.models import Group, Idol
pytestmark = pytest.mark.django_db
class TestIdols:
@pytest.fixture
def status(self):
idols = []
[idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
[idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
return idols
def test_active_manager(self, status):
assert len(Idol.objects.active()) == 3
def test_inactive_manager(self, status):
assert len(Idol.objects.inactive()) == 2
class TestGroups:
@pytest.fixture
def status(self):
groups = [
GroupFactory(),
GroupFactory(ended=datetime.date.today() + datetime.timedelta(days=30)),
GroupFactory(ended=datetime.date.today() - datetime.timedelta(days=30))
]
return groups
def test_active_manager(self, status):
assert len(Group.objects.active()) == 2
def test_inactive_manager(self, status):
assert len(Group.objects.inactive()) == 1
|
<commit_before>import pytest
from components.people.factories import IdolFactory
from components.people.models import Idol
from components.people.constants import STATUS
pytestmark = pytest.mark.django_db
class TestIdols:
@pytest.fixture
def idols(self):
idols = []
[idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
[idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
return idols
def test_active_manager(self, idols):
assert len(Idol.objects.active()) == 3
def test_inactive_manager(self, idols):
assert len(Idol.objects.inactive()) == 2
<commit_msg>Test group managers. Rename idols() => status().<commit_after>
|
# -*- coding: utf-8 -*-
import datetime
import pytest
from components.people.constants import STATUS
from components.people.factories import GroupFactory, IdolFactory
from components.people.models import Group, Idol
pytestmark = pytest.mark.django_db
class TestIdols:
@pytest.fixture
def status(self):
idols = []
[idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
[idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
return idols
def test_active_manager(self, status):
assert len(Idol.objects.active()) == 3
def test_inactive_manager(self, status):
assert len(Idol.objects.inactive()) == 2
class TestGroups:
@pytest.fixture
def status(self):
groups = [
GroupFactory(),
GroupFactory(ended=datetime.date.today() + datetime.timedelta(days=30)),
GroupFactory(ended=datetime.date.today() - datetime.timedelta(days=30))
]
return groups
def test_active_manager(self, status):
assert len(Group.objects.active()) == 2
def test_inactive_manager(self, status):
assert len(Group.objects.inactive()) == 1
|
import pytest
from components.people.factories import IdolFactory
from components.people.models import Idol
from components.people.constants import STATUS
pytestmark = pytest.mark.django_db
class TestIdols:
@pytest.fixture
def idols(self):
idols = []
[idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
[idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
return idols
def test_active_manager(self, idols):
assert len(Idol.objects.active()) == 3
def test_inactive_manager(self, idols):
assert len(Idol.objects.inactive()) == 2
Test group managers. Rename idols() => status().# -*- coding: utf-8 -*-
import datetime
import pytest
from components.people.constants import STATUS
from components.people.factories import GroupFactory, IdolFactory
from components.people.models import Group, Idol
pytestmark = pytest.mark.django_db
class TestIdols:
@pytest.fixture
def status(self):
idols = []
[idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
[idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
return idols
def test_active_manager(self, status):
assert len(Idol.objects.active()) == 3
def test_inactive_manager(self, status):
assert len(Idol.objects.inactive()) == 2
class TestGroups:
@pytest.fixture
def status(self):
groups = [
GroupFactory(),
GroupFactory(ended=datetime.date.today() + datetime.timedelta(days=30)),
GroupFactory(ended=datetime.date.today() - datetime.timedelta(days=30))
]
return groups
def test_active_manager(self, status):
assert len(Group.objects.active()) == 2
def test_inactive_manager(self, status):
assert len(Group.objects.inactive()) == 1
|
<commit_before>import pytest
from components.people.factories import IdolFactory
from components.people.models import Idol
from components.people.constants import STATUS
pytestmark = pytest.mark.django_db
class TestIdols:
@pytest.fixture
def idols(self):
idols = []
[idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
[idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
return idols
def test_active_manager(self, idols):
assert len(Idol.objects.active()) == 3
def test_inactive_manager(self, idols):
assert len(Idol.objects.inactive()) == 2
<commit_msg>Test group managers. Rename idols() => status().<commit_after># -*- coding: utf-8 -*-
import datetime
import pytest
from components.people.constants import STATUS
from components.people.factories import GroupFactory, IdolFactory
from components.people.models import Group, Idol
pytestmark = pytest.mark.django_db
class TestIdols:
@pytest.fixture
def status(self):
idols = []
[idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
[idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
return idols
def test_active_manager(self, status):
assert len(Idol.objects.active()) == 3
def test_inactive_manager(self, status):
assert len(Idol.objects.inactive()) == 2
class TestGroups:
@pytest.fixture
def status(self):
groups = [
GroupFactory(),
GroupFactory(ended=datetime.date.today() + datetime.timedelta(days=30)),
GroupFactory(ended=datetime.date.today() - datetime.timedelta(days=30))
]
return groups
def test_active_manager(self, status):
assert len(Group.objects.active()) == 2
def test_inactive_manager(self, status):
assert len(Group.objects.inactive()) == 1
|
28478db4b317e35ecbb63e5c2b1f4a7724b256d9
|
della/inbox/urls.py
|
della/inbox/urls.py
|
from django.conf.urls import url
from .views import MessageCreateView, ThreadDetailView, ThreadListView
urlpatterns = [
url(r'^(?P<recipient>[a-zA-Z0-9_]+)/$', ThreadDetailView.as_view(),
name='thread-detail'),
url(r'^(?P<pk>\d+)/new/$', MessageCreateView.as_view(),
name='new-message'),
url(r'^$', ThreadListView.as_view())
]
|
from django.conf.urls import url
from .views import MessageCreateView, ThreadDetailView, ThreadListView
urlpatterns = [
url(r'^@(?P<recipient>[a-zA-Z0-9_]+)/$', ThreadDetailView.as_view(),
name='thread-detail'),
url(r'^(?P<pk>\d+)/new/$', MessageCreateView.as_view(),
name='new-message'),
url(r'^$', ThreadListView.as_view())
]
|
Use `@` in message thread
|
Use `@` in message thread
|
Python
|
mit
|
avinassh/della,avinassh/della,avinassh/della
|
from django.conf.urls import url
from .views import MessageCreateView, ThreadDetailView, ThreadListView
urlpatterns = [
url(r'^(?P<recipient>[a-zA-Z0-9_]+)/$', ThreadDetailView.as_view(),
name='thread-detail'),
url(r'^(?P<pk>\d+)/new/$', MessageCreateView.as_view(),
name='new-message'),
url(r'^$', ThreadListView.as_view())
]
Use `@` in message thread
|
from django.conf.urls import url
from .views import MessageCreateView, ThreadDetailView, ThreadListView
urlpatterns = [
url(r'^@(?P<recipient>[a-zA-Z0-9_]+)/$', ThreadDetailView.as_view(),
name='thread-detail'),
url(r'^(?P<pk>\d+)/new/$', MessageCreateView.as_view(),
name='new-message'),
url(r'^$', ThreadListView.as_view())
]
|
<commit_before>from django.conf.urls import url
from .views import MessageCreateView, ThreadDetailView, ThreadListView
urlpatterns = [
url(r'^(?P<recipient>[a-zA-Z0-9_]+)/$', ThreadDetailView.as_view(),
name='thread-detail'),
url(r'^(?P<pk>\d+)/new/$', MessageCreateView.as_view(),
name='new-message'),
url(r'^$', ThreadListView.as_view())
]
<commit_msg>Use `@` in message thread<commit_after>
|
from django.conf.urls import url
from .views import MessageCreateView, ThreadDetailView, ThreadListView
urlpatterns = [
url(r'^@(?P<recipient>[a-zA-Z0-9_]+)/$', ThreadDetailView.as_view(),
name='thread-detail'),
url(r'^(?P<pk>\d+)/new/$', MessageCreateView.as_view(),
name='new-message'),
url(r'^$', ThreadListView.as_view())
]
|
from django.conf.urls import url
from .views import MessageCreateView, ThreadDetailView, ThreadListView
urlpatterns = [
url(r'^(?P<recipient>[a-zA-Z0-9_]+)/$', ThreadDetailView.as_view(),
name='thread-detail'),
url(r'^(?P<pk>\d+)/new/$', MessageCreateView.as_view(),
name='new-message'),
url(r'^$', ThreadListView.as_view())
]
Use `@` in message threadfrom django.conf.urls import url
from .views import MessageCreateView, ThreadDetailView, ThreadListView
urlpatterns = [
url(r'^@(?P<recipient>[a-zA-Z0-9_]+)/$', ThreadDetailView.as_view(),
name='thread-detail'),
url(r'^(?P<pk>\d+)/new/$', MessageCreateView.as_view(),
name='new-message'),
url(r'^$', ThreadListView.as_view())
]
|
<commit_before>from django.conf.urls import url
from .views import MessageCreateView, ThreadDetailView, ThreadListView
urlpatterns = [
url(r'^(?P<recipient>[a-zA-Z0-9_]+)/$', ThreadDetailView.as_view(),
name='thread-detail'),
url(r'^(?P<pk>\d+)/new/$', MessageCreateView.as_view(),
name='new-message'),
url(r'^$', ThreadListView.as_view())
]
<commit_msg>Use `@` in message thread<commit_after>from django.conf.urls import url
from .views import MessageCreateView, ThreadDetailView, ThreadListView
urlpatterns = [
url(r'^@(?P<recipient>[a-zA-Z0-9_]+)/$', ThreadDetailView.as_view(),
name='thread-detail'),
url(r'^(?P<pk>\d+)/new/$', MessageCreateView.as_view(),
name='new-message'),
url(r'^$', ThreadListView.as_view())
]
|
6bcd2ffc67dfbb1265d4df1f69de8e8b45376889
|
src/foremast/slacknotify/slack_notification.py
|
src/foremast/slacknotify/slack_notification.py
|
"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
|
"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
|
Resolve `message` variable missing error
|
fix: Resolve `message` variable missing error
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
fix: Resolve `message` variable missing error
|
"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
|
<commit_before>"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
<commit_msg>fix: Resolve `message` variable missing error<commit_after>
|
"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
|
"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
fix: Resolve `message` variable missing error"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
|
<commit_before>"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
<commit_msg>fix: Resolve `message` variable missing error<commit_after>"""Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
|
bf51352142f0ca2f2eed50c6862135d0d00baaa9
|
corehq/tests/test_toggles.py
|
corehq/tests/test_toggles.py
|
from __future__ import absolute_import, unicode_literals
from corehq import toggles
from corehq.toggles import ALL_TAGS
def test_toggle_properties():
"""
Check toggle properties
"""
for toggle in toggles.all_toggles():
assert toggle.slug
assert toggle.label, 'Toggle "{}" label missing'.format(toggle.slug)
assert toggle.tag, 'Toggle "{}" tag missing'.format(toggle.slug)
assert toggle.tag in ALL_TAGS, 'Toggle "{}" tag "{}" unrecognized'.format(toggle.slug, toggle.tag)
assert toggle.namespaces, 'Toggle "{}" namespaces missing'.format(toggle.slug)
def test_solutions_sub_tags():
"""
Check Solutions sub-tags begin with 'Solutions - '
"""
solutions_tags = [toggles.TAG_SOLUTIONS_OPEN, toggles.TAG_SOLUTIONS_CONDITIONAL, toggles.TAG_SOLUTIONS_LIMITED]
for tag in solutions_tags:
assert tag.name.startswith('Solutions - ')
|
from __future__ import absolute_import, unicode_literals
from corehq import toggles
from corehq.toggles import ALL_TAGS
def test_toggle_properties():
"""
Check toggle properties
"""
for toggle in toggles.all_toggles():
assert toggle.slug
assert toggle.label, 'Toggle "{}" label missing'.format(toggle.slug)
assert toggle.tag, 'Toggle "{}" tag missing'.format(toggle.slug)
assert toggle.tag in ALL_TAGS, 'Toggle "{}" tag "{}" unrecognized'.format(toggle.slug, toggle.tag)
assert toggle.namespaces, 'Toggle "{}" namespaces missing'.format(toggle.slug)
def test_solutions_sub_tags():
"""
Check Solutions sub-tags begin with 'Solutions - '
Client side toggle filtering logic currently depends on "Solutions" being in these tag names.
For context, see https://github.com/dimagi/commcare-hq/pull/24575#discussion_r293995391
"""
solutions_tags = [toggles.TAG_SOLUTIONS_OPEN, toggles.TAG_SOLUTIONS_CONDITIONAL, toggles.TAG_SOLUTIONS_LIMITED]
for tag in solutions_tags:
assert tag.name.startswith('Solutions - ')
|
Add comment to solutions sub-tags test
|
Add comment to solutions sub-tags test
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import, unicode_literals
from corehq import toggles
from corehq.toggles import ALL_TAGS
def test_toggle_properties():
"""
Check toggle properties
"""
for toggle in toggles.all_toggles():
assert toggle.slug
assert toggle.label, 'Toggle "{}" label missing'.format(toggle.slug)
assert toggle.tag, 'Toggle "{}" tag missing'.format(toggle.slug)
assert toggle.tag in ALL_TAGS, 'Toggle "{}" tag "{}" unrecognized'.format(toggle.slug, toggle.tag)
assert toggle.namespaces, 'Toggle "{}" namespaces missing'.format(toggle.slug)
def test_solutions_sub_tags():
"""
Check Solutions sub-tags begin with 'Solutions - '
"""
solutions_tags = [toggles.TAG_SOLUTIONS_OPEN, toggles.TAG_SOLUTIONS_CONDITIONAL, toggles.TAG_SOLUTIONS_LIMITED]
for tag in solutions_tags:
assert tag.name.startswith('Solutions - ')
Add comment to solutions sub-tags test
|
from __future__ import absolute_import, unicode_literals
from corehq import toggles
from corehq.toggles import ALL_TAGS
def test_toggle_properties():
"""
Check toggle properties
"""
for toggle in toggles.all_toggles():
assert toggle.slug
assert toggle.label, 'Toggle "{}" label missing'.format(toggle.slug)
assert toggle.tag, 'Toggle "{}" tag missing'.format(toggle.slug)
assert toggle.tag in ALL_TAGS, 'Toggle "{}" tag "{}" unrecognized'.format(toggle.slug, toggle.tag)
assert toggle.namespaces, 'Toggle "{}" namespaces missing'.format(toggle.slug)
def test_solutions_sub_tags():
"""
Check Solutions sub-tags begin with 'Solutions - '
Client side toggle filtering logic currently depends on "Solutions" being in these tag names.
For context, see https://github.com/dimagi/commcare-hq/pull/24575#discussion_r293995391
"""
solutions_tags = [toggles.TAG_SOLUTIONS_OPEN, toggles.TAG_SOLUTIONS_CONDITIONAL, toggles.TAG_SOLUTIONS_LIMITED]
for tag in solutions_tags:
assert tag.name.startswith('Solutions - ')
|
<commit_before>from __future__ import absolute_import, unicode_literals
from corehq import toggles
from corehq.toggles import ALL_TAGS
def test_toggle_properties():
"""
Check toggle properties
"""
for toggle in toggles.all_toggles():
assert toggle.slug
assert toggle.label, 'Toggle "{}" label missing'.format(toggle.slug)
assert toggle.tag, 'Toggle "{}" tag missing'.format(toggle.slug)
assert toggle.tag in ALL_TAGS, 'Toggle "{}" tag "{}" unrecognized'.format(toggle.slug, toggle.tag)
assert toggle.namespaces, 'Toggle "{}" namespaces missing'.format(toggle.slug)
def test_solutions_sub_tags():
"""
Check Solutions sub-tags begin with 'Solutions - '
"""
solutions_tags = [toggles.TAG_SOLUTIONS_OPEN, toggles.TAG_SOLUTIONS_CONDITIONAL, toggles.TAG_SOLUTIONS_LIMITED]
for tag in solutions_tags:
assert tag.name.startswith('Solutions - ')
<commit_msg>Add comment to solutions sub-tags test<commit_after>
|
from __future__ import absolute_import, unicode_literals
from corehq import toggles
from corehq.toggles import ALL_TAGS
def test_toggle_properties():
"""
Check toggle properties
"""
for toggle in toggles.all_toggles():
assert toggle.slug
assert toggle.label, 'Toggle "{}" label missing'.format(toggle.slug)
assert toggle.tag, 'Toggle "{}" tag missing'.format(toggle.slug)
assert toggle.tag in ALL_TAGS, 'Toggle "{}" tag "{}" unrecognized'.format(toggle.slug, toggle.tag)
assert toggle.namespaces, 'Toggle "{}" namespaces missing'.format(toggle.slug)
def test_solutions_sub_tags():
"""
Check Solutions sub-tags begin with 'Solutions - '
Client side toggle filtering logic currently depends on "Solutions" being in these tag names.
For context, see https://github.com/dimagi/commcare-hq/pull/24575#discussion_r293995391
"""
solutions_tags = [toggles.TAG_SOLUTIONS_OPEN, toggles.TAG_SOLUTIONS_CONDITIONAL, toggles.TAG_SOLUTIONS_LIMITED]
for tag in solutions_tags:
assert tag.name.startswith('Solutions - ')
|
from __future__ import absolute_import, unicode_literals
from corehq import toggles
from corehq.toggles import ALL_TAGS
def test_toggle_properties():
"""
Check toggle properties
"""
for toggle in toggles.all_toggles():
assert toggle.slug
assert toggle.label, 'Toggle "{}" label missing'.format(toggle.slug)
assert toggle.tag, 'Toggle "{}" tag missing'.format(toggle.slug)
assert toggle.tag in ALL_TAGS, 'Toggle "{}" tag "{}" unrecognized'.format(toggle.slug, toggle.tag)
assert toggle.namespaces, 'Toggle "{}" namespaces missing'.format(toggle.slug)
def test_solutions_sub_tags():
"""
Check Solutions sub-tags begin with 'Solutions - '
"""
solutions_tags = [toggles.TAG_SOLUTIONS_OPEN, toggles.TAG_SOLUTIONS_CONDITIONAL, toggles.TAG_SOLUTIONS_LIMITED]
for tag in solutions_tags:
assert tag.name.startswith('Solutions - ')
Add comment to solutions sub-tags testfrom __future__ import absolute_import, unicode_literals
from corehq import toggles
from corehq.toggles import ALL_TAGS
def test_toggle_properties():
"""
Check toggle properties
"""
for toggle in toggles.all_toggles():
assert toggle.slug
assert toggle.label, 'Toggle "{}" label missing'.format(toggle.slug)
assert toggle.tag, 'Toggle "{}" tag missing'.format(toggle.slug)
assert toggle.tag in ALL_TAGS, 'Toggle "{}" tag "{}" unrecognized'.format(toggle.slug, toggle.tag)
assert toggle.namespaces, 'Toggle "{}" namespaces missing'.format(toggle.slug)
def test_solutions_sub_tags():
"""
Check Solutions sub-tags begin with 'Solutions - '
Client side toggle filtering logic currently depends on "Solutions" being in these tag names.
For context, see https://github.com/dimagi/commcare-hq/pull/24575#discussion_r293995391
"""
solutions_tags = [toggles.TAG_SOLUTIONS_OPEN, toggles.TAG_SOLUTIONS_CONDITIONAL, toggles.TAG_SOLUTIONS_LIMITED]
for tag in solutions_tags:
assert tag.name.startswith('Solutions - ')
|
<commit_before>from __future__ import absolute_import, unicode_literals
from corehq import toggles
from corehq.toggles import ALL_TAGS
def test_toggle_properties():
"""
Check toggle properties
"""
for toggle in toggles.all_toggles():
assert toggle.slug
assert toggle.label, 'Toggle "{}" label missing'.format(toggle.slug)
assert toggle.tag, 'Toggle "{}" tag missing'.format(toggle.slug)
assert toggle.tag in ALL_TAGS, 'Toggle "{}" tag "{}" unrecognized'.format(toggle.slug, toggle.tag)
assert toggle.namespaces, 'Toggle "{}" namespaces missing'.format(toggle.slug)
def test_solutions_sub_tags():
"""
Check Solutions sub-tags begin with 'Solutions - '
"""
solutions_tags = [toggles.TAG_SOLUTIONS_OPEN, toggles.TAG_SOLUTIONS_CONDITIONAL, toggles.TAG_SOLUTIONS_LIMITED]
for tag in solutions_tags:
assert tag.name.startswith('Solutions - ')
<commit_msg>Add comment to solutions sub-tags test<commit_after>from __future__ import absolute_import, unicode_literals
from corehq import toggles
from corehq.toggles import ALL_TAGS
def test_toggle_properties():
"""
Check toggle properties
"""
for toggle in toggles.all_toggles():
assert toggle.slug
assert toggle.label, 'Toggle "{}" label missing'.format(toggle.slug)
assert toggle.tag, 'Toggle "{}" tag missing'.format(toggle.slug)
assert toggle.tag in ALL_TAGS, 'Toggle "{}" tag "{}" unrecognized'.format(toggle.slug, toggle.tag)
assert toggle.namespaces, 'Toggle "{}" namespaces missing'.format(toggle.slug)
def test_solutions_sub_tags():
"""
Check Solutions sub-tags begin with 'Solutions - '
Client side toggle filtering logic currently depends on "Solutions" being in these tag names.
For context, see https://github.com/dimagi/commcare-hq/pull/24575#discussion_r293995391
"""
solutions_tags = [toggles.TAG_SOLUTIONS_OPEN, toggles.TAG_SOLUTIONS_CONDITIONAL, toggles.TAG_SOLUTIONS_LIMITED]
for tag in solutions_tags:
assert tag.name.startswith('Solutions - ')
|
3ca3f9473d7031ef9536f56c253ba0a4b7e1ee6e
|
test/unit/ggrc/converters/test_query_helper.py
|
test/unit/ggrc/converters/test_query_helper.py
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper.expression_keys(expression))
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
# pylint: disable=protected-access
# needed for testing protected function inside the query helper
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper._expression_keys(expression))
|
Update unit tests with new query helper names
|
Update unit tests with new query helper names
|
Python
|
apache-2.0
|
j0gurt/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper.expression_keys(expression))
Update unit tests with new query helper names
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
# pylint: disable=protected-access
# needed for testing protected function inside the query helper
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper._expression_keys(expression))
|
<commit_before># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper.expression_keys(expression))
<commit_msg>Update unit tests with new query helper names<commit_after>
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
# pylint: disable=protected-access
# needed for testing protected function inside the query helper
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper._expression_keys(expression))
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper.expression_keys(expression))
Update unit tests with new query helper names# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
# pylint: disable=protected-access
# needed for testing protected function inside the query helper
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper._expression_keys(expression))
|
<commit_before># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper.expression_keys(expression))
<commit_msg>Update unit tests with new query helper names<commit_after># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import unittest
import mock
from ggrc.converters import query_helper
class TestQueryHelper(unittest.TestCase):
def test_expression_keys(self):
""" test expression keys function
Make sure it works with:
empty query
simple query
complex query
invalid complex query
"""
# pylint: disable=protected-access
# needed for testing protected function inside the query helper
query = mock.MagicMock()
helper = query_helper.QueryHelper(query)
expressions = [
(set(), {}),
(set(["key_1"]), {
"left": "key_1",
"op": {"name": "="},
"right": "",
}),
(set(["key_1", "key_2"]), {
"left": {
"left": "key_2",
"op": {"name": "="},
"right": "",
},
"op": {"name": "AND"},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
(set(), {
"left": {
"left": "5",
"op": {"name": "="},
"right": "",
},
"op": {"name": "="},
"right": {
"left": "key_1",
"op": {"name": "!="},
"right": "",
},
}),
]
for expected_result, expression in expressions:
self.assertEqual(expected_result, helper._expression_keys(expression))
|
eb4322eb0744d07cb10442ab16d50384aabe1478
|
cumulusci/core/tests/test_github.py
|
cumulusci/core/tests/test_github.py
|
import unittest
from cumulusci.core.github import get_github_api
class TestGithub(unittest.TestCase):
def test_github_api_retries(self):
gh = get_github_api('TestUser', 'TestPass')
adapter = gh._session.get_adapter('http://')
self.assertEqual(0.3, adapter.max_retries.backoff_factor)
self.assertIn(502, adapter.max_retries.status_forcelist)
|
from http.client import HTTPMessage
import io
import unittest
import mock
from cumulusci.core.github import get_github_api
class MockHttpResponse(mock.Mock):
def __init__(self, status):
super(MockHttpResponse, self).__init__()
self.status = status
self.strict = 0
self.version = 0
self.reason = None
self.msg = HTTPMessage(io.BytesIO())
def read(self):
return b''
def isclosed(self):
return True
class TestGithub(unittest.TestCase):
@mock.patch('urllib3.connectionpool.HTTPConnectionPool._make_request')
def test_github_api_retries(self, _make_request):
gh = get_github_api('TestUser', 'TestPass')
adapter = gh._session.get_adapter('http://')
self.assertEqual(0.3, adapter.max_retries.backoff_factor)
self.assertIn(502, adapter.max_retries.status_forcelist)
_make_request.side_effect = [
MockHttpResponse(status=503),
MockHttpResponse(status=200),
]
gh.octocat('meow')
self.assertEqual(_make_request.call_count, 2)
|
Test that github requests are actually retried
|
Test that github requests are actually retried
|
Python
|
bsd-3-clause
|
SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI
|
import unittest
from cumulusci.core.github import get_github_api
class TestGithub(unittest.TestCase):
def test_github_api_retries(self):
gh = get_github_api('TestUser', 'TestPass')
adapter = gh._session.get_adapter('http://')
self.assertEqual(0.3, adapter.max_retries.backoff_factor)
self.assertIn(502, adapter.max_retries.status_forcelist)
Test that github requests are actually retried
|
from http.client import HTTPMessage
import io
import unittest
import mock
from cumulusci.core.github import get_github_api
class MockHttpResponse(mock.Mock):
def __init__(self, status):
super(MockHttpResponse, self).__init__()
self.status = status
self.strict = 0
self.version = 0
self.reason = None
self.msg = HTTPMessage(io.BytesIO())
def read(self):
return b''
def isclosed(self):
return True
class TestGithub(unittest.TestCase):
@mock.patch('urllib3.connectionpool.HTTPConnectionPool._make_request')
def test_github_api_retries(self, _make_request):
gh = get_github_api('TestUser', 'TestPass')
adapter = gh._session.get_adapter('http://')
self.assertEqual(0.3, adapter.max_retries.backoff_factor)
self.assertIn(502, adapter.max_retries.status_forcelist)
_make_request.side_effect = [
MockHttpResponse(status=503),
MockHttpResponse(status=200),
]
gh.octocat('meow')
self.assertEqual(_make_request.call_count, 2)
|
<commit_before>import unittest
from cumulusci.core.github import get_github_api
class TestGithub(unittest.TestCase):
def test_github_api_retries(self):
gh = get_github_api('TestUser', 'TestPass')
adapter = gh._session.get_adapter('http://')
self.assertEqual(0.3, adapter.max_retries.backoff_factor)
self.assertIn(502, adapter.max_retries.status_forcelist)
<commit_msg>Test that github requests are actually retried<commit_after>
|
from http.client import HTTPMessage
import io
import unittest
import mock
from cumulusci.core.github import get_github_api
class MockHttpResponse(mock.Mock):
def __init__(self, status):
super(MockHttpResponse, self).__init__()
self.status = status
self.strict = 0
self.version = 0
self.reason = None
self.msg = HTTPMessage(io.BytesIO())
def read(self):
return b''
def isclosed(self):
return True
class TestGithub(unittest.TestCase):
@mock.patch('urllib3.connectionpool.HTTPConnectionPool._make_request')
def test_github_api_retries(self, _make_request):
gh = get_github_api('TestUser', 'TestPass')
adapter = gh._session.get_adapter('http://')
self.assertEqual(0.3, adapter.max_retries.backoff_factor)
self.assertIn(502, adapter.max_retries.status_forcelist)
_make_request.side_effect = [
MockHttpResponse(status=503),
MockHttpResponse(status=200),
]
gh.octocat('meow')
self.assertEqual(_make_request.call_count, 2)
|
import unittest
from cumulusci.core.github import get_github_api
class TestGithub(unittest.TestCase):
def test_github_api_retries(self):
gh = get_github_api('TestUser', 'TestPass')
adapter = gh._session.get_adapter('http://')
self.assertEqual(0.3, adapter.max_retries.backoff_factor)
self.assertIn(502, adapter.max_retries.status_forcelist)
Test that github requests are actually retriedfrom http.client import HTTPMessage
import io
import unittest
import mock
from cumulusci.core.github import get_github_api
class MockHttpResponse(mock.Mock):
def __init__(self, status):
super(MockHttpResponse, self).__init__()
self.status = status
self.strict = 0
self.version = 0
self.reason = None
self.msg = HTTPMessage(io.BytesIO())
def read(self):
return b''
def isclosed(self):
return True
class TestGithub(unittest.TestCase):
@mock.patch('urllib3.connectionpool.HTTPConnectionPool._make_request')
def test_github_api_retries(self, _make_request):
gh = get_github_api('TestUser', 'TestPass')
adapter = gh._session.get_adapter('http://')
self.assertEqual(0.3, adapter.max_retries.backoff_factor)
self.assertIn(502, adapter.max_retries.status_forcelist)
_make_request.side_effect = [
MockHttpResponse(status=503),
MockHttpResponse(status=200),
]
gh.octocat('meow')
self.assertEqual(_make_request.call_count, 2)
|
<commit_before>import unittest
from cumulusci.core.github import get_github_api
class TestGithub(unittest.TestCase):
def test_github_api_retries(self):
gh = get_github_api('TestUser', 'TestPass')
adapter = gh._session.get_adapter('http://')
self.assertEqual(0.3, adapter.max_retries.backoff_factor)
self.assertIn(502, adapter.max_retries.status_forcelist)
<commit_msg>Test that github requests are actually retried<commit_after>from http.client import HTTPMessage
import io
import unittest
import mock
from cumulusci.core.github import get_github_api
class MockHttpResponse(mock.Mock):
def __init__(self, status):
super(MockHttpResponse, self).__init__()
self.status = status
self.strict = 0
self.version = 0
self.reason = None
self.msg = HTTPMessage(io.BytesIO())
def read(self):
return b''
def isclosed(self):
return True
class TestGithub(unittest.TestCase):
@mock.patch('urllib3.connectionpool.HTTPConnectionPool._make_request')
def test_github_api_retries(self, _make_request):
gh = get_github_api('TestUser', 'TestPass')
adapter = gh._session.get_adapter('http://')
self.assertEqual(0.3, adapter.max_retries.backoff_factor)
self.assertIn(502, adapter.max_retries.status_forcelist)
_make_request.side_effect = [
MockHttpResponse(status=503),
MockHttpResponse(status=200),
]
gh.octocat('meow')
self.assertEqual(_make_request.call_count, 2)
|
4cd04558d788e9c1ee77471e87f8ef27416bf60e
|
rcard/urls.py
|
rcard/urls.py
|
"""rcard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from waterfall_wall import views
router = routers.DefaultRouter()
router.register(r'images', views.ImageViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
|
"""rcard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from waterfall_wall import views
router = routers.DefaultRouter()
router.register(r'images', views.ImageViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
|
Add api prefix for image API
|
Add api prefix for image API
|
Python
|
mit
|
carlcarl/rcard,carlcarl/rcard
|
"""rcard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from waterfall_wall import views
router = routers.DefaultRouter()
router.register(r'images', views.ImageViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
Add api prefix for image API
|
"""rcard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from waterfall_wall import views
router = routers.DefaultRouter()
router.register(r'images', views.ImageViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
|
<commit_before>"""rcard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from waterfall_wall import views
router = routers.DefaultRouter()
router.register(r'images', views.ImageViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
<commit_msg>Add api prefix for image API<commit_after>
|
"""rcard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from waterfall_wall import views
router = routers.DefaultRouter()
router.register(r'images', views.ImageViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
|
"""rcard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from waterfall_wall import views
router = routers.DefaultRouter()
router.register(r'images', views.ImageViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
Add api prefix for image API"""rcard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from waterfall_wall import views
router = routers.DefaultRouter()
router.register(r'images', views.ImageViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
|
<commit_before>"""rcard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from waterfall_wall import views
router = routers.DefaultRouter()
router.register(r'images', views.ImageViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
<commit_msg>Add api prefix for image API<commit_after>"""rcard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from waterfall_wall import views
router = routers.DefaultRouter()
router.register(r'images', views.ImageViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
|
509990ffdaec354b09d34aae63e1851a2e8a2038
|
tests/test_address_book.py
|
tests/test_address_book.py
|
from unittest import TestCase
from address_book import AddressBook, Person, Group
class AddressBookTestCase(TestCase):
def test_add_person(self):
address_book = AddressBook()
person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
address_book.add_person(person)
self.assertIn(person, address_book)
def test_add_group(self):
address_book = AddressBook()
group = Group('Brozzz')
address_book.add_group(group)
self.assertIn(group, address_book)
def test_find_person_by_first_name(self):
pass
def test_find_person_by_last_name(self):
pass
def test_find_person_by_email(self):
pass
|
from unittest import TestCase
from address_book import AddressBook, Person, Group
class AddressBookTestCase(TestCase):
def test_add_person(self):
address_book = AddressBook()
person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
address_book.add_person(person)
self.assertIn(person, address_book)
def test_add_group(self):
address_book = AddressBook()
group = Group('Brozzz')
address_book.add_group(group)
self.assertIn(group, address_book)
def test_find_person_by_first_name(self):
address_book = AddressBook()
john_person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
ivan_person = Person(
'Ivan',
'Doe',
['Russian Federation, Kemerovo region, Belovo, Kirova street 42, apt. 13'],
['+79834771122']
)
found_person = address_book.find(first_name='Ivan')
self.assertEqual(found_person, ivan_person)
def test_find_person_by_last_name(self):
pass
def test_find_person_by_email(self):
pass
|
Add test for finding a person by first name
|
Add test for finding a person by first name
|
Python
|
mit
|
dizpers/python-address-book-assignment
|
from unittest import TestCase
from address_book import AddressBook, Person, Group
class AddressBookTestCase(TestCase):
def test_add_person(self):
address_book = AddressBook()
person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
address_book.add_person(person)
self.assertIn(person, address_book)
def test_add_group(self):
address_book = AddressBook()
group = Group('Brozzz')
address_book.add_group(group)
self.assertIn(group, address_book)
def test_find_person_by_first_name(self):
pass
def test_find_person_by_last_name(self):
pass
def test_find_person_by_email(self):
passAdd test for finding a person by first name
|
from unittest import TestCase
from address_book import AddressBook, Person, Group
class AddressBookTestCase(TestCase):
def test_add_person(self):
address_book = AddressBook()
person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
address_book.add_person(person)
self.assertIn(person, address_book)
def test_add_group(self):
address_book = AddressBook()
group = Group('Brozzz')
address_book.add_group(group)
self.assertIn(group, address_book)
def test_find_person_by_first_name(self):
address_book = AddressBook()
john_person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
ivan_person = Person(
'Ivan',
'Doe',
['Russian Federation, Kemerovo region, Belovo, Kirova street 42, apt. 13'],
['+79834771122']
)
found_person = address_book.find(first_name='Ivan')
self.assertEqual(found_person, ivan_person)
def test_find_person_by_last_name(self):
pass
def test_find_person_by_email(self):
pass
|
<commit_before>from unittest import TestCase
from address_book import AddressBook, Person, Group
class AddressBookTestCase(TestCase):
def test_add_person(self):
address_book = AddressBook()
person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
address_book.add_person(person)
self.assertIn(person, address_book)
def test_add_group(self):
address_book = AddressBook()
group = Group('Brozzz')
address_book.add_group(group)
self.assertIn(group, address_book)
def test_find_person_by_first_name(self):
pass
def test_find_person_by_last_name(self):
pass
def test_find_person_by_email(self):
pass<commit_msg>Add test for finding a person by first name<commit_after>
|
from unittest import TestCase
from address_book import AddressBook, Person, Group
class AddressBookTestCase(TestCase):
def test_add_person(self):
address_book = AddressBook()
person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
address_book.add_person(person)
self.assertIn(person, address_book)
def test_add_group(self):
address_book = AddressBook()
group = Group('Brozzz')
address_book.add_group(group)
self.assertIn(group, address_book)
def test_find_person_by_first_name(self):
address_book = AddressBook()
john_person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
ivan_person = Person(
'Ivan',
'Doe',
['Russian Federation, Kemerovo region, Belovo, Kirova street 42, apt. 13'],
['+79834771122']
)
found_person = address_book.find(first_name='Ivan')
self.assertEqual(found_person, ivan_person)
def test_find_person_by_last_name(self):
pass
def test_find_person_by_email(self):
pass
|
from unittest import TestCase
from address_book import AddressBook, Person, Group
class AddressBookTestCase(TestCase):
def test_add_person(self):
address_book = AddressBook()
person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
address_book.add_person(person)
self.assertIn(person, address_book)
def test_add_group(self):
address_book = AddressBook()
group = Group('Brozzz')
address_book.add_group(group)
self.assertIn(group, address_book)
def test_find_person_by_first_name(self):
pass
def test_find_person_by_last_name(self):
pass
def test_find_person_by_email(self):
passAdd test for finding a person by first namefrom unittest import TestCase
from address_book import AddressBook, Person, Group
class AddressBookTestCase(TestCase):
def test_add_person(self):
address_book = AddressBook()
person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
address_book.add_person(person)
self.assertIn(person, address_book)
def test_add_group(self):
address_book = AddressBook()
group = Group('Brozzz')
address_book.add_group(group)
self.assertIn(group, address_book)
def test_find_person_by_first_name(self):
address_book = AddressBook()
john_person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
ivan_person = Person(
'Ivan',
'Doe',
['Russian Federation, Kemerovo region, Belovo, Kirova street 42, apt. 13'],
['+79834771122']
)
found_person = address_book.find(first_name='Ivan')
self.assertEqual(found_person, ivan_person)
def test_find_person_by_last_name(self):
pass
def test_find_person_by_email(self):
pass
|
<commit_before>from unittest import TestCase
from address_book import AddressBook, Person, Group
class AddressBookTestCase(TestCase):
def test_add_person(self):
address_book = AddressBook()
person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
address_book.add_person(person)
self.assertIn(person, address_book)
def test_add_group(self):
address_book = AddressBook()
group = Group('Brozzz')
address_book.add_group(group)
self.assertIn(group, address_book)
def test_find_person_by_first_name(self):
pass
def test_find_person_by_last_name(self):
pass
def test_find_person_by_email(self):
pass<commit_msg>Add test for finding a person by first name<commit_after>from unittest import TestCase
from address_book import AddressBook, Person, Group
class AddressBookTestCase(TestCase):
def test_add_person(self):
address_book = AddressBook()
person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
address_book.add_person(person)
self.assertIn(person, address_book)
def test_add_group(self):
address_book = AddressBook()
group = Group('Brozzz')
address_book.add_group(group)
self.assertIn(group, address_book)
def test_find_person_by_first_name(self):
address_book = AddressBook()
john_person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053']
)
ivan_person = Person(
'Ivan',
'Doe',
['Russian Federation, Kemerovo region, Belovo, Kirova street 42, apt. 13'],
['+79834771122']
)
found_person = address_book.find(first_name='Ivan')
self.assertEqual(found_person, ivan_person)
def test_find_person_by_last_name(self):
pass
def test_find_person_by_email(self):
pass
|
8bf27ff0e112781724d0a8a28b1b3d44976f0155
|
lfs_order_numbers/models.py
|
lfs_order_numbers/models.py
|
# django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted:
return self.format % self.last
else:
return self.last
|
# django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted and self.format:
return self.format % self.last
else:
return self.last
|
Check whether there is a format given (this must be checked for validity though).
|
Check whether there is a format given (this must be checked for validity though).
|
Python
|
bsd-3-clause
|
diefenbach/lfs-order-numbers
|
# django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted:
return self.format % self.last
else:
return self.last
Check whether there is a format given (this must be checked for validity though).
|
# django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted and self.format:
return self.format % self.last
else:
return self.last
|
<commit_before># django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted:
return self.format % self.last
else:
return self.last
<commit_msg>Check whether there is a format given (this must be checked for validity though).<commit_after>
|
# django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted and self.format:
return self.format % self.last
else:
return self.last
|
# django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted:
return self.format % self.last
else:
return self.last
Check whether there is a format given (this must be checked for validity though).# django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted and self.format:
return self.format % self.last
else:
return self.last
|
<commit_before># django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted:
return self.format % self.last
else:
return self.last
<commit_msg>Check whether there is a format given (this must be checked for validity though).<commit_after># django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
# lfs imports
from lfs.plugins import OrderNumberGenerator as Base
class OrderNumberGenerator(Base):
"""Generates order numbers and saves the last one.
**Attributes:**
id
The primary key of the order number.
last
The last stored/returned order number.
format
The format of the integer part of the order number.
"""
id = models.CharField(primary_key=True, max_length=20)
last = models.IntegerField(_(u"Last order number"), default=0)
format = models.CharField(blank=True, max_length=20)
def get_next(self, formatted=True):
"""Returns the next order number.
**Parameters:**
formatted
If True the number will be returned within the stored format.
"""
self.last += 1
self.save()
if formatted and self.format:
return self.format % self.last
else:
return self.last
|
ccc76f356a0480767eceff83d2b573aa922896f5
|
package/management/commands/repo_updater.py
|
package/management/commands/repo_updater.py
|
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package, fetch_pypi=False)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
package.last_fetched = timezone.now()
package.save()
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
Fix the last_fetched and repo commands
|
Fix the last_fetched and repo commands
|
Python
|
mit
|
QLGu/djangopackages,QLGu/djangopackages,nanuxbe/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,pydanny/djangopackages,QLGu/djangopackages
|
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package, fetch_pypi=False)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
Fix the last_fetched and repo commands
|
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
package.last_fetched = timezone.now()
package.save()
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
<commit_before>import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package, fetch_pypi=False)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
<commit_msg>Fix the last_fetched and repo commands<commit_after>
|
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
package.last_fetched = timezone.now()
package.save()
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package, fetch_pypi=False)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
Fix the last_fetched and repo commandsimport logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
package.last_fetched = timezone.now()
package.save()
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
<commit_before>import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package, fetch_pypi=False)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
<commit_msg>Fix the last_fetched and repo commands<commit_after>import logging
import logging.config
from django.core.management.base import NoArgsCommand
from django.utils import timezone
from package.models import Package
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = "Updates all the packages in the system focusing on repo data"
def handle(self, *args, **options):
yesterday = timezone.now() - timezone.timedelta(1)
for package in Package.objects.filter().iterator():
# keep this here because for now we only have one last_fetched field.
package.repo.fetch_metadata(package)
if package.last_fetched <= yesterday:
continue
package.repo.fetch_commits(package)
package.last_fetched = timezone.now()
package.save()
# if package.repo.title == "Github":
# msg = "{}. {}. {}".format(count, package.repo.github.ratelimit_remaining, package)
# else:
# msg = "{}. {}".format(count, package)
# logger.info(msg)
|
d0038096e800f7a25d848225ecb8f9c885ce5758
|
adhocracy4/projects/views.py
|
adhocracy4/projects/views.py
|
from django.shortcuts import redirect
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
def handle_no_permission(self):
"""
Check if user clould join
"""
membership_impossible = (
not self.request.user.is_authenticated()
or self.project.is_draft
or self.project.has_member(self.request.user)
)
if membership_impossible:
return super().handle_no_permission()
else:
return self._redirect_membership_request()
def _redirect_membership_request(self):
return redirect('memberships-request',
project_slug=self.project.slug)
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
|
from django.shortcuts import redirect
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
def handle_no_permission(self):
"""
Check if user clould join
"""
user = self.request.user
is_member = user.is_authenticated() and self.project.has_member(user)
if not is_member:
return self.handle_no_membership()
else:
return super().handle_no_permission()
def handle_no_membership(self):
"""
Handle that an authenticated user is not member of project.
Override this function to configure the behaviour if a user has no
permissions to view the project and is not member of the project.
"""
return super().handle_no_permission()
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
|
Remove request membership feature from project
|
Remove request membership feature from project
- add in euth.memberships app
|
Python
|
agpl-3.0
|
liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4
|
from django.shortcuts import redirect
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
def handle_no_permission(self):
"""
Check if user clould join
"""
membership_impossible = (
not self.request.user.is_authenticated()
or self.project.is_draft
or self.project.has_member(self.request.user)
)
if membership_impossible:
return super().handle_no_permission()
else:
return self._redirect_membership_request()
def _redirect_membership_request(self):
return redirect('memberships-request',
project_slug=self.project.slug)
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
Remove request membership feature from project
- add in euth.memberships app
|
from django.shortcuts import redirect
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
def handle_no_permission(self):
"""
Check if user clould join
"""
user = self.request.user
is_member = user.is_authenticated() and self.project.has_member(user)
if not is_member:
return self.handle_no_membership()
else:
return super().handle_no_permission()
def handle_no_membership(self):
"""
Handle that an authenticated user is not member of project.
Override this function to configure the behaviour if a user has no
permissions to view the project and is not member of the project.
"""
return super().handle_no_permission()
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
|
<commit_before>from django.shortcuts import redirect
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
def handle_no_permission(self):
"""
Check if user clould join
"""
membership_impossible = (
not self.request.user.is_authenticated()
or self.project.is_draft
or self.project.has_member(self.request.user)
)
if membership_impossible:
return super().handle_no_permission()
else:
return self._redirect_membership_request()
def _redirect_membership_request(self):
return redirect('memberships-request',
project_slug=self.project.slug)
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
<commit_msg>Remove request membership feature from project
- add in euth.memberships app<commit_after>
|
from django.shortcuts import redirect
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
def handle_no_permission(self):
"""
Check if user clould join
"""
user = self.request.user
is_member = user.is_authenticated() and self.project.has_member(user)
if not is_member:
return self.handle_no_membership()
else:
return super().handle_no_permission()
def handle_no_membership(self):
"""
Handle that an authenticated user is not member of project.
Override this function to configure the behaviour if a user has no
permissions to view the project and is not member of the project.
"""
return super().handle_no_permission()
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
|
from django.shortcuts import redirect
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
def handle_no_permission(self):
"""
Check if user clould join
"""
membership_impossible = (
not self.request.user.is_authenticated()
or self.project.is_draft
or self.project.has_member(self.request.user)
)
if membership_impossible:
return super().handle_no_permission()
else:
return self._redirect_membership_request()
def _redirect_membership_request(self):
return redirect('memberships-request',
project_slug=self.project.slug)
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
Remove request membership feature from project
- add in euth.memberships appfrom django.shortcuts import redirect
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
def handle_no_permission(self):
"""
Check if user clould join
"""
user = self.request.user
is_member = user.is_authenticated() and self.project.has_member(user)
if not is_member:
return self.handle_no_membership()
else:
return super().handle_no_permission()
def handle_no_membership(self):
"""
Handle that an authenticated user is not member of project.
Override this function to configure the behaviour if a user has no
permissions to view the project and is not member of the project.
"""
return super().handle_no_permission()
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
|
<commit_before>from django.shortcuts import redirect
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
def handle_no_permission(self):
"""
Check if user clould join
"""
membership_impossible = (
not self.request.user.is_authenticated()
or self.project.is_draft
or self.project.has_member(self.request.user)
)
if membership_impossible:
return super().handle_no_permission()
else:
return self._redirect_membership_request()
def _redirect_membership_request(self):
return redirect('memberships-request',
project_slug=self.project.slug)
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
<commit_msg>Remove request membership feature from project
- add in euth.memberships app<commit_after>from django.shortcuts import redirect
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
def handle_no_permission(self):
"""
Check if user clould join
"""
user = self.request.user
is_member = user.is_authenticated() and self.project.has_member(user)
if not is_member:
return self.handle_no_membership()
else:
return super().handle_no_permission()
def handle_no_membership(self):
"""
Handle that an authenticated user is not member of project.
Override this function to configure the behaviour if a user has no
permissions to view the project and is not member of the project.
"""
return super().handle_no_permission()
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
|
2f5f2112c4c6b97b76fd268d1e79537dac696f0e
|
src/nyc_trees/apps/home/training/decorators.py
|
src/nyc_trees/apps/home/training/decorators.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from functools import wraps
from django.db import transaction
from django.http import Http404
from django.contrib.flatpages.views import flatpage
def render_flatpage(url):
def fn(request, *args, **kwargs):
return flatpage(request, url)
return fn
def mark_user(attr):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
user = request.user
if user.is_authenticated() and not getattr(user, attr):
setattr(user, attr, True)
user.save()
ctx = view_fn(request, *args, **kwargs)
return ctx
return wrapper
return outer_decorator
def require_visitability(step):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
if not step.is_visitable(request.user):
raise Http404()
else:
return view_fn(request, *args, **kwargs)
return wrapper
return outer_decorator
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from functools import wraps
from django.db import transaction
from django.http import Http404
from django.contrib.flatpages.views import flatpage
def render_flatpage(url):
def fn(request, *args, **kwargs):
return flatpage(request, url)
return fn
def mark_user(attr, extra_block=None):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
user = request.user
if user.is_authenticated() and not getattr(user, attr):
setattr(user, attr, True)
user.save()
should_trigger_extra_block = True
else:
should_trigger_extra_block = False
ctx = view_fn(request, *args, **kwargs)
# in case the extra block does things that don't
# work well with database transactions (like email)
# postpone it to the end of the transaction block
# to avoid cases in which an email is sent but the
# transaction is rolled back due to a later exception
if extra_block and should_trigger_extra_block:
extra_block(user)
return ctx
return wrapper
return outer_decorator
def require_visitability(step):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
if not step.is_visitable(request.user):
raise Http404()
else:
return view_fn(request, *args, **kwargs)
return wrapper
return outer_decorator
|
Add support for arbitrary user actions on marking
|
Add support for arbitrary user actions on marking
|
Python
|
agpl-3.0
|
azavea/nyc-trees,maurizi/nyc-trees,azavea/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,kdeloach/nyc-trees,maurizi/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,kdeloach/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from functools import wraps
from django.db import transaction
from django.http import Http404
from django.contrib.flatpages.views import flatpage
def render_flatpage(url):
def fn(request, *args, **kwargs):
return flatpage(request, url)
return fn
def mark_user(attr):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
user = request.user
if user.is_authenticated() and not getattr(user, attr):
setattr(user, attr, True)
user.save()
ctx = view_fn(request, *args, **kwargs)
return ctx
return wrapper
return outer_decorator
def require_visitability(step):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
if not step.is_visitable(request.user):
raise Http404()
else:
return view_fn(request, *args, **kwargs)
return wrapper
return outer_decorator
Add support for arbitrary user actions on marking
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from functools import wraps
from django.db import transaction
from django.http import Http404
from django.contrib.flatpages.views import flatpage
def render_flatpage(url):
def fn(request, *args, **kwargs):
return flatpage(request, url)
return fn
def mark_user(attr, extra_block=None):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
user = request.user
if user.is_authenticated() and not getattr(user, attr):
setattr(user, attr, True)
user.save()
should_trigger_extra_block = True
else:
should_trigger_extra_block = False
ctx = view_fn(request, *args, **kwargs)
# in case the extra block does things that don't
# work well with database transactions (like email)
# postpone it to the end of the transaction block
# to avoid cases in which an email is sent but the
# transaction is rolled back due to a later exception
if extra_block and should_trigger_extra_block:
extra_block(user)
return ctx
return wrapper
return outer_decorator
def require_visitability(step):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
if not step.is_visitable(request.user):
raise Http404()
else:
return view_fn(request, *args, **kwargs)
return wrapper
return outer_decorator
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from functools import wraps
from django.db import transaction
from django.http import Http404
from django.contrib.flatpages.views import flatpage
def render_flatpage(url):
def fn(request, *args, **kwargs):
return flatpage(request, url)
return fn
def mark_user(attr):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
user = request.user
if user.is_authenticated() and not getattr(user, attr):
setattr(user, attr, True)
user.save()
ctx = view_fn(request, *args, **kwargs)
return ctx
return wrapper
return outer_decorator
def require_visitability(step):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
if not step.is_visitable(request.user):
raise Http404()
else:
return view_fn(request, *args, **kwargs)
return wrapper
return outer_decorator
<commit_msg>Add support for arbitrary user actions on marking<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from functools import wraps
from django.db import transaction
from django.http import Http404
from django.contrib.flatpages.views import flatpage
def render_flatpage(url):
def fn(request, *args, **kwargs):
return flatpage(request, url)
return fn
def mark_user(attr, extra_block=None):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
user = request.user
if user.is_authenticated() and not getattr(user, attr):
setattr(user, attr, True)
user.save()
should_trigger_extra_block = True
else:
should_trigger_extra_block = False
ctx = view_fn(request, *args, **kwargs)
# in case the extra block does things that don't
# work well with database transactions (like email)
# postpone it to the end of the transaction block
# to avoid cases in which an email is sent but the
# transaction is rolled back due to a later exception
if extra_block and should_trigger_extra_block:
extra_block(user)
return ctx
return wrapper
return outer_decorator
def require_visitability(step):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
if not step.is_visitable(request.user):
raise Http404()
else:
return view_fn(request, *args, **kwargs)
return wrapper
return outer_decorator
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from functools import wraps
from django.db import transaction
from django.http import Http404
from django.contrib.flatpages.views import flatpage
def render_flatpage(url):
def fn(request, *args, **kwargs):
return flatpage(request, url)
return fn
def mark_user(attr):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
user = request.user
if user.is_authenticated() and not getattr(user, attr):
setattr(user, attr, True)
user.save()
ctx = view_fn(request, *args, **kwargs)
return ctx
return wrapper
return outer_decorator
def require_visitability(step):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
if not step.is_visitable(request.user):
raise Http404()
else:
return view_fn(request, *args, **kwargs)
return wrapper
return outer_decorator
Add support for arbitrary user actions on marking# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from functools import wraps
from django.db import transaction
from django.http import Http404
from django.contrib.flatpages.views import flatpage
def render_flatpage(url):
def fn(request, *args, **kwargs):
return flatpage(request, url)
return fn
def mark_user(attr, extra_block=None):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
user = request.user
if user.is_authenticated() and not getattr(user, attr):
setattr(user, attr, True)
user.save()
should_trigger_extra_block = True
else:
should_trigger_extra_block = False
ctx = view_fn(request, *args, **kwargs)
# in case the extra block does things that don't
# work well with database transactions (like email)
# postpone it to the end of the transaction block
# to avoid cases in which an email is sent but the
# transaction is rolled back due to a later exception
if extra_block and should_trigger_extra_block:
extra_block(user)
return ctx
return wrapper
return outer_decorator
def require_visitability(step):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
if not step.is_visitable(request.user):
raise Http404()
else:
return view_fn(request, *args, **kwargs)
return wrapper
return outer_decorator
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from functools import wraps
from django.db import transaction
from django.http import Http404
from django.contrib.flatpages.views import flatpage
def render_flatpage(url):
def fn(request, *args, **kwargs):
return flatpage(request, url)
return fn
def mark_user(attr):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
user = request.user
if user.is_authenticated() and not getattr(user, attr):
setattr(user, attr, True)
user.save()
ctx = view_fn(request, *args, **kwargs)
return ctx
return wrapper
return outer_decorator
def require_visitability(step):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
if not step.is_visitable(request.user):
raise Http404()
else:
return view_fn(request, *args, **kwargs)
return wrapper
return outer_decorator
<commit_msg>Add support for arbitrary user actions on marking<commit_after># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from functools import wraps
from django.db import transaction
from django.http import Http404
from django.contrib.flatpages.views import flatpage
def render_flatpage(url):
def fn(request, *args, **kwargs):
return flatpage(request, url)
return fn
def mark_user(attr, extra_block=None):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
user = request.user
if user.is_authenticated() and not getattr(user, attr):
setattr(user, attr, True)
user.save()
should_trigger_extra_block = True
else:
should_trigger_extra_block = False
ctx = view_fn(request, *args, **kwargs)
# in case the extra block does things that don't
# work well with database transactions (like email)
# postpone it to the end of the transaction block
# to avoid cases in which an email is sent but the
# transaction is rolled back due to a later exception
if extra_block and should_trigger_extra_block:
extra_block(user)
return ctx
return wrapper
return outer_decorator
def require_visitability(step):
def outer_decorator(view_fn):
@wraps(view_fn)
@transaction.atomic
def wrapper(request, *args, **kwargs):
if not step.is_visitable(request.user):
raise Http404()
else:
return view_fn(request, *args, **kwargs)
return wrapper
return outer_decorator
|
627ceb6adff6a2f954048b7641ac3b68d19ef019
|
experiments/stop_motion_tool/stop_motion_tool.py
|
experiments/stop_motion_tool/stop_motion_tool.py
|
from cam import OpenCV_Cam
import cv2
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
prevFrame = None
i = 0
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi',fourcc, 3.0, (1920,1080), isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
# Load the frame into a window named as 'Press any key to exit'
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
if key_code is KEY_SPACE:
cv2.imwrite('frame'+str(i)+'.png', frame)
video.write(frame)
prevFrame = frame
i += 1
elif key_code is KEY_ESC:
break
cv2.destroyAllWindows()
cam.release()
video.release()
|
from cam import OpenCV_Cam
import cv2
import os.path
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
prevFrame = None
i = 0
fname="frame.png"
if os.path.isfile(fname):
prevFrame = cv2.imread(fname)
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi',fourcc, 3.0, cam.size, isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
# Load the frame into a window named as 'Press any key to exit'
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
if key_code is KEY_SPACE or key_code == 2228224:
cv2.imwrite('frame'+str(i)+'.png', frame)
video.write(frame)
prevFrame = frame
i += 1
elif key_code is KEY_ESC:
break
cv2.destroyAllWindows()
cam.release()
video.release()
|
Add support for presenter and continuing frame capture
|
Add support for presenter and continuing frame capture
Click the right click of the presenter will trigger frame capture.
|
Python
|
mit
|
fatcloud/PyCV-time
|
from cam import OpenCV_Cam
import cv2
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
prevFrame = None
i = 0
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi',fourcc, 3.0, (1920,1080), isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
# Load the frame into a window named as 'Press any key to exit'
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
if key_code is KEY_SPACE:
cv2.imwrite('frame'+str(i)+'.png', frame)
video.write(frame)
prevFrame = frame
i += 1
elif key_code is KEY_ESC:
break
cv2.destroyAllWindows()
cam.release()
video.release()
Add support for presenter and continuing frame capture
Click the right click of the presenter will trigger frame capture.
|
from cam import OpenCV_Cam
import cv2
import os.path
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
prevFrame = None
i = 0
fname="frame.png"
if os.path.isfile(fname):
prevFrame = cv2.imread(fname)
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi',fourcc, 3.0, cam.size, isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
# Load the frame into a window named as 'Press any key to exit'
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
if key_code is KEY_SPACE or key_code == 2228224:
cv2.imwrite('frame'+str(i)+'.png', frame)
video.write(frame)
prevFrame = frame
i += 1
elif key_code is KEY_ESC:
break
cv2.destroyAllWindows()
cam.release()
video.release()
|
<commit_before>from cam import OpenCV_Cam
import cv2
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
prevFrame = None
i = 0
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi',fourcc, 3.0, (1920,1080), isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
# Load the frame into a window named as 'Press any key to exit'
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
if key_code is KEY_SPACE:
cv2.imwrite('frame'+str(i)+'.png', frame)
video.write(frame)
prevFrame = frame
i += 1
elif key_code is KEY_ESC:
break
cv2.destroyAllWindows()
cam.release()
video.release()
<commit_msg>Add support for presenter and continuing frame capture
Click the right click of the presenter will trigger frame capture.<commit_after>
|
from cam import OpenCV_Cam
import cv2
import os.path
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
prevFrame = None
i = 0
fname="frame.png"
if os.path.isfile(fname):
prevFrame = cv2.imread(fname)
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi',fourcc, 3.0, cam.size, isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
# Load the frame into a window named as 'Press any key to exit'
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
if key_code is KEY_SPACE or key_code == 2228224:
cv2.imwrite('frame'+str(i)+'.png', frame)
video.write(frame)
prevFrame = frame
i += 1
elif key_code is KEY_ESC:
break
cv2.destroyAllWindows()
cam.release()
video.release()
|
from cam import OpenCV_Cam
import cv2
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
prevFrame = None
i = 0
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi',fourcc, 3.0, (1920,1080), isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
# Load the frame into a window named as 'Press any key to exit'
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
if key_code is KEY_SPACE:
cv2.imwrite('frame'+str(i)+'.png', frame)
video.write(frame)
prevFrame = frame
i += 1
elif key_code is KEY_ESC:
break
cv2.destroyAllWindows()
cam.release()
video.release()
Add support for presenter and continuing frame capture
Click the right click of the presenter will trigger frame capture.from cam import OpenCV_Cam
import cv2
import os.path
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
prevFrame = None
i = 0
fname="frame.png"
if os.path.isfile(fname):
prevFrame = cv2.imread(fname)
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi',fourcc, 3.0, cam.size, isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
# Load the frame into a window named as 'Press any key to exit'
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
if key_code is KEY_SPACE or key_code == 2228224:
cv2.imwrite('frame'+str(i)+'.png', frame)
video.write(frame)
prevFrame = frame
i += 1
elif key_code is KEY_ESC:
break
cv2.destroyAllWindows()
cam.release()
video.release()
|
<commit_before>from cam import OpenCV_Cam
import cv2
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
prevFrame = None
i = 0
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi',fourcc, 3.0, (1920,1080), isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
# Load the frame into a window named as 'Press any key to exit'
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
if key_code is KEY_SPACE:
cv2.imwrite('frame'+str(i)+'.png', frame)
video.write(frame)
prevFrame = frame
i += 1
elif key_code is KEY_ESC:
break
cv2.destroyAllWindows()
cam.release()
video.release()
<commit_msg>Add support for presenter and continuing frame capture
Click the right click of the presenter will trigger frame capture.<commit_after>from cam import OpenCV_Cam
import cv2
import os.path
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
prevFrame = None
i = 0
fname="frame.png"
if os.path.isfile(fname):
prevFrame = cv2.imread(fname)
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi',fourcc, 3.0, cam.size, isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
# Load the frame into a window named as 'Press any key to exit'
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
if key_code is KEY_SPACE or key_code == 2228224:
cv2.imwrite('frame'+str(i)+'.png', frame)
video.write(frame)
prevFrame = frame
i += 1
elif key_code is KEY_ESC:
break
cv2.destroyAllWindows()
cam.release()
video.release()
|
0050711d85ba4084e9d0f32d3bad1b3400350476
|
name/feeds.py
|
name/feeds.py
|
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed
from . import app_settings
from .models import Name
class NameAtomFeedType(Atom1Feed):
"""Create an Atom feed that sets the Content-Type response
header to application/xml.
"""
mime_type = 'application/xml'
class NameAtomFeed(Feed):
feed_type = NameAtomFeedType
link = reverse_lazy("name_feed")
title = "Name App"
subtitle = "New Name Records"
author_name = app_settings.NAME_FEED_AUTHOR_NAME
author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
author_link = app_settings.NAME_FEED_AUTHOR_LINK
def items(self):
# last 5 added items
return Name.objects.order_by('-date_created')[:20]
def item_title(self, obj):
return obj.name
def item_description(self, obj):
return 'Name Type: {0}'.format(obj.get_name_type_label())
def item_link(self, obj):
return obj.get_absolute_url()
|
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed
from . import app_settings
from .models import Name
class NameAtomFeedType(Atom1Feed):
"""Create an Atom feed that sets the Content-Type response
header to application/xml.
"""
mime_type = 'application/xml'
def root_attributes(self):
attrs = super(NameAtomFeedType, self).root_attributes()
attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs
def add_item_elements(self, handler, item):
super(NameAtomFeedType, self).add_item_elements(handler, item)
if item.get('location'):
handler.addQuickElement('georss:point', item['location'])
class NameAtomFeed(Feed):
feed_type = NameAtomFeedType
link = reverse_lazy("name_feed")
title = "Name App"
subtitle = "New Name Records"
author_name = app_settings.NAME_FEED_AUTHOR_NAME
author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
author_link = app_settings.NAME_FEED_AUTHOR_LINK
def items(self):
# last 5 added items
return Name.objects.order_by('-date_created')[:20]
def item_title(self, obj):
return obj.name
def item_description(self, obj):
return 'Name Type: {0}'.format(obj.get_name_type_label())
def item_link(self, obj):
return obj.get_absolute_url()
def item_location(self, obj):
if obj.has_locations() and obj.location_set.current_location:
return obj.location_set.current_location.geo_point()
def item_extra_kwargs(self, obj):
return dict(location=self.item_location(obj))
|
Add the location as a georss:point element.
|
Add the location as a georss:point element.
|
Python
|
bsd-3-clause
|
damonkelley/django-name,damonkelley/django-name,unt-libraries/django-name,damonkelley/django-name,unt-libraries/django-name,unt-libraries/django-name
|
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed
from . import app_settings
from .models import Name
class NameAtomFeedType(Atom1Feed):
"""Create an Atom feed that sets the Content-Type response
header to application/xml.
"""
mime_type = 'application/xml'
class NameAtomFeed(Feed):
feed_type = NameAtomFeedType
link = reverse_lazy("name_feed")
title = "Name App"
subtitle = "New Name Records"
author_name = app_settings.NAME_FEED_AUTHOR_NAME
author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
author_link = app_settings.NAME_FEED_AUTHOR_LINK
def items(self):
# last 5 added items
return Name.objects.order_by('-date_created')[:20]
def item_title(self, obj):
return obj.name
def item_description(self, obj):
return 'Name Type: {0}'.format(obj.get_name_type_label())
def item_link(self, obj):
return obj.get_absolute_url()
Add the location as a georss:point element.
|
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed
from . import app_settings
from .models import Name
class NameAtomFeedType(Atom1Feed):
"""Create an Atom feed that sets the Content-Type response
header to application/xml.
"""
mime_type = 'application/xml'
def root_attributes(self):
attrs = super(NameAtomFeedType, self).root_attributes()
attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs
def add_item_elements(self, handler, item):
super(NameAtomFeedType, self).add_item_elements(handler, item)
if item.get('location'):
handler.addQuickElement('georss:point', item['location'])
class NameAtomFeed(Feed):
feed_type = NameAtomFeedType
link = reverse_lazy("name_feed")
title = "Name App"
subtitle = "New Name Records"
author_name = app_settings.NAME_FEED_AUTHOR_NAME
author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
author_link = app_settings.NAME_FEED_AUTHOR_LINK
def items(self):
# last 5 added items
return Name.objects.order_by('-date_created')[:20]
def item_title(self, obj):
return obj.name
def item_description(self, obj):
return 'Name Type: {0}'.format(obj.get_name_type_label())
def item_link(self, obj):
return obj.get_absolute_url()
def item_location(self, obj):
if obj.has_locations() and obj.location_set.current_location:
return obj.location_set.current_location.geo_point()
def item_extra_kwargs(self, obj):
return dict(location=self.item_location(obj))
|
<commit_before>from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed
from . import app_settings
from .models import Name
class NameAtomFeedType(Atom1Feed):
"""Create an Atom feed that sets the Content-Type response
header to application/xml.
"""
mime_type = 'application/xml'
class NameAtomFeed(Feed):
feed_type = NameAtomFeedType
link = reverse_lazy("name_feed")
title = "Name App"
subtitle = "New Name Records"
author_name = app_settings.NAME_FEED_AUTHOR_NAME
author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
author_link = app_settings.NAME_FEED_AUTHOR_LINK
def items(self):
# last 5 added items
return Name.objects.order_by('-date_created')[:20]
def item_title(self, obj):
return obj.name
def item_description(self, obj):
return 'Name Type: {0}'.format(obj.get_name_type_label())
def item_link(self, obj):
return obj.get_absolute_url()
<commit_msg>Add the location as a georss:point element.<commit_after>
|
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed
from . import app_settings
from .models import Name
class NameAtomFeedType(Atom1Feed):
"""Create an Atom feed that sets the Content-Type response
header to application/xml.
"""
mime_type = 'application/xml'
def root_attributes(self):
attrs = super(NameAtomFeedType, self).root_attributes()
attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs
def add_item_elements(self, handler, item):
super(NameAtomFeedType, self).add_item_elements(handler, item)
if item.get('location'):
handler.addQuickElement('georss:point', item['location'])
class NameAtomFeed(Feed):
feed_type = NameAtomFeedType
link = reverse_lazy("name_feed")
title = "Name App"
subtitle = "New Name Records"
author_name = app_settings.NAME_FEED_AUTHOR_NAME
author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
author_link = app_settings.NAME_FEED_AUTHOR_LINK
def items(self):
# last 5 added items
return Name.objects.order_by('-date_created')[:20]
def item_title(self, obj):
return obj.name
def item_description(self, obj):
return 'Name Type: {0}'.format(obj.get_name_type_label())
def item_link(self, obj):
return obj.get_absolute_url()
def item_location(self, obj):
if obj.has_locations() and obj.location_set.current_location:
return obj.location_set.current_location.geo_point()
def item_extra_kwargs(self, obj):
return dict(location=self.item_location(obj))
|
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed
from . import app_settings
from .models import Name
class NameAtomFeedType(Atom1Feed):
"""Create an Atom feed that sets the Content-Type response
header to application/xml.
"""
mime_type = 'application/xml'
class NameAtomFeed(Feed):
feed_type = NameAtomFeedType
link = reverse_lazy("name_feed")
title = "Name App"
subtitle = "New Name Records"
author_name = app_settings.NAME_FEED_AUTHOR_NAME
author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
author_link = app_settings.NAME_FEED_AUTHOR_LINK
def items(self):
# last 5 added items
return Name.objects.order_by('-date_created')[:20]
def item_title(self, obj):
return obj.name
def item_description(self, obj):
return 'Name Type: {0}'.format(obj.get_name_type_label())
def item_link(self, obj):
return obj.get_absolute_url()
Add the location as a georss:point element.from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed
from . import app_settings
from .models import Name
class NameAtomFeedType(Atom1Feed):
"""Create an Atom feed that sets the Content-Type response
header to application/xml.
"""
mime_type = 'application/xml'
def root_attributes(self):
attrs = super(NameAtomFeedType, self).root_attributes()
attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs
def add_item_elements(self, handler, item):
super(NameAtomFeedType, self).add_item_elements(handler, item)
if item.get('location'):
handler.addQuickElement('georss:point', item['location'])
class NameAtomFeed(Feed):
feed_type = NameAtomFeedType
link = reverse_lazy("name_feed")
title = "Name App"
subtitle = "New Name Records"
author_name = app_settings.NAME_FEED_AUTHOR_NAME
author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
author_link = app_settings.NAME_FEED_AUTHOR_LINK
def items(self):
# last 5 added items
return Name.objects.order_by('-date_created')[:20]
def item_title(self, obj):
return obj.name
def item_description(self, obj):
return 'Name Type: {0}'.format(obj.get_name_type_label())
def item_link(self, obj):
return obj.get_absolute_url()
def item_location(self, obj):
if obj.has_locations() and obj.location_set.current_location:
return obj.location_set.current_location.geo_point()
def item_extra_kwargs(self, obj):
return dict(location=self.item_location(obj))
|
<commit_before>from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed
from . import app_settings
from .models import Name
class NameAtomFeedType(Atom1Feed):
"""Create an Atom feed that sets the Content-Type response
header to application/xml.
"""
mime_type = 'application/xml'
class NameAtomFeed(Feed):
feed_type = NameAtomFeedType
link = reverse_lazy("name_feed")
title = "Name App"
subtitle = "New Name Records"
author_name = app_settings.NAME_FEED_AUTHOR_NAME
author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
author_link = app_settings.NAME_FEED_AUTHOR_LINK
def items(self):
# last 5 added items
return Name.objects.order_by('-date_created')[:20]
def item_title(self, obj):
return obj.name
def item_description(self, obj):
return 'Name Type: {0}'.format(obj.get_name_type_label())
def item_link(self, obj):
return obj.get_absolute_url()
<commit_msg>Add the location as a georss:point element.<commit_after>from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed
from . import app_settings
from .models import Name
class NameAtomFeedType(Atom1Feed):
"""Create an Atom feed that sets the Content-Type response
header to application/xml.
"""
mime_type = 'application/xml'
def root_attributes(self):
attrs = super(NameAtomFeedType, self).root_attributes()
attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs
def add_item_elements(self, handler, item):
super(NameAtomFeedType, self).add_item_elements(handler, item)
if item.get('location'):
handler.addQuickElement('georss:point', item['location'])
class NameAtomFeed(Feed):
feed_type = NameAtomFeedType
link = reverse_lazy("name_feed")
title = "Name App"
subtitle = "New Name Records"
author_name = app_settings.NAME_FEED_AUTHOR_NAME
author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
author_link = app_settings.NAME_FEED_AUTHOR_LINK
def items(self):
# last 5 added items
return Name.objects.order_by('-date_created')[:20]
def item_title(self, obj):
return obj.name
def item_description(self, obj):
return 'Name Type: {0}'.format(obj.get_name_type_label())
def item_link(self, obj):
return obj.get_absolute_url()
def item_location(self, obj):
if obj.has_locations() and obj.location_set.current_location:
return obj.location_set.current_location.geo_point()
def item_extra_kwargs(self, obj):
return dict(location=self.item_location(obj))
|
21f2a2a053c5d5cc24651a8aefd8c24357a85223
|
demos/minimal.py
|
demos/minimal.py
|
#!/usr/bin/env python
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
|
#!/usr/bin/env python
# To run this example, you need to set the GI_TYPELIB_PATH environment
# variable to point to the gir directory:
#
# export GI_TYPELIB_PATH=$GI_TYPELIB_PATH:/usr/local/lib/girepository-1.0/
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
|
Add description how to run the python demo
|
Add description how to run the python demo
|
Python
|
lgpl-2.1
|
Distrotech/libchamplain,Distrotech/libchamplain,Distrotech/libchamplain,Distrotech/libchamplain,Distrotech/libchamplain
|
#!/usr/bin/env python
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
Add description how to run the python demo
|
#!/usr/bin/env python
# To run this example, you need to set the GI_TYPELIB_PATH environment
# variable to point to the gir directory:
#
# export GI_TYPELIB_PATH=$GI_TYPELIB_PATH:/usr/local/lib/girepository-1.0/
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
|
<commit_before>#!/usr/bin/env python
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
<commit_msg>Add description how to run the python demo<commit_after>
|
#!/usr/bin/env python
# To run this example, you need to set the GI_TYPELIB_PATH environment
# variable to point to the gir directory:
#
# export GI_TYPELIB_PATH=$GI_TYPELIB_PATH:/usr/local/lib/girepository-1.0/
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
|
#!/usr/bin/env python
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
Add description how to run the python demo#!/usr/bin/env python
# To run this example, you need to set the GI_TYPELIB_PATH environment
# variable to point to the gir directory:
#
# export GI_TYPELIB_PATH=$GI_TYPELIB_PATH:/usr/local/lib/girepository-1.0/
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
|
<commit_before>#!/usr/bin/env python
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
<commit_msg>Add description how to run the python demo<commit_after>#!/usr/bin/env python
# To run this example, you need to set the GI_TYPELIB_PATH environment
# variable to point to the gir directory:
#
# export GI_TYPELIB_PATH=$GI_TYPELIB_PATH:/usr/local/lib/girepository-1.0/
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
|
4afd1c39d3fff4f3d84b9c1e5e47cb2dc3d1ff6e
|
pycom/objects.py
|
pycom/objects.py
|
# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
|
# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
v = val
|
Set the alias of the function `val` to `v`.
|
Set the alias of the function `val` to `v`.
|
Python
|
mit
|
xgfone/pycom,xgfone/xutils
|
# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
Set the alias of the function `val` to `v`.
|
# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
v = val
|
<commit_before># encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
<commit_msg>Set the alias of the function `val` to `v`.<commit_after>
|
# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
v = val
|
# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
Set the alias of the function `val` to `v`.# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
v = val
|
<commit_before># encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
<commit_msg>Set the alias of the function `val` to `v`.<commit_after># encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
v = val
|
d174159ef6af50ec28146fd0a91ea3d677ee234f
|
tests/integration/test_redirection_absolute.py
|
tests/integration/test_redirection_absolute.py
|
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
nikola.utils.makedirs(os.path.join(target_dir, "files", "foo"))
target_path = os.path.join(target_dir, "files", "foo", "bar.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("foo")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/foo.html", "/foo/bar.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
redirects_dir = os.path.join(target_dir, "files", "redirects")
nikola.utils.makedirs(redirects_dir)
target_path = os.path.join(redirects_dir, "absolute_source.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("absolute")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/absolute.html", "/redirects/absolute_source.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
Refactor tests in preparation of the merge of redirect tests.
|
Refactor tests in preparation of the merge of redirect tests.
|
Python
|
mit
|
getnikola/nikola,okin/nikola,getnikola/nikola,okin/nikola,getnikola/nikola,okin/nikola,okin/nikola,getnikola/nikola
|
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
nikola.utils.makedirs(os.path.join(target_dir, "files", "foo"))
target_path = os.path.join(target_dir, "files", "foo", "bar.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("foo")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/foo.html", "/foo/bar.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
Refactor tests in preparation of the merge of redirect tests.
|
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
redirects_dir = os.path.join(target_dir, "files", "redirects")
nikola.utils.makedirs(redirects_dir)
target_path = os.path.join(redirects_dir, "absolute_source.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("absolute")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/absolute.html", "/redirects/absolute_source.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
<commit_before>"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
nikola.utils.makedirs(os.path.join(target_dir, "files", "foo"))
target_path = os.path.join(target_dir, "files", "foo", "bar.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("foo")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/foo.html", "/foo/bar.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
<commit_msg>Refactor tests in preparation of the merge of redirect tests.<commit_after>
|
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
redirects_dir = os.path.join(target_dir, "files", "redirects")
nikola.utils.makedirs(redirects_dir)
target_path = os.path.join(redirects_dir, "absolute_source.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("absolute")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/absolute.html", "/redirects/absolute_source.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
nikola.utils.makedirs(os.path.join(target_dir, "files", "foo"))
target_path = os.path.join(target_dir, "files", "foo", "bar.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("foo")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/foo.html", "/foo/bar.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
Refactor tests in preparation of the merge of redirect tests."""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
redirects_dir = os.path.join(target_dir, "files", "redirects")
nikola.utils.makedirs(redirects_dir)
target_path = os.path.join(redirects_dir, "absolute_source.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("absolute")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/absolute.html", "/redirects/absolute_source.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
<commit_before>"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
nikola.utils.makedirs(os.path.join(target_dir, "files", "foo"))
target_path = os.path.join(target_dir, "files", "foo", "bar.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("foo")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/foo.html", "/foo/bar.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
<commit_msg>Refactor tests in preparation of the merge of redirect tests.<commit_after>"""Check REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
redirects_dir = os.path.join(target_dir, "files", "redirects")
nikola.utils.makedirs(redirects_dir)
target_path = os.path.join(redirects_dir, "absolute_source.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("absolute")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("posts/absolute.html", "/redirects/absolute_source.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
6d54cbe4eb1e946bdc96dc4701dd6d5ab164c63e
|
oshino/agents/subprocess_agent.py
|
oshino/agents/subprocess_agent.py
|
import asyncio
from . import Agent
from asyncio.subprocess import PIPE
class SubprocessAgent(Agent):
@property
def script(self):
return self._data["script"]
def is_valid(self):
return "script" in self._data
async def process(self, event_fn):
logger = self.get_logger()
proc = await asyncio.create_subprocess_shell(self.script)
exitcode = await proc.wait()
state = "ok" if exitcode == 0 else "failure"
event_fn(service=self.prefix,
state=state,
metric_f=1.0,
description="Exit code: {0}".format(exitcode)
)
|
import asyncio
from . import Agent
from asyncio.subprocess import PIPE
class SubprocessAgent(Agent):
@property
def script(self):
return self._data["script"]
def is_valid(self):
return "script" in self._data
async def process(self, event_fn):
logger = self.get_logger()
proc = await asyncio.create_subprocess_shell(self.script)
exitcode = await proc.wait()
state = "ok" if exitcode == 0 else "failure"
event_fn(service=self.prefix + "shell",
state=state,
metric_f=1.0,
description="Exit code: {0}".format(exitcode)
)
|
Add postfix for subprocess metric
|
Add postfix for subprocess metric
|
Python
|
mit
|
CodersOfTheNight/oshino
|
import asyncio
from . import Agent
from asyncio.subprocess import PIPE
class SubprocessAgent(Agent):
@property
def script(self):
return self._data["script"]
def is_valid(self):
return "script" in self._data
async def process(self, event_fn):
logger = self.get_logger()
proc = await asyncio.create_subprocess_shell(self.script)
exitcode = await proc.wait()
state = "ok" if exitcode == 0 else "failure"
event_fn(service=self.prefix,
state=state,
metric_f=1.0,
description="Exit code: {0}".format(exitcode)
)
Add postfix for subprocess metric
|
import asyncio
from . import Agent
from asyncio.subprocess import PIPE
class SubprocessAgent(Agent):
@property
def script(self):
return self._data["script"]
def is_valid(self):
return "script" in self._data
async def process(self, event_fn):
logger = self.get_logger()
proc = await asyncio.create_subprocess_shell(self.script)
exitcode = await proc.wait()
state = "ok" if exitcode == 0 else "failure"
event_fn(service=self.prefix + "shell",
state=state,
metric_f=1.0,
description="Exit code: {0}".format(exitcode)
)
|
<commit_before>import asyncio
from . import Agent
from asyncio.subprocess import PIPE
class SubprocessAgent(Agent):
@property
def script(self):
return self._data["script"]
def is_valid(self):
return "script" in self._data
async def process(self, event_fn):
logger = self.get_logger()
proc = await asyncio.create_subprocess_shell(self.script)
exitcode = await proc.wait()
state = "ok" if exitcode == 0 else "failure"
event_fn(service=self.prefix,
state=state,
metric_f=1.0,
description="Exit code: {0}".format(exitcode)
)
<commit_msg>Add postfix for subprocess metric<commit_after>
|
import asyncio
from . import Agent
from asyncio.subprocess import PIPE
class SubprocessAgent(Agent):
@property
def script(self):
return self._data["script"]
def is_valid(self):
return "script" in self._data
async def process(self, event_fn):
logger = self.get_logger()
proc = await asyncio.create_subprocess_shell(self.script)
exitcode = await proc.wait()
state = "ok" if exitcode == 0 else "failure"
event_fn(service=self.prefix + "shell",
state=state,
metric_f=1.0,
description="Exit code: {0}".format(exitcode)
)
|
import asyncio
from . import Agent
from asyncio.subprocess import PIPE
class SubprocessAgent(Agent):
@property
def script(self):
return self._data["script"]
def is_valid(self):
return "script" in self._data
async def process(self, event_fn):
logger = self.get_logger()
proc = await asyncio.create_subprocess_shell(self.script)
exitcode = await proc.wait()
state = "ok" if exitcode == 0 else "failure"
event_fn(service=self.prefix,
state=state,
metric_f=1.0,
description="Exit code: {0}".format(exitcode)
)
Add postfix for subprocess metricimport asyncio
from . import Agent
from asyncio.subprocess import PIPE
class SubprocessAgent(Agent):
@property
def script(self):
return self._data["script"]
def is_valid(self):
return "script" in self._data
async def process(self, event_fn):
logger = self.get_logger()
proc = await asyncio.create_subprocess_shell(self.script)
exitcode = await proc.wait()
state = "ok" if exitcode == 0 else "failure"
event_fn(service=self.prefix + "shell",
state=state,
metric_f=1.0,
description="Exit code: {0}".format(exitcode)
)
|
<commit_before>import asyncio
from . import Agent
from asyncio.subprocess import PIPE
class SubprocessAgent(Agent):
@property
def script(self):
return self._data["script"]
def is_valid(self):
return "script" in self._data
async def process(self, event_fn):
logger = self.get_logger()
proc = await asyncio.create_subprocess_shell(self.script)
exitcode = await proc.wait()
state = "ok" if exitcode == 0 else "failure"
event_fn(service=self.prefix,
state=state,
metric_f=1.0,
description="Exit code: {0}".format(exitcode)
)
<commit_msg>Add postfix for subprocess metric<commit_after>import asyncio
from . import Agent
from asyncio.subprocess import PIPE
class SubprocessAgent(Agent):
@property
def script(self):
return self._data["script"]
def is_valid(self):
return "script" in self._data
async def process(self, event_fn):
logger = self.get_logger()
proc = await asyncio.create_subprocess_shell(self.script)
exitcode = await proc.wait()
state = "ok" if exitcode == 0 else "failure"
event_fn(service=self.prefix + "shell",
state=state,
metric_f=1.0,
description="Exit code: {0}".format(exitcode)
)
|
3028e6de1c0367939028d9b21bd48468c01096ce
|
fabfile.py
|
fabfile.py
|
# -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.utils as utils
import oinspect.sphinxify as spxy
def _show_page(content, fname):
with open(fname, 'wb') as f:
f.write(utils.to_binary_string(content, encoding='utf-8'))
webbrowser.open_new_tab(fname)
def test_basic():
"""Test with an empty context"""
docstring = 'A test'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_basic.html')
def test_math():
"""Test a docstring with Latex on it"""
docstring = 'This is a rational number :math:`\\frac{x}{y}`'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_math.html')
def test_numpy_sin():
"""Test for numpy.sin docstring"""
import numpy as np
docstring = np.sin.__doc__
content = spxy.sphinxify(docstring, spxy.generate_context(name='sin'))
_show_page(content, '/tmp/test_np_sin.html')
def run_all():
"""Run all tests"""
test_basic()
test_math()
test_numpy_sin()
|
# -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.utils as utils
import oinspect.sphinxify as spxy
def _show_page(content, fname):
with open(fname, 'wb') as f:
f.write(utils.to_binary_string(content, encoding='utf-8'))
webbrowser.open_new_tab(fname)
def test_basic():
"""Test with an empty context"""
docstring = 'A test'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_basic.html')
def test_math():
"""Test a docstring with Latex on it"""
docstring = 'This is a rational number :math:`\\frac{x}{y}`'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_math.html')
def test_numpy_sin():
"""Test for numpy.sin docstring"""
import numpy as np
docstring = np.sin.__doc__
content = spxy.sphinxify(docstring, spxy.generate_context(name='sin'))
_show_page(content, '/tmp/test_np_sin.html')
def run_all():
"""Run all tests"""
test_basic()
test_math()
test_numpy_sin()
|
Add empty line for pep8
|
Add empty line for pep8
|
Python
|
bsd-3-clause
|
techtonik/docrepr,techtonik/docrepr,spyder-ide/docrepr,spyder-ide/docrepr,spyder-ide/docrepr,techtonik/docrepr
|
# -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.utils as utils
import oinspect.sphinxify as spxy
def _show_page(content, fname):
with open(fname, 'wb') as f:
f.write(utils.to_binary_string(content, encoding='utf-8'))
webbrowser.open_new_tab(fname)
def test_basic():
"""Test with an empty context"""
docstring = 'A test'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_basic.html')
def test_math():
"""Test a docstring with Latex on it"""
docstring = 'This is a rational number :math:`\\frac{x}{y}`'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_math.html')
def test_numpy_sin():
"""Test for numpy.sin docstring"""
import numpy as np
docstring = np.sin.__doc__
content = spxy.sphinxify(docstring, spxy.generate_context(name='sin'))
_show_page(content, '/tmp/test_np_sin.html')
def run_all():
"""Run all tests"""
test_basic()
test_math()
test_numpy_sin()
Add empty line for pep8
|
# -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.utils as utils
import oinspect.sphinxify as spxy
def _show_page(content, fname):
with open(fname, 'wb') as f:
f.write(utils.to_binary_string(content, encoding='utf-8'))
webbrowser.open_new_tab(fname)
def test_basic():
"""Test with an empty context"""
docstring = 'A test'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_basic.html')
def test_math():
"""Test a docstring with Latex on it"""
docstring = 'This is a rational number :math:`\\frac{x}{y}`'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_math.html')
def test_numpy_sin():
"""Test for numpy.sin docstring"""
import numpy as np
docstring = np.sin.__doc__
content = spxy.sphinxify(docstring, spxy.generate_context(name='sin'))
_show_page(content, '/tmp/test_np_sin.html')
def run_all():
"""Run all tests"""
test_basic()
test_math()
test_numpy_sin()
|
<commit_before># -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.utils as utils
import oinspect.sphinxify as spxy
def _show_page(content, fname):
with open(fname, 'wb') as f:
f.write(utils.to_binary_string(content, encoding='utf-8'))
webbrowser.open_new_tab(fname)
def test_basic():
"""Test with an empty context"""
docstring = 'A test'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_basic.html')
def test_math():
"""Test a docstring with Latex on it"""
docstring = 'This is a rational number :math:`\\frac{x}{y}`'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_math.html')
def test_numpy_sin():
"""Test for numpy.sin docstring"""
import numpy as np
docstring = np.sin.__doc__
content = spxy.sphinxify(docstring, spxy.generate_context(name='sin'))
_show_page(content, '/tmp/test_np_sin.html')
def run_all():
"""Run all tests"""
test_basic()
test_math()
test_numpy_sin()
<commit_msg>Add empty line for pep8<commit_after>
|
# -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.utils as utils
import oinspect.sphinxify as spxy
def _show_page(content, fname):
with open(fname, 'wb') as f:
f.write(utils.to_binary_string(content, encoding='utf-8'))
webbrowser.open_new_tab(fname)
def test_basic():
"""Test with an empty context"""
docstring = 'A test'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_basic.html')
def test_math():
"""Test a docstring with Latex on it"""
docstring = 'This is a rational number :math:`\\frac{x}{y}`'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_math.html')
def test_numpy_sin():
"""Test for numpy.sin docstring"""
import numpy as np
docstring = np.sin.__doc__
content = spxy.sphinxify(docstring, spxy.generate_context(name='sin'))
_show_page(content, '/tmp/test_np_sin.html')
def run_all():
"""Run all tests"""
test_basic()
test_math()
test_numpy_sin()
|
# -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.utils as utils
import oinspect.sphinxify as spxy
def _show_page(content, fname):
with open(fname, 'wb') as f:
f.write(utils.to_binary_string(content, encoding='utf-8'))
webbrowser.open_new_tab(fname)
def test_basic():
"""Test with an empty context"""
docstring = 'A test'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_basic.html')
def test_math():
"""Test a docstring with Latex on it"""
docstring = 'This is a rational number :math:`\\frac{x}{y}`'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_math.html')
def test_numpy_sin():
"""Test for numpy.sin docstring"""
import numpy as np
docstring = np.sin.__doc__
content = spxy.sphinxify(docstring, spxy.generate_context(name='sin'))
_show_page(content, '/tmp/test_np_sin.html')
def run_all():
"""Run all tests"""
test_basic()
test_math()
test_numpy_sin()
Add empty line for pep8# -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.utils as utils
import oinspect.sphinxify as spxy
def _show_page(content, fname):
with open(fname, 'wb') as f:
f.write(utils.to_binary_string(content, encoding='utf-8'))
webbrowser.open_new_tab(fname)
def test_basic():
"""Test with an empty context"""
docstring = 'A test'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_basic.html')
def test_math():
"""Test a docstring with Latex on it"""
docstring = 'This is a rational number :math:`\\frac{x}{y}`'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_math.html')
def test_numpy_sin():
"""Test for numpy.sin docstring"""
import numpy as np
docstring = np.sin.__doc__
content = spxy.sphinxify(docstring, spxy.generate_context(name='sin'))
_show_page(content, '/tmp/test_np_sin.html')
def run_all():
"""Run all tests"""
test_basic()
test_math()
test_numpy_sin()
|
<commit_before># -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.utils as utils
import oinspect.sphinxify as spxy
def _show_page(content, fname):
with open(fname, 'wb') as f:
f.write(utils.to_binary_string(content, encoding='utf-8'))
webbrowser.open_new_tab(fname)
def test_basic():
"""Test with an empty context"""
docstring = 'A test'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_basic.html')
def test_math():
"""Test a docstring with Latex on it"""
docstring = 'This is a rational number :math:`\\frac{x}{y}`'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_math.html')
def test_numpy_sin():
"""Test for numpy.sin docstring"""
import numpy as np
docstring = np.sin.__doc__
content = spxy.sphinxify(docstring, spxy.generate_context(name='sin'))
_show_page(content, '/tmp/test_np_sin.html')
def run_all():
"""Run all tests"""
test_basic()
test_math()
test_numpy_sin()
<commit_msg>Add empty line for pep8<commit_after># -*- coding: utf-8 -*
"""
Simple fabric file to test oinspect output
"""
from __future__ import print_function
import webbrowser
import oinspect.utils as utils
import oinspect.sphinxify as spxy
def _show_page(content, fname):
with open(fname, 'wb') as f:
f.write(utils.to_binary_string(content, encoding='utf-8'))
webbrowser.open_new_tab(fname)
def test_basic():
"""Test with an empty context"""
docstring = 'A test'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_basic.html')
def test_math():
"""Test a docstring with Latex on it"""
docstring = 'This is a rational number :math:`\\frac{x}{y}`'
content = spxy.sphinxify(docstring, spxy.generate_context())
_show_page(content, '/tmp/test_math.html')
def test_numpy_sin():
"""Test for numpy.sin docstring"""
import numpy as np
docstring = np.sin.__doc__
content = spxy.sphinxify(docstring, spxy.generate_context(name='sin'))
_show_page(content, '/tmp/test_np_sin.html')
def run_all():
"""Run all tests"""
test_basic()
test_math()
test_numpy_sin()
|
7db97c8d50091bd91bf8cb874fe3540eff499510
|
fabfile.py
|
fabfile.py
|
from fabric.api import cd, sudo, env
env.path = '/var/praekelt/vumi-go'
def deploy_go():
with cd(env.path):
sudo('git pull', user='vumi')
_venv_command('./ve/bin/django-admin.py collectstatic --pythonpath=. '
'--settings=go.settings --noinput')
def deploy_vumi():
with cd('%s/ve/src/vumi/' % (env.path,)):
sudo('git pull', user='vumi')
def restart_celery():
with cd(env.path):
supervisorctl('restart vumi_celery:celery')
def restart_gunicorn():
"""
Intentionally restart the gunicorns 1 by 1 so HAProxy is given
time to load balance across gunicorns that have either already restarted
or are waiting to be restarted
"""
with cd(env.path):
for i in range(1, 5):
supervisorctl('restart vumi_web:goui_%s' % (i,))
def supervisorctl(command):
return sudo('supervisorctl %s' % (command,))
def _venv_command(command, user='vumi'):
return sudo('. ve/bin/activate && %s' % (command,), user=user)
|
from fabric.api import cd, sudo, env
env.path = '/var/praekelt/vumi-go'
def deploy_go():
with cd(env.path):
sudo('git pull', user='vumi')
_venv_command('./ve/bin/django-admin.py collectstatic --pythonpath=. '
'--settings=go.settings --noinput')
def deploy_vumi():
with cd('%s/ve/src/vumi/' % (env.path,)):
sudo('git pull', user='vumi')
def restart_celery():
with cd(env.path):
supervisorctl('restart vumi_celery:celery')
def restart_gunicorn():
"""
Intentionally restart the gunicorns 1 by 1 so HAProxy is given
time to load balance across gunicorns that have either already restarted
or are waiting to be restarted
"""
with cd(env.path):
for i in range(1, 5):
supervisorctl('restart vumi_web:goui_%s' % (i,))
def update_nodejs_modules():
"""
Update the Node.js modules that the JS sandbox depends on.
"""
npm_install("vumigo_v01")
def supervisorctl(command):
return sudo('supervisorctl %s' % (command,))
def npm_install(package):
return sudo('npm install --global %s' % (package,))
def _venv_command(command, user='vumi'):
return sudo('. ve/bin/activate && %s' % (command,), user=user)
|
Add command for updating the node.js modules needed by the JS sandbox.
|
Add command for updating the node.js modules needed by the JS sandbox.
|
Python
|
bsd-3-clause
|
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
|
from fabric.api import cd, sudo, env
env.path = '/var/praekelt/vumi-go'
def deploy_go():
with cd(env.path):
sudo('git pull', user='vumi')
_venv_command('./ve/bin/django-admin.py collectstatic --pythonpath=. '
'--settings=go.settings --noinput')
def deploy_vumi():
with cd('%s/ve/src/vumi/' % (env.path,)):
sudo('git pull', user='vumi')
def restart_celery():
with cd(env.path):
supervisorctl('restart vumi_celery:celery')
def restart_gunicorn():
"""
Intentionally restart the gunicorns 1 by 1 so HAProxy is given
time to load balance across gunicorns that have either already restarted
or are waiting to be restarted
"""
with cd(env.path):
for i in range(1, 5):
supervisorctl('restart vumi_web:goui_%s' % (i,))
def supervisorctl(command):
return sudo('supervisorctl %s' % (command,))
def _venv_command(command, user='vumi'):
return sudo('. ve/bin/activate && %s' % (command,), user=user)
Add command for updating the node.js modules needed by the JS sandbox.
|
from fabric.api import cd, sudo, env
env.path = '/var/praekelt/vumi-go'
def deploy_go():
with cd(env.path):
sudo('git pull', user='vumi')
_venv_command('./ve/bin/django-admin.py collectstatic --pythonpath=. '
'--settings=go.settings --noinput')
def deploy_vumi():
with cd('%s/ve/src/vumi/' % (env.path,)):
sudo('git pull', user='vumi')
def restart_celery():
with cd(env.path):
supervisorctl('restart vumi_celery:celery')
def restart_gunicorn():
"""
Intentionally restart the gunicorns 1 by 1 so HAProxy is given
time to load balance across gunicorns that have either already restarted
or are waiting to be restarted
"""
with cd(env.path):
for i in range(1, 5):
supervisorctl('restart vumi_web:goui_%s' % (i,))
def update_nodejs_modules():
"""
Update the Node.js modules that the JS sandbox depends on.
"""
npm_install("vumigo_v01")
def supervisorctl(command):
return sudo('supervisorctl %s' % (command,))
def npm_install(package):
return sudo('npm install --global %s' % (package,))
def _venv_command(command, user='vumi'):
return sudo('. ve/bin/activate && %s' % (command,), user=user)
|
<commit_before>from fabric.api import cd, sudo, env
env.path = '/var/praekelt/vumi-go'
def deploy_go():
with cd(env.path):
sudo('git pull', user='vumi')
_venv_command('./ve/bin/django-admin.py collectstatic --pythonpath=. '
'--settings=go.settings --noinput')
def deploy_vumi():
with cd('%s/ve/src/vumi/' % (env.path,)):
sudo('git pull', user='vumi')
def restart_celery():
with cd(env.path):
supervisorctl('restart vumi_celery:celery')
def restart_gunicorn():
"""
Intentionally restart the gunicorns 1 by 1 so HAProxy is given
time to load balance across gunicorns that have either already restarted
or are waiting to be restarted
"""
with cd(env.path):
for i in range(1, 5):
supervisorctl('restart vumi_web:goui_%s' % (i,))
def supervisorctl(command):
return sudo('supervisorctl %s' % (command,))
def _venv_command(command, user='vumi'):
return sudo('. ve/bin/activate && %s' % (command,), user=user)
<commit_msg>Add command for updating the node.js modules needed by the JS sandbox.<commit_after>
|
from fabric.api import cd, sudo, env
env.path = '/var/praekelt/vumi-go'
def deploy_go():
with cd(env.path):
sudo('git pull', user='vumi')
_venv_command('./ve/bin/django-admin.py collectstatic --pythonpath=. '
'--settings=go.settings --noinput')
def deploy_vumi():
with cd('%s/ve/src/vumi/' % (env.path,)):
sudo('git pull', user='vumi')
def restart_celery():
with cd(env.path):
supervisorctl('restart vumi_celery:celery')
def restart_gunicorn():
"""
Intentionally restart the gunicorns 1 by 1 so HAProxy is given
time to load balance across gunicorns that have either already restarted
or are waiting to be restarted
"""
with cd(env.path):
for i in range(1, 5):
supervisorctl('restart vumi_web:goui_%s' % (i,))
def update_nodejs_modules():
"""
Update the Node.js modules that the JS sandbox depends on.
"""
npm_install("vumigo_v01")
def supervisorctl(command):
return sudo('supervisorctl %s' % (command,))
def npm_install(package):
return sudo('npm install --global %s' % (package,))
def _venv_command(command, user='vumi'):
return sudo('. ve/bin/activate && %s' % (command,), user=user)
|
from fabric.api import cd, sudo, env
env.path = '/var/praekelt/vumi-go'
def deploy_go():
with cd(env.path):
sudo('git pull', user='vumi')
_venv_command('./ve/bin/django-admin.py collectstatic --pythonpath=. '
'--settings=go.settings --noinput')
def deploy_vumi():
with cd('%s/ve/src/vumi/' % (env.path,)):
sudo('git pull', user='vumi')
def restart_celery():
with cd(env.path):
supervisorctl('restart vumi_celery:celery')
def restart_gunicorn():
"""
Intentionally restart the gunicorns 1 by 1 so HAProxy is given
time to load balance across gunicorns that have either already restarted
or are waiting to be restarted
"""
with cd(env.path):
for i in range(1, 5):
supervisorctl('restart vumi_web:goui_%s' % (i,))
def supervisorctl(command):
return sudo('supervisorctl %s' % (command,))
def _venv_command(command, user='vumi'):
return sudo('. ve/bin/activate && %s' % (command,), user=user)
Add command for updating the node.js modules needed by the JS sandbox.from fabric.api import cd, sudo, env
env.path = '/var/praekelt/vumi-go'
def deploy_go():
with cd(env.path):
sudo('git pull', user='vumi')
_venv_command('./ve/bin/django-admin.py collectstatic --pythonpath=. '
'--settings=go.settings --noinput')
def deploy_vumi():
with cd('%s/ve/src/vumi/' % (env.path,)):
sudo('git pull', user='vumi')
def restart_celery():
with cd(env.path):
supervisorctl('restart vumi_celery:celery')
def restart_gunicorn():
"""
Intentionally restart the gunicorns 1 by 1 so HAProxy is given
time to load balance across gunicorns that have either already restarted
or are waiting to be restarted
"""
with cd(env.path):
for i in range(1, 5):
supervisorctl('restart vumi_web:goui_%s' % (i,))
def update_nodejs_modules():
"""
Update the Node.js modules that the JS sandbox depends on.
"""
npm_install("vumigo_v01")
def supervisorctl(command):
return sudo('supervisorctl %s' % (command,))
def npm_install(package):
return sudo('npm install --global %s' % (package,))
def _venv_command(command, user='vumi'):
return sudo('. ve/bin/activate && %s' % (command,), user=user)
|
<commit_before>from fabric.api import cd, sudo, env
env.path = '/var/praekelt/vumi-go'
def deploy_go():
with cd(env.path):
sudo('git pull', user='vumi')
_venv_command('./ve/bin/django-admin.py collectstatic --pythonpath=. '
'--settings=go.settings --noinput')
def deploy_vumi():
with cd('%s/ve/src/vumi/' % (env.path,)):
sudo('git pull', user='vumi')
def restart_celery():
with cd(env.path):
supervisorctl('restart vumi_celery:celery')
def restart_gunicorn():
"""
Intentionally restart the gunicorns 1 by 1 so HAProxy is given
time to load balance across gunicorns that have either already restarted
or are waiting to be restarted
"""
with cd(env.path):
for i in range(1, 5):
supervisorctl('restart vumi_web:goui_%s' % (i,))
def supervisorctl(command):
return sudo('supervisorctl %s' % (command,))
def _venv_command(command, user='vumi'):
return sudo('. ve/bin/activate && %s' % (command,), user=user)
<commit_msg>Add command for updating the node.js modules needed by the JS sandbox.<commit_after>from fabric.api import cd, sudo, env
env.path = '/var/praekelt/vumi-go'
def deploy_go():
with cd(env.path):
sudo('git pull', user='vumi')
_venv_command('./ve/bin/django-admin.py collectstatic --pythonpath=. '
'--settings=go.settings --noinput')
def deploy_vumi():
with cd('%s/ve/src/vumi/' % (env.path,)):
sudo('git pull', user='vumi')
def restart_celery():
with cd(env.path):
supervisorctl('restart vumi_celery:celery')
def restart_gunicorn():
"""
Intentionally restart the gunicorns 1 by 1 so HAProxy is given
time to load balance across gunicorns that have either already restarted
or are waiting to be restarted
"""
with cd(env.path):
for i in range(1, 5):
supervisorctl('restart vumi_web:goui_%s' % (i,))
def update_nodejs_modules():
"""
Update the Node.js modules that the JS sandbox depends on.
"""
npm_install("vumigo_v01")
def supervisorctl(command):
return sudo('supervisorctl %s' % (command,))
def npm_install(package):
return sudo('npm install --global %s' % (package,))
def _venv_command(command, user='vumi'):
return sudo('. ve/bin/activate && %s' % (command,), user=user)
|
723d97a490a3d0d5e04d118343ac0574cda74476
|
tests/matchers/test_equal.py
|
tests/matchers/test_equal.py
|
from robber import expect
from robber.matchers.equal import Equal, NotEqual
class TestEqual:
def test_matches(self):
expect(Equal(1, 1).matches()).to.eq(True)
expect(Equal(1, 2).matches()).to.eq(False)
def test_failure_message(self):
equal = Equal('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" to equal "expected"'
def test_register(self):
expect(expect.matcher('eq')) == Equal
expect(expect.matcher('__eq__')) == Equal
class TestNotEqual:
def test_matches(self):
expect(NotEqual(1, 2).matches()).to.eq(True)
expect(NotEqual(1, 1).matches()).to.eq(False)
def test_failure_message(self):
equal = NotEqual('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "expected"'
def test_register(self):
expect(expect.matcher('not_eq')) == NotEqual
expect(expect.matcher('ne')) == NotEqual
expect(expect.matcher('__ne__')) == NotEqual
|
from robber import expect
from robber.matchers.equal import Equal, NotEqual
class TestEqual:
def test_matches(self):
expect(Equal(1, 1).matches()).to.eq(True)
expect(Equal(1, 2).matches()).to.eq(False)
def test_failure_message(self):
equal = Equal('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" to equal "expected"'
def test_failure_message_with_not_to(self):
equal = Equal('actual', 'actual', is_negated=True)
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "actual"'
def test_register(self):
expect(expect.matcher('eq')) == Equal
expect(expect.matcher('__eq__')) == Equal
class TestNotEqual:
def test_matches(self):
expect(NotEqual(1, 2).matches()).to.eq(True)
expect(NotEqual(1, 1).matches()).to.eq(False)
def test_failure_message(self):
equal = NotEqual('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "expected"'
def test_register(self):
expect(expect.matcher('not_eq')) == NotEqual
expect(expect.matcher('ne')) == NotEqual
expect(expect.matcher('__ne__')) == NotEqual
|
Add failure message test for not_to.be.eq
|
[f] Add failure message test for not_to.be.eq
|
Python
|
mit
|
vesln/robber.py
|
from robber import expect
from robber.matchers.equal import Equal, NotEqual
class TestEqual:
def test_matches(self):
expect(Equal(1, 1).matches()).to.eq(True)
expect(Equal(1, 2).matches()).to.eq(False)
def test_failure_message(self):
equal = Equal('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" to equal "expected"'
def test_register(self):
expect(expect.matcher('eq')) == Equal
expect(expect.matcher('__eq__')) == Equal
class TestNotEqual:
def test_matches(self):
expect(NotEqual(1, 2).matches()).to.eq(True)
expect(NotEqual(1, 1).matches()).to.eq(False)
def test_failure_message(self):
equal = NotEqual('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "expected"'
def test_register(self):
expect(expect.matcher('not_eq')) == NotEqual
expect(expect.matcher('ne')) == NotEqual
expect(expect.matcher('__ne__')) == NotEqual
[f] Add failure message test for not_to.be.eq
|
from robber import expect
from robber.matchers.equal import Equal, NotEqual
class TestEqual:
def test_matches(self):
expect(Equal(1, 1).matches()).to.eq(True)
expect(Equal(1, 2).matches()).to.eq(False)
def test_failure_message(self):
equal = Equal('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" to equal "expected"'
def test_failure_message_with_not_to(self):
equal = Equal('actual', 'actual', is_negated=True)
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "actual"'
def test_register(self):
expect(expect.matcher('eq')) == Equal
expect(expect.matcher('__eq__')) == Equal
class TestNotEqual:
def test_matches(self):
expect(NotEqual(1, 2).matches()).to.eq(True)
expect(NotEqual(1, 1).matches()).to.eq(False)
def test_failure_message(self):
equal = NotEqual('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "expected"'
def test_register(self):
expect(expect.matcher('not_eq')) == NotEqual
expect(expect.matcher('ne')) == NotEqual
expect(expect.matcher('__ne__')) == NotEqual
|
<commit_before>from robber import expect
from robber.matchers.equal import Equal, NotEqual
class TestEqual:
def test_matches(self):
expect(Equal(1, 1).matches()).to.eq(True)
expect(Equal(1, 2).matches()).to.eq(False)
def test_failure_message(self):
equal = Equal('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" to equal "expected"'
def test_register(self):
expect(expect.matcher('eq')) == Equal
expect(expect.matcher('__eq__')) == Equal
class TestNotEqual:
def test_matches(self):
expect(NotEqual(1, 2).matches()).to.eq(True)
expect(NotEqual(1, 1).matches()).to.eq(False)
def test_failure_message(self):
equal = NotEqual('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "expected"'
def test_register(self):
expect(expect.matcher('not_eq')) == NotEqual
expect(expect.matcher('ne')) == NotEqual
expect(expect.matcher('__ne__')) == NotEqual
<commit_msg>[f] Add failure message test for not_to.be.eq<commit_after>
|
from robber import expect
from robber.matchers.equal import Equal, NotEqual
class TestEqual:
def test_matches(self):
expect(Equal(1, 1).matches()).to.eq(True)
expect(Equal(1, 2).matches()).to.eq(False)
def test_failure_message(self):
equal = Equal('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" to equal "expected"'
def test_failure_message_with_not_to(self):
equal = Equal('actual', 'actual', is_negated=True)
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "actual"'
def test_register(self):
expect(expect.matcher('eq')) == Equal
expect(expect.matcher('__eq__')) == Equal
class TestNotEqual:
def test_matches(self):
expect(NotEqual(1, 2).matches()).to.eq(True)
expect(NotEqual(1, 1).matches()).to.eq(False)
def test_failure_message(self):
equal = NotEqual('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "expected"'
def test_register(self):
expect(expect.matcher('not_eq')) == NotEqual
expect(expect.matcher('ne')) == NotEqual
expect(expect.matcher('__ne__')) == NotEqual
|
from robber import expect
from robber.matchers.equal import Equal, NotEqual
class TestEqual:
def test_matches(self):
expect(Equal(1, 1).matches()).to.eq(True)
expect(Equal(1, 2).matches()).to.eq(False)
def test_failure_message(self):
equal = Equal('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" to equal "expected"'
def test_register(self):
expect(expect.matcher('eq')) == Equal
expect(expect.matcher('__eq__')) == Equal
class TestNotEqual:
def test_matches(self):
expect(NotEqual(1, 2).matches()).to.eq(True)
expect(NotEqual(1, 1).matches()).to.eq(False)
def test_failure_message(self):
equal = NotEqual('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "expected"'
def test_register(self):
expect(expect.matcher('not_eq')) == NotEqual
expect(expect.matcher('ne')) == NotEqual
expect(expect.matcher('__ne__')) == NotEqual
[f] Add failure message test for not_to.be.eqfrom robber import expect
from robber.matchers.equal import Equal, NotEqual
class TestEqual:
def test_matches(self):
expect(Equal(1, 1).matches()).to.eq(True)
expect(Equal(1, 2).matches()).to.eq(False)
def test_failure_message(self):
equal = Equal('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" to equal "expected"'
def test_failure_message_with_not_to(self):
equal = Equal('actual', 'actual', is_negated=True)
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "actual"'
def test_register(self):
expect(expect.matcher('eq')) == Equal
expect(expect.matcher('__eq__')) == Equal
class TestNotEqual:
def test_matches(self):
expect(NotEqual(1, 2).matches()).to.eq(True)
expect(NotEqual(1, 1).matches()).to.eq(False)
def test_failure_message(self):
equal = NotEqual('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "expected"'
def test_register(self):
expect(expect.matcher('not_eq')) == NotEqual
expect(expect.matcher('ne')) == NotEqual
expect(expect.matcher('__ne__')) == NotEqual
|
<commit_before>from robber import expect
from robber.matchers.equal import Equal, NotEqual
class TestEqual:
def test_matches(self):
expect(Equal(1, 1).matches()).to.eq(True)
expect(Equal(1, 2).matches()).to.eq(False)
def test_failure_message(self):
equal = Equal('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" to equal "expected"'
def test_register(self):
expect(expect.matcher('eq')) == Equal
expect(expect.matcher('__eq__')) == Equal
class TestNotEqual:
def test_matches(self):
expect(NotEqual(1, 2).matches()).to.eq(True)
expect(NotEqual(1, 1).matches()).to.eq(False)
def test_failure_message(self):
equal = NotEqual('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "expected"'
def test_register(self):
expect(expect.matcher('not_eq')) == NotEqual
expect(expect.matcher('ne')) == NotEqual
expect(expect.matcher('__ne__')) == NotEqual
<commit_msg>[f] Add failure message test for not_to.be.eq<commit_after>from robber import expect
from robber.matchers.equal import Equal, NotEqual
class TestEqual:
def test_matches(self):
expect(Equal(1, 1).matches()).to.eq(True)
expect(Equal(1, 2).matches()).to.eq(False)
def test_failure_message(self):
equal = Equal('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" to equal "expected"'
def test_failure_message_with_not_to(self):
equal = Equal('actual', 'actual', is_negated=True)
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "actual"'
def test_register(self):
expect(expect.matcher('eq')) == Equal
expect(expect.matcher('__eq__')) == Equal
class TestNotEqual:
def test_matches(self):
expect(NotEqual(1, 2).matches()).to.eq(True)
expect(NotEqual(1, 1).matches()).to.eq(False)
def test_failure_message(self):
equal = NotEqual('actual', 'expected')
message = equal.failure_message()
expect(message) == 'Expected "actual" not to equal "expected"'
def test_register(self):
expect(expect.matcher('not_eq')) == NotEqual
expect(expect.matcher('ne')) == NotEqual
expect(expect.matcher('__ne__')) == NotEqual
|
55a056346c2707b80663c56b59e58724cc72ace6
|
django_react_templatetags/mixins.py
|
django_react_templatetags/mixins.py
|
class RepresentationMixin(object):
@property
def react_representation(self):
raise NotImplementedError(
'Missing property react_representation in class'
)
|
class RepresentationMixin(object):
def to_react_representation(self, context=None):
raise NotImplementedError(
'Missing property to_react_representation in class'
)
|
Switch NotImplementedError to to_react_represetnation in mixin
|
Switch NotImplementedError to to_react_represetnation in mixin
|
Python
|
mit
|
Frojd/django-react-templatetags,Frojd/django-react-templatetags,Frojd/django-react-templatetags
|
class RepresentationMixin(object):
@property
def react_representation(self):
raise NotImplementedError(
'Missing property react_representation in class'
)
Switch NotImplementedError to to_react_represetnation in mixin
|
class RepresentationMixin(object):
def to_react_representation(self, context=None):
raise NotImplementedError(
'Missing property to_react_representation in class'
)
|
<commit_before>class RepresentationMixin(object):
@property
def react_representation(self):
raise NotImplementedError(
'Missing property react_representation in class'
)
<commit_msg>Switch NotImplementedError to to_react_represetnation in mixin<commit_after>
|
class RepresentationMixin(object):
def to_react_representation(self, context=None):
raise NotImplementedError(
'Missing property to_react_representation in class'
)
|
class RepresentationMixin(object):
@property
def react_representation(self):
raise NotImplementedError(
'Missing property react_representation in class'
)
Switch NotImplementedError to to_react_represetnation in mixinclass RepresentationMixin(object):
def to_react_representation(self, context=None):
raise NotImplementedError(
'Missing property to_react_representation in class'
)
|
<commit_before>class RepresentationMixin(object):
@property
def react_representation(self):
raise NotImplementedError(
'Missing property react_representation in class'
)
<commit_msg>Switch NotImplementedError to to_react_represetnation in mixin<commit_after>class RepresentationMixin(object):
def to_react_representation(self, context=None):
raise NotImplementedError(
'Missing property to_react_representation in class'
)
|
945bb3897abb55e1b0f4f9fc97644bc22dd54208
|
simuvex/concretization_strategies/__init__.py
|
simuvex/concretization_strategies/__init__.py
|
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
from angr.concretization_strategies import *
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
Fix compat bug in concretization_strategies
|
Fix compat bug in concretization_strategies
|
Python
|
bsd-2-clause
|
angr/simuvex
|
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
Fix compat bug in concretization_strategies
|
from angr.concretization_strategies import *
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
<commit_before>from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
<commit_msg>Fix compat bug in concretization_strategies<commit_after>
|
from angr.concretization_strategies import *
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
Fix compat bug in concretization_strategiesfrom angr.concretization_strategies import *
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
<commit_before>from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
<commit_msg>Fix compat bug in concretization_strategies<commit_after>from angr.concretization_strategies import *
from angr.concretization_strategies.any import SimConcretizationStrategyAny
from angr.concretization_strategies.max import SimConcretizationStrategyMax
from angr.concretization_strategies.nonzero import SimConcretizationStrategyNonzero
from angr.concretization_strategies.nonzero_range import SimConcretizationStrategyNonzeroRange
from angr.concretization_strategies.norepeats import SimConcretizationStrategyNorepeats
from angr.concretization_strategies.norepeats_range import SimConcretizationStrategyNorepeatsRange
from angr.concretization_strategies.range import SimConcretizationStrategyRange
from angr.concretization_strategies.single import SimConcretizationStrategySingle
from angr.concretization_strategies.solutions import SimConcretizationStrategySolutions
|
f2c8b94d35c5f0676dd99ad61df6daabf6d21d46
|
recipe_site/urls.py
|
recipe_site/urls.py
|
"""recipe_site URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^recipes/', include('recipes.urls')),
url(r'^polls/', include('polls.urls')),
url(r'^admin/', admin.site.urls),
]
|
"""recipe_site URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from recipes.views import IndexView
urlpatterns = [
url(r'^recipes/', include('recipes.urls')),
url(r'^$', IndexView.as_view(), name='index'),
url(r'^polls/', include('polls.urls')),
url(r'^admin/', admin.site.urls),
]
|
Add homepage to the recipe site
|
Add homepage to the recipe site
|
Python
|
mit
|
kgarrison343/recipe-site,kgarrison343/recipe-site
|
"""recipe_site URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^recipes/', include('recipes.urls')),
url(r'^polls/', include('polls.urls')),
url(r'^admin/', admin.site.urls),
]
Add homepage to the recipe site
|
"""recipe_site URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from recipes.views import IndexView
urlpatterns = [
url(r'^recipes/', include('recipes.urls')),
url(r'^$', IndexView.as_view(), name='index'),
url(r'^polls/', include('polls.urls')),
url(r'^admin/', admin.site.urls),
]
|
<commit_before>"""recipe_site URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^recipes/', include('recipes.urls')),
url(r'^polls/', include('polls.urls')),
url(r'^admin/', admin.site.urls),
]
<commit_msg>Add homepage to the recipe site<commit_after>
|
"""recipe_site URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from recipes.views import IndexView
urlpatterns = [
url(r'^recipes/', include('recipes.urls')),
url(r'^$', IndexView.as_view(), name='index'),
url(r'^polls/', include('polls.urls')),
url(r'^admin/', admin.site.urls),
]
|
"""recipe_site URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^recipes/', include('recipes.urls')),
url(r'^polls/', include('polls.urls')),
url(r'^admin/', admin.site.urls),
]
Add homepage to the recipe site"""recipe_site URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from recipes.views import IndexView
urlpatterns = [
url(r'^recipes/', include('recipes.urls')),
url(r'^$', IndexView.as_view(), name='index'),
url(r'^polls/', include('polls.urls')),
url(r'^admin/', admin.site.urls),
]
|
<commit_before>"""recipe_site URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^recipes/', include('recipes.urls')),
url(r'^polls/', include('polls.urls')),
url(r'^admin/', admin.site.urls),
]
<commit_msg>Add homepage to the recipe site<commit_after>"""recipe_site URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from recipes.views import IndexView
urlpatterns = [
url(r'^recipes/', include('recipes.urls')),
url(r'^$', IndexView.as_view(), name='index'),
url(r'^polls/', include('polls.urls')),
url(r'^admin/', admin.site.urls),
]
|
0f83ba67a4db2cdacbd3679479d26dbb584da978
|
testing/models/test_epic.py
|
testing/models/test_epic.py
|
from __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import models
@pytest.fixture
def epic():
return models.EPIC(epic_id=12345, ra=12.345, dec=67.894,
mag=None, campaign_id=1)
def test_repr(epic):
assert repr(epic) == '<EPIC: 12345>'
def test_simbad_query(epic):
with mock.patch('k2catalogue.models.Simbad') as Simbad:
epic.simbad_query(radius=2.)
Simbad.return_value.open.assert_called_once_with(radius=2.)
|
from __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import models
from k2catalogue import detail_object
@pytest.fixture
def epic():
return models.EPIC(epic_id=12345, ra=12.345, dec=67.894,
mag=None, campaign_id=1)
def test_repr(epic):
assert repr(epic) == '<EPIC: 12345>'
def test_simbad_query(epic):
with mock.patch('k2catalogue.models.Simbad') as Simbad:
epic.simbad_query(radius=2.)
Simbad.return_value.open.assert_called_once_with(radius=2.)
def test_detail_object_query(epic):
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/12345.html'
)
|
Add test for integration of DetailObject and EPIC
|
Add test for integration of DetailObject and EPIC
|
Python
|
mit
|
mindriot101/k2catalogue
|
from __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import models
@pytest.fixture
def epic():
return models.EPIC(epic_id=12345, ra=12.345, dec=67.894,
mag=None, campaign_id=1)
def test_repr(epic):
assert repr(epic) == '<EPIC: 12345>'
def test_simbad_query(epic):
with mock.patch('k2catalogue.models.Simbad') as Simbad:
epic.simbad_query(radius=2.)
Simbad.return_value.open.assert_called_once_with(radius=2.)
Add test for integration of DetailObject and EPIC
|
from __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import models
from k2catalogue import detail_object
@pytest.fixture
def epic():
return models.EPIC(epic_id=12345, ra=12.345, dec=67.894,
mag=None, campaign_id=1)
def test_repr(epic):
assert repr(epic) == '<EPIC: 12345>'
def test_simbad_query(epic):
with mock.patch('k2catalogue.models.Simbad') as Simbad:
epic.simbad_query(radius=2.)
Simbad.return_value.open.assert_called_once_with(radius=2.)
def test_detail_object_query(epic):
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/12345.html'
)
|
<commit_before>from __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import models
@pytest.fixture
def epic():
return models.EPIC(epic_id=12345, ra=12.345, dec=67.894,
mag=None, campaign_id=1)
def test_repr(epic):
assert repr(epic) == '<EPIC: 12345>'
def test_simbad_query(epic):
with mock.patch('k2catalogue.models.Simbad') as Simbad:
epic.simbad_query(radius=2.)
Simbad.return_value.open.assert_called_once_with(radius=2.)
<commit_msg>Add test for integration of DetailObject and EPIC<commit_after>
|
from __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import models
from k2catalogue import detail_object
@pytest.fixture
def epic():
return models.EPIC(epic_id=12345, ra=12.345, dec=67.894,
mag=None, campaign_id=1)
def test_repr(epic):
assert repr(epic) == '<EPIC: 12345>'
def test_simbad_query(epic):
with mock.patch('k2catalogue.models.Simbad') as Simbad:
epic.simbad_query(radius=2.)
Simbad.return_value.open.assert_called_once_with(radius=2.)
def test_detail_object_query(epic):
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/12345.html'
)
|
from __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import models
@pytest.fixture
def epic():
return models.EPIC(epic_id=12345, ra=12.345, dec=67.894,
mag=None, campaign_id=1)
def test_repr(epic):
assert repr(epic) == '<EPIC: 12345>'
def test_simbad_query(epic):
with mock.patch('k2catalogue.models.Simbad') as Simbad:
epic.simbad_query(radius=2.)
Simbad.return_value.open.assert_called_once_with(radius=2.)
Add test for integration of DetailObject and EPICfrom __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import models
from k2catalogue import detail_object
@pytest.fixture
def epic():
return models.EPIC(epic_id=12345, ra=12.345, dec=67.894,
mag=None, campaign_id=1)
def test_repr(epic):
assert repr(epic) == '<EPIC: 12345>'
def test_simbad_query(epic):
with mock.patch('k2catalogue.models.Simbad') as Simbad:
epic.simbad_query(radius=2.)
Simbad.return_value.open.assert_called_once_with(radius=2.)
def test_detail_object_query(epic):
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/12345.html'
)
|
<commit_before>from __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import models
@pytest.fixture
def epic():
return models.EPIC(epic_id=12345, ra=12.345, dec=67.894,
mag=None, campaign_id=1)
def test_repr(epic):
assert repr(epic) == '<EPIC: 12345>'
def test_simbad_query(epic):
with mock.patch('k2catalogue.models.Simbad') as Simbad:
epic.simbad_query(radius=2.)
Simbad.return_value.open.assert_called_once_with(radius=2.)
<commit_msg>Add test for integration of DetailObject and EPIC<commit_after>from __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import models
from k2catalogue import detail_object
@pytest.fixture
def epic():
return models.EPIC(epic_id=12345, ra=12.345, dec=67.894,
mag=None, campaign_id=1)
def test_repr(epic):
assert repr(epic) == '<EPIC: 12345>'
def test_simbad_query(epic):
with mock.patch('k2catalogue.models.Simbad') as Simbad:
epic.simbad_query(radius=2.)
Simbad.return_value.open.assert_called_once_with(radius=2.)
def test_detail_object_query(epic):
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/12345.html'
)
|
132b354f03d10ebc5a55152fef30ffbfb4b82a28
|
tests/dev/test_horoscope.py
|
tests/dev/test_horoscope.py
|
# coding=utf-8
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
self.assertTrue(type(result.output.encode('ascii', 'ignore')) == str)
|
# coding=utf-8
import sys
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
if sys.version_info[0] == 3:
string_types = str
else:
string_types = basestring
self.assertIsInstance(result.output, string_types)
|
Fix broken test for Python 2.x/3.x
|
Fix broken test for Python 2.x/3.x
|
Python
|
mit
|
dude-pa/dude
|
# coding=utf-8
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
self.assertTrue(type(result.output.encode('ascii', 'ignore')) == str)
Fix broken test for Python 2.x/3.x
|
# coding=utf-8
import sys
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
if sys.version_info[0] == 3:
string_types = str
else:
string_types = basestring
self.assertIsInstance(result.output, string_types)
|
<commit_before># coding=utf-8
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
self.assertTrue(type(result.output.encode('ascii', 'ignore')) == str)
<commit_msg>Fix broken test for Python 2.x/3.x<commit_after>
|
# coding=utf-8
import sys
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
if sys.version_info[0] == 3:
string_types = str
else:
string_types = basestring
self.assertIsInstance(result.output, string_types)
|
# coding=utf-8
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
self.assertTrue(type(result.output.encode('ascii', 'ignore')) == str)
Fix broken test for Python 2.x/3.x# coding=utf-8
import sys
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
if sys.version_info[0] == 3:
string_types = str
else:
string_types = basestring
self.assertIsInstance(result.output, string_types)
|
<commit_before># coding=utf-8
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
self.assertTrue(type(result.output.encode('ascii', 'ignore')) == str)
<commit_msg>Fix broken test for Python 2.x/3.x<commit_after># coding=utf-8
import sys
from unittest import TestCase
from click.testing import CliRunner
import yoda
class TestHoroscope(TestCase):
"""
Test for the following commands:
| Module: dev
| command: horoscope
"""
def __init__(self, methodName='runTest'):
super(TestHoroscope, self).__init__()
self.runner = CliRunner()
def runTest(self):
result = self.runner.invoke(yoda.cli, ['horoscope', 'aries'])
if sys.version_info[0] == 3:
string_types = str
else:
string_types = basestring
self.assertIsInstance(result.output, string_types)
|
58db8bc908c36522bdb781e61ee5fb7dd79a9911
|
webapp/byceps/blueprints/shop_admin/service.py
|
webapp/byceps/blueprints/shop_admin/service.py
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
def count_ordered_articles(article):
    """Count how often the article has been ordered, grouped by the
    order's payment state.
    """
    totals = {}
    for item in article.order_items:
        state = item.order.payment_state
        totals[state] = totals.get(state, 0) + item.quantity
    return totals
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
    """Count how often the article has been ordered, grouped by the
    order's payment state.

    The counter is pre-seeded with every member of PaymentState, so states
    with no matching orders appear in the result with a count of 0 instead
    of being omitted.
    """
    counter = Counter({state: 0 for state in PaymentState})
    for order_item in article.order_items:
        counter[order_item.order.payment_state] += order_item.quantity
    return dict(counter)
|
Make sure every payment state is
|
Make sure every payment state is
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter()
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
Make sure every payment state is
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
|
<commit_before># -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter()
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
<commit_msg>Make sure every payment state is<commit_after>
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter()
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
Make sure every payment state is# -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
|
<commit_before># -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter()
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
<commit_msg>Make sure every payment state is<commit_after># -*- coding: utf-8 -*-
"""
byceps.blueprints.shop_admin.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from collections import Counter
from ..shop.models import PaymentState
def count_ordered_articles(article):
"""Count how often the article has been ordered, grouped by the
order's payment state.
"""
counter = Counter({state: 0 for state in PaymentState})
for order_item in article.order_items:
counter[order_item.order.payment_state] += order_item.quantity
return dict(counter)
|
0774388eec3e405966828d5e2137abbd3dd29f1c
|
createcppfiles.py
|
createcppfiles.py
|
#!/usr/bin/python
import sys
import os
def createHeader(guard, namespace):
return "#ifndef {}_HPP\n#define {}_HPP\n\n{}\n\n#endif".format(guard, guard, namespace)
def createSource(name, namespace):
return '#include "{}.hpp"\n\n{}'.format(name, namespace)
def createNamespace(namespace):
return "namespace {}\n{{\n\n}}".format(namespace)
if len(sys.argv) > 1:
path, name = os.path.split(sys.argv[1])
if not name:
print("No name specified")
sys.exit(1)
if path:
path += "/"
namespace = createNamespace(sys.argv[2]) if len(sys.argv) > 2 else ""
guard = name.replace(" ", "_").upper()
name = name.replace(" ", "") # Remove spaces for filename
path += name
with open(path + ".hpp", "w") as f:
f.write(createHeader(guard, namespace))
with open(path + ".cpp", "w") as f:
f.write(createSource(name, namespace))
else:
print("Create a C++ header and source file.")
print("Usage:\n\t{} <fname> [namespace]".format(sys.argv[0]))
print("\n<fname>\t\tThe name of the file. Spaces will be removed. Include guards will have the same name but spaces replaced with underscores (_).")
print("[namespace]\tA namespace (what a surprise).")
|
#!/usr/bin/python
import sys
import os
def createHeader(guard, namespace):
return "#ifndef {}_HPP\n#define {}_HPP\n\n{}\n\n#endif".format(guard, guard, namespace)
def createSource(name, namespace):
return '#include "{}.hpp"\n\n{}'.format(name, namespace)
def createNamespace(namespace):
return "namespace {}\n{{\n\n}}".format(namespace)
if len(sys.argv) > 1:
path, name = os.path.split(sys.argv[1])
if not name:
print("No name specified")
sys.exit(1)
if path:
path += "/"
namespace = createNamespace(sys.argv[2]) if len(sys.argv) > 2 else ""
if namespace:
guard = "{}_{}".format(sys.argv[2], name.replace(" ", "_")).upper()
else:
guard = name.replace(" ", "_").upper()
name = name.replace(" ", "") # Remove spaces for filename
path += name
with open(path + ".hpp", "w") as f:
f.write(createHeader(guard, namespace))
with open(path + ".cpp", "w") as f:
f.write(createSource(name, namespace))
else:
print("Create a C++ header and source file.")
print("Usage:\n\t{} <fname> [namespace]".format(sys.argv[0]))
print("\n<fname>\t\tThe name of the file. Spaces will be removed. Include guards will have the same name but spaces replaced with underscores (_).")
print("[namespace]\tA namespace (what a surprise).")
|
Prepend namespace in include guards
|
Prepend namespace in include guards
|
Python
|
mit
|
mphe/scripts,mall0c/scripts,mall0c/scripts,mphe/scripts
|
#!/usr/bin/python
import sys
import os
def createHeader(guard, namespace):
return "#ifndef {}_HPP\n#define {}_HPP\n\n{}\n\n#endif".format(guard, guard, namespace)
def createSource(name, namespace):
return '#include "{}.hpp"\n\n{}'.format(name, namespace)
def createNamespace(namespace):
return "namespace {}\n{{\n\n}}".format(namespace)
if len(sys.argv) > 1:
path, name = os.path.split(sys.argv[1])
if not name:
print("No name specified")
sys.exit(1)
if path:
path += "/"
namespace = createNamespace(sys.argv[2]) if len(sys.argv) > 2 else ""
guard = name.replace(" ", "_").upper()
name = name.replace(" ", "") # Remove spaces for filename
path += name
with open(path + ".hpp", "w") as f:
f.write(createHeader(guard, namespace))
with open(path + ".cpp", "w") as f:
f.write(createSource(name, namespace))
else:
print("Create a C++ header and source file.")
print("Usage:\n\t{} <fname> [namespace]".format(sys.argv[0]))
print("\n<fname>\t\tThe name of the file. Spaces will be removed. Include guards will have the same name but spaces replaced with underscores (_).")
print("[namespace]\tA namespace (what a surprise).")
Prepend namespace in include guards
|
#!/usr/bin/python
import sys
import os
def createHeader(guard, namespace):
    """Return the text of a C++ header file with include guards."""
    template = "#ifndef {0}_HPP\n#define {0}_HPP\n\n{1}\n\n#endif"
    return template.format(guard, namespace)
def createSource(name, namespace):
    """Return the text of a C++ source file that includes its header."""
    include_line = "#include \"{}.hpp\"".format(name)
    return include_line + "\n\n" + namespace
def createNamespace(namespace):
    """Return an empty C++ namespace block for *namespace*."""
    return "namespace " + namespace + "\n{\n\n}"
# Entry point: expects a file name (and an optional namespace) on the
# command line; writes <name>.hpp and <name>.cpp next to the given path.
if len(sys.argv) > 1:
    # Split off any directory component so generated files land there.
    path, name = os.path.split(sys.argv[1])
    if not name:
        print("No name specified")
        sys.exit(1)
    if path:
        path += "/"
    # Optional second argument: wrap the generated code in a namespace.
    namespace = createNamespace(sys.argv[2]) if len(sys.argv) > 2 else ""
    if namespace:
        # Prefix the include guard with the namespace name so identically
        # named headers in different namespaces get distinct guards.
        guard = "{}_{}".format(sys.argv[2], name.replace(" ", "_")).upper()
    else:
        guard = name.replace(" ", "_").upper()
    name = name.replace(" ", "") # Remove spaces for filename
    path += name
    with open(path + ".hpp", "w") as f:
        f.write(createHeader(guard, namespace))
    with open(path + ".cpp", "w") as f:
        f.write(createSource(name, namespace))
else:
    # No arguments: print usage help instead of generating anything.
    print("Create a C++ header and source file.")
    print("Usage:\n\t{} <fname> [namespace]".format(sys.argv[0]))
    print("\n<fname>\t\tThe name of the file. Spaces will be removed. Include guards will have the same name but spaces replaced with underscores (_).")
    print("[namespace]\tA namespace (what a surprise).")
|
<commit_before>#!/usr/bin/python
import sys
import os
def createHeader(guard, namespace):
return "#ifndef {}_HPP\n#define {}_HPP\n\n{}\n\n#endif".format(guard, guard, namespace)
def createSource(name, namespace):
return '#include "{}.hpp"\n\n{}'.format(name, namespace)
def createNamespace(namespace):
return "namespace {}\n{{\n\n}}".format(namespace)
if len(sys.argv) > 1:
path, name = os.path.split(sys.argv[1])
if not name:
print("No name specified")
sys.exit(1)
if path:
path += "/"
namespace = createNamespace(sys.argv[2]) if len(sys.argv) > 2 else ""
guard = name.replace(" ", "_").upper()
name = name.replace(" ", "") # Remove spaces for filename
path += name
with open(path + ".hpp", "w") as f:
f.write(createHeader(guard, namespace))
with open(path + ".cpp", "w") as f:
f.write(createSource(name, namespace))
else:
print("Create a C++ header and source file.")
print("Usage:\n\t{} <fname> [namespace]".format(sys.argv[0]))
print("\n<fname>\t\tThe name of the file. Spaces will be removed. Include guards will have the same name but spaces replaced with underscores (_).")
print("[namespace]\tA namespace (what a surprise).")
<commit_msg>Prepend namespace in include guards<commit_after>
|
#!/usr/bin/python
import sys
import os
def createHeader(guard, namespace):
return "#ifndef {}_HPP\n#define {}_HPP\n\n{}\n\n#endif".format(guard, guard, namespace)
def createSource(name, namespace):
return '#include "{}.hpp"\n\n{}'.format(name, namespace)
def createNamespace(namespace):
return "namespace {}\n{{\n\n}}".format(namespace)
if len(sys.argv) > 1:
path, name = os.path.split(sys.argv[1])
if not name:
print("No name specified")
sys.exit(1)
if path:
path += "/"
namespace = createNamespace(sys.argv[2]) if len(sys.argv) > 2 else ""
if namespace:
guard = "{}_{}".format(sys.argv[2], name.replace(" ", "_")).upper()
else:
guard = name.replace(" ", "_").upper()
name = name.replace(" ", "") # Remove spaces for filename
path += name
with open(path + ".hpp", "w") as f:
f.write(createHeader(guard, namespace))
with open(path + ".cpp", "w") as f:
f.write(createSource(name, namespace))
else:
print("Create a C++ header and source file.")
print("Usage:\n\t{} <fname> [namespace]".format(sys.argv[0]))
print("\n<fname>\t\tThe name of the file. Spaces will be removed. Include guards will have the same name but spaces replaced with underscores (_).")
print("[namespace]\tA namespace (what a surprise).")
|
#!/usr/bin/python
import sys
import os
def createHeader(guard, namespace):
return "#ifndef {}_HPP\n#define {}_HPP\n\n{}\n\n#endif".format(guard, guard, namespace)
def createSource(name, namespace):
return '#include "{}.hpp"\n\n{}'.format(name, namespace)
def createNamespace(namespace):
return "namespace {}\n{{\n\n}}".format(namespace)
if len(sys.argv) > 1:
path, name = os.path.split(sys.argv[1])
if not name:
print("No name specified")
sys.exit(1)
if path:
path += "/"
namespace = createNamespace(sys.argv[2]) if len(sys.argv) > 2 else ""
guard = name.replace(" ", "_").upper()
name = name.replace(" ", "") # Remove spaces for filename
path += name
with open(path + ".hpp", "w") as f:
f.write(createHeader(guard, namespace))
with open(path + ".cpp", "w") as f:
f.write(createSource(name, namespace))
else:
print("Create a C++ header and source file.")
print("Usage:\n\t{} <fname> [namespace]".format(sys.argv[0]))
print("\n<fname>\t\tThe name of the file. Spaces will be removed. Include guards will have the same name but spaces replaced with underscores (_).")
print("[namespace]\tA namespace (what a surprise).")
Prepend namespace in include guards#!/usr/bin/python
import sys
import os
def createHeader(guard, namespace):
return "#ifndef {}_HPP\n#define {}_HPP\n\n{}\n\n#endif".format(guard, guard, namespace)
def createSource(name, namespace):
return '#include "{}.hpp"\n\n{}'.format(name, namespace)
def createNamespace(namespace):
return "namespace {}\n{{\n\n}}".format(namespace)
if len(sys.argv) > 1:
path, name = os.path.split(sys.argv[1])
if not name:
print("No name specified")
sys.exit(1)
if path:
path += "/"
namespace = createNamespace(sys.argv[2]) if len(sys.argv) > 2 else ""
if namespace:
guard = "{}_{}".format(sys.argv[2], name.replace(" ", "_")).upper()
else:
guard = name.replace(" ", "_").upper()
name = name.replace(" ", "") # Remove spaces for filename
path += name
with open(path + ".hpp", "w") as f:
f.write(createHeader(guard, namespace))
with open(path + ".cpp", "w") as f:
f.write(createSource(name, namespace))
else:
print("Create a C++ header and source file.")
print("Usage:\n\t{} <fname> [namespace]".format(sys.argv[0]))
print("\n<fname>\t\tThe name of the file. Spaces will be removed. Include guards will have the same name but spaces replaced with underscores (_).")
print("[namespace]\tA namespace (what a surprise).")
|
<commit_before>#!/usr/bin/python
import sys
import os
def createHeader(guard, namespace):
return "#ifndef {}_HPP\n#define {}_HPP\n\n{}\n\n#endif".format(guard, guard, namespace)
def createSource(name, namespace):
return '#include "{}.hpp"\n\n{}'.format(name, namespace)
def createNamespace(namespace):
return "namespace {}\n{{\n\n}}".format(namespace)
if len(sys.argv) > 1:
path, name = os.path.split(sys.argv[1])
if not name:
print("No name specified")
sys.exit(1)
if path:
path += "/"
namespace = createNamespace(sys.argv[2]) if len(sys.argv) > 2 else ""
guard = name.replace(" ", "_").upper()
name = name.replace(" ", "") # Remove spaces for filename
path += name
with open(path + ".hpp", "w") as f:
f.write(createHeader(guard, namespace))
with open(path + ".cpp", "w") as f:
f.write(createSource(name, namespace))
else:
print("Create a C++ header and source file.")
print("Usage:\n\t{} <fname> [namespace]".format(sys.argv[0]))
print("\n<fname>\t\tThe name of the file. Spaces will be removed. Include guards will have the same name but spaces replaced with underscores (_).")
print("[namespace]\tA namespace (what a surprise).")
<commit_msg>Prepend namespace in include guards<commit_after>#!/usr/bin/python
import sys
import os
def createHeader(guard, namespace):
return "#ifndef {}_HPP\n#define {}_HPP\n\n{}\n\n#endif".format(guard, guard, namespace)
def createSource(name, namespace):
return '#include "{}.hpp"\n\n{}'.format(name, namespace)
def createNamespace(namespace):
return "namespace {}\n{{\n\n}}".format(namespace)
if len(sys.argv) > 1:
path, name = os.path.split(sys.argv[1])
if not name:
print("No name specified")
sys.exit(1)
if path:
path += "/"
namespace = createNamespace(sys.argv[2]) if len(sys.argv) > 2 else ""
if namespace:
guard = "{}_{}".format(sys.argv[2], name.replace(" ", "_")).upper()
else:
guard = name.replace(" ", "_").upper()
name = name.replace(" ", "") # Remove spaces for filename
path += name
with open(path + ".hpp", "w") as f:
f.write(createHeader(guard, namespace))
with open(path + ".cpp", "w") as f:
f.write(createSource(name, namespace))
else:
print("Create a C++ header and source file.")
print("Usage:\n\t{} <fname> [namespace]".format(sys.argv[0]))
print("\n<fname>\t\tThe name of the file. Spaces will be removed. Include guards will have the same name but spaces replaced with underscores (_).")
print("[namespace]\tA namespace (what a surprise).")
|
e58c78fea4b604905333b490a22e640477d5e2d5
|
django_pytest/test_runner.py
|
django_pytest/test_runner.py
|
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
import sys
from pkg_resources import load_entry_point
sys.argv[1:] = sys.argv[2:]
# Remove stop word (--) from argument list again. This separates Django
# command options from py.test ones.
try:
del sys.argv[sys.argv.index('--')]
except ValueError:
pass
try:
entry_point = load_entry_point('py>=1.0.0', 'console_scripts', 'py.test')
except ImportError:
entry_point = load_entry_point('pytest>=2.0', 'console_scripts', 'py.test')
sys.exit(entry_point())
|
class TestRunner(object):
def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
def run_tests(self, test_labels):
import pytest
import sys
if test_labels is None:
print ('Not yet implemented: py.test is still not able to '
'discover the tests in all the INSTALLED_APPS as Django '
'requires.')
exit(1)
pytest_args = []
if self.failfast:
pytest_args.append('--exitfirst')
if self.verbosity == 0:
pytest_args.append('--quiet')
elif self.verbosity > 1:
pytest_args.append('--verbose')
# Remove arguments before (--). This separates Django command options
# from py.test ones.
try:
pytest_args_index = sys.argv.index('--') + 1
pytest_args.extend(sys.argv[pytest_args_index:])
except ValueError:
pass
sys.exit(pytest.main(pytest_args))
# Keep the old name to be backwards-compatible
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
runner = TestRunner(verbosity, interactive, failfast=False)
runner.run_tests(test_labels)
|
Add a new TestRunner class to remove Django deprecation warnings
|
Add a new TestRunner class to remove Django deprecation warnings
|
Python
|
bsd-3-clause
|
buchuki/django-pytest,0101/django-pytest
|
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
import sys
from pkg_resources import load_entry_point
sys.argv[1:] = sys.argv[2:]
# Remove stop word (--) from argument list again. This separates Django
# command options from py.test ones.
try:
del sys.argv[sys.argv.index('--')]
except ValueError:
pass
try:
entry_point = load_entry_point('py>=1.0.0', 'console_scripts', 'py.test')
except ImportError:
entry_point = load_entry_point('pytest>=2.0', 'console_scripts', 'py.test')
sys.exit(entry_point())
Add a new TestRunner class to remove Django deprecation warnings
|
class TestRunner(object):
    """Django test runner that delegates test execution to py.test."""

    def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
        # Signature mirrors what Django passes when instantiating the class
        # named in TEST_RUNNER; extra keyword options are ignored.
        self.verbosity = verbosity
        self.interactive = interactive
        self.failfast = failfast

    def run_tests(self, test_labels):
        """Run the given labels through py.test and exit the process.

        NOTE(review): terminates via sys.exit() with py.test's return code
        instead of returning a failure count as Django's runner contract
        describes -- confirm callers expect this.
        """
        import pytest
        import sys

        if test_labels is None:
            # py.test cannot yet discover tests across all INSTALLED_APPS.
            print ('Not yet implemented: py.test is still not able to '
                'discover the tests in all the INSTALLED_APPS as Django '
                'requires.')
            exit(1)

        pytest_args = []
        if self.failfast:
            pytest_args.append('--exitfirst')
        if self.verbosity == 0:
            pytest_args.append('--quiet')
        elif self.verbosity > 1:
            pytest_args.append('--verbose')

        # Remove arguments before (--). This separates Django command options
        # from py.test ones.
        try:
            pytest_args_index = sys.argv.index('--') + 1
            pytest_args.extend(sys.argv[pytest_args_index:])
        except ValueError:
            pass

        sys.exit(pytest.main(pytest_args))
# Keep the old name to be backwards-compatible
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=None):
    """Legacy function-style entry point for Django's TEST_RUNNER setting.

    Delegates to the class-based TestRunner. ``extra_tests`` is accepted
    only for signature compatibility and is ignored.
    """
    # ``None`` replaces the mutable ``[]`` default (shared-list pitfall);
    # the argument was never read, so behavior is unchanged.
    runner = TestRunner(verbosity, interactive, failfast=False)
    runner.run_tests(test_labels)
|
<commit_before>def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
import sys
from pkg_resources import load_entry_point
sys.argv[1:] = sys.argv[2:]
# Remove stop word (--) from argument list again. This separates Django
# command options from py.test ones.
try:
del sys.argv[sys.argv.index('--')]
except ValueError:
pass
try:
entry_point = load_entry_point('py>=1.0.0', 'console_scripts', 'py.test')
except ImportError:
entry_point = load_entry_point('pytest>=2.0', 'console_scripts', 'py.test')
sys.exit(entry_point())
<commit_msg>Add a new TestRunner class to remove Django deprecation warnings<commit_after>
|
class TestRunner(object):
def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
def run_tests(self, test_labels):
import pytest
import sys
if test_labels is None:
print ('Not yet implemented: py.test is still not able to '
'discover the tests in all the INSTALLED_APPS as Django '
'requires.')
exit(1)
pytest_args = []
if self.failfast:
pytest_args.append('--exitfirst')
if self.verbosity == 0:
pytest_args.append('--quiet')
elif self.verbosity > 1:
pytest_args.append('--verbose')
# Remove arguments before (--). This separates Django command options
# from py.test ones.
try:
pytest_args_index = sys.argv.index('--') + 1
pytest_args.extend(sys.argv[pytest_args_index:])
except ValueError:
pass
sys.exit(pytest.main(pytest_args))
# Keep the old name to be backwards-compatible
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
runner = TestRunner(verbosity, interactive, failfast=False)
runner.run_tests(test_labels)
|
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
import sys
from pkg_resources import load_entry_point
sys.argv[1:] = sys.argv[2:]
# Remove stop word (--) from argument list again. This separates Django
# command options from py.test ones.
try:
del sys.argv[sys.argv.index('--')]
except ValueError:
pass
try:
entry_point = load_entry_point('py>=1.0.0', 'console_scripts', 'py.test')
except ImportError:
entry_point = load_entry_point('pytest>=2.0', 'console_scripts', 'py.test')
sys.exit(entry_point())
Add a new TestRunner class to remove Django deprecation warningsclass TestRunner(object):
def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
def run_tests(self, test_labels):
import pytest
import sys
if test_labels is None:
print ('Not yet implemented: py.test is still not able to '
'discover the tests in all the INSTALLED_APPS as Django '
'requires.')
exit(1)
pytest_args = []
if self.failfast:
pytest_args.append('--exitfirst')
if self.verbosity == 0:
pytest_args.append('--quiet')
elif self.verbosity > 1:
pytest_args.append('--verbose')
# Remove arguments before (--). This separates Django command options
# from py.test ones.
try:
pytest_args_index = sys.argv.index('--') + 1
pytest_args.extend(sys.argv[pytest_args_index:])
except ValueError:
pass
sys.exit(pytest.main(pytest_args))
# Keep the old name to be backwards-compatible
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
runner = TestRunner(verbosity, interactive, failfast=False)
runner.run_tests(test_labels)
|
<commit_before>def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
import sys
from pkg_resources import load_entry_point
sys.argv[1:] = sys.argv[2:]
# Remove stop word (--) from argument list again. This separates Django
# command options from py.test ones.
try:
del sys.argv[sys.argv.index('--')]
except ValueError:
pass
try:
entry_point = load_entry_point('py>=1.0.0', 'console_scripts', 'py.test')
except ImportError:
entry_point = load_entry_point('pytest>=2.0', 'console_scripts', 'py.test')
sys.exit(entry_point())
<commit_msg>Add a new TestRunner class to remove Django deprecation warnings<commit_after>class TestRunner(object):
def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
def run_tests(self, test_labels):
import pytest
import sys
if test_labels is None:
print ('Not yet implemented: py.test is still not able to '
'discover the tests in all the INSTALLED_APPS as Django '
'requires.')
exit(1)
pytest_args = []
if self.failfast:
pytest_args.append('--exitfirst')
if self.verbosity == 0:
pytest_args.append('--quiet')
elif self.verbosity > 1:
pytest_args.append('--verbose')
# Remove arguments before (--). This separates Django command options
# from py.test ones.
try:
pytest_args_index = sys.argv.index('--') + 1
pytest_args.extend(sys.argv[pytest_args_index:])
except ValueError:
pass
sys.exit(pytest.main(pytest_args))
# Keep the old name to be backwards-compatible
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
runner = TestRunner(verbosity, interactive, failfast=False)
runner.run_tests(test_labels)
|
d4d25c87fc3eb9cfe1406d2a857b93c69d389850
|
parser.py
|
parser.py
|
import os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
records = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
return json.dumps(records)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*)', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
|
import os
import re
import json
class Parser(object):
    """Parse output from Speedtest CLI into Chart.js-friendly JSON."""

    def parse_all(self):
        """Summarize every ``*.speedtest.txt`` record under ``data/``.

        Returns a JSON string with the ``labels`` (timestamps) and
        ``datasets`` (download speeds) keys that Chart.js expects.
        """
        records = []
        labels = []
        download_speeds = []
        for file in os.listdir("data"):
            if file.endswith(".speedtest.txt"):
                records.append(self.parse("data/" + file))
        for record in records:
            labels.append(record["timestamp"])
            if record["result"] == "success":
                download_speeds.append(record["download"])
        datasets = [{"label": "Download Speeds", "data": download_speeds}]
        summary = {}
        summary["labels"] = labels
        summary["datasets"] = datasets
        return json.dumps(summary)

    def parse(self, file):
        """Parse one speedtest output file into a record dict.

        The record always carries ``result`` ("success" when a Ping line is
        present, "failure" otherwise) plus ``timestamp``, and on success
        ``ping``, ``download`` (numeric part only) and ``upload``.
        """
        # ``with`` closes the handle even if read() raises; the previous
        # manual open/close pair leaked the file descriptor on error.
        with open(file, "r") as input_file:
            data = input_file.read()

        timestamp = re.search(r'Speed Test Ran at: (.*)', data)
        ping = re.search(r'Ping: (.*)', data)
        download = re.search(r'Download: (.*) Mbit/s', data)
        upload = re.search(r'Upload: (.*)', data)

        record = {}
        if timestamp:
            record["timestamp"] = timestamp.group(1)
        if ping:
            record["result"] = "success"
            record["ping"] = ping.group(1)
            record["download"] = download.group(1)
            record["upload"] = upload.group(1)
        else:
            record["result"] = "failure"
        return record
parser = Parser()
print parser.parse_all()
|
Format parsed speed data for use in Chart.js
|
Format parsed speed data for use in Chart.js
|
Python
|
mit
|
ruralocity/speedchart,ruralocity/speedchart
|
import os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
records = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
return json.dumps(records)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*)', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
Format parsed speed data for use in Chart.js
|
import os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
# needs:
# labels (timestamps)
# data (ping/dl/ul speed)
records = []
labels = []
download_speeds = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
for record in records:
labels.append(record["timestamp"])
if record["result"] == "success":
download_speeds.append(record["download"])
datasets = [{"label":"Download Speeds", "data":download_speeds}]
summary = {}
summary["labels"] = labels
summary["datasets"] = datasets
return json.dumps(summary)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*) Mbit/s', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
|
<commit_before>import os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
records = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
return json.dumps(records)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*)', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
<commit_msg>Format parsed speed data for use in Chart.js<commit_after>
|
import os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
# needs:
# labels (timestamps)
# data (ping/dl/ul speed)
records = []
labels = []
download_speeds = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
for record in records:
labels.append(record["timestamp"])
if record["result"] == "success":
download_speeds.append(record["download"])
datasets = [{"label":"Download Speeds", "data":download_speeds}]
summary = {}
summary["labels"] = labels
summary["datasets"] = datasets
return json.dumps(summary)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*) Mbit/s', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
|
import os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
records = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
return json.dumps(records)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*)', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
Format parsed speed data for use in Chart.jsimport os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
# needs:
# labels (timestamps)
# data (ping/dl/ul speed)
records = []
labels = []
download_speeds = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
for record in records:
labels.append(record["timestamp"])
if record["result"] == "success":
download_speeds.append(record["download"])
datasets = [{"label":"Download Speeds", "data":download_speeds}]
summary = {}
summary["labels"] = labels
summary["datasets"] = datasets
return json.dumps(summary)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*) Mbit/s', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
|
<commit_before>import os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
records = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
return json.dumps(records)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*)', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
<commit_msg>Format parsed speed data for use in Chart.js<commit_after>import os
import re
import json
class Parser(object):
"""Parse output from Speedtest CLI into JSON"""
def parse_all(self):
# needs:
# labels (timestamps)
# data (ping/dl/ul speed)
records = []
labels = []
download_speeds = []
for file in os.listdir("data"):
if file.endswith(".speedtest.txt"):
records.append(self.parse("data/" + file))
for record in records:
labels.append(record["timestamp"])
if record["result"] == "success":
download_speeds.append(record["download"])
datasets = [{"label":"Download Speeds", "data":download_speeds}]
summary = {}
summary["labels"] = labels
summary["datasets"] = datasets
return json.dumps(summary)
def parse(self, file):
input = open(file, "r")
data = input.read()
input.close()
timestamp = re.search(r'Speed Test Ran at: (.*)', data)
ping = re.search(r'Ping: (.*)', data)
download = re.search(r'Download: (.*) Mbit/s', data)
upload = re.search(r'Upload: (.*)', data)
record = {}
if timestamp:
record["timestamp"] = timestamp.group(1)
if ping:
record["result"] = "success"
record["ping"] = ping.group(1)
record["download"] = download.group(1)
record["upload"] = upload.group(1)
else:
record["result"] = "failure"
return record
parser = Parser()
print parser.parse_all()
|
858a286b66fc3301acd93d847534c0d9ab2afcb5
|
ckanext/stadtzhtheme/tests/test_validation.py
|
ckanext/stadtzhtheme/tests/test_validation.py
|
import nose
from ckanapi import LocalCKAN, ValidationError
from ckan.tests import helpers, factories
eq_ = nose.tools.eq_
assert_true = nose.tools.assert_true
class TestValidation(helpers.FunctionalTestBase):
def test_invalid_url(self):
lc = LocalCKAN()
try:
dataset = factories.Dataset()
lc.call_action(
'resource_create',
{
'package_id': dataset['name'],
'name': 'Test-File',
'url': 'https://example.com]'
}
)
except ValidationError as e:
eq_(
e.error_dict['url'],
[u'Please provide a valid URL']
)
else:
raise AssertionError('ValidationError not raised')
|
import nose
from ckanapi import TestAppCKAN, ValidationError
from ckan.tests import helpers, factories
eq_ = nose.tools.eq_
assert_true = nose.tools.assert_true
class TestValidation(helpers.FunctionalTestBase):
def test_invalid_url(self):
factories.Sysadmin(apikey="my-test-key")
app = self._get_test_app()
demo = TestAppCKAN(app, apikey="my-test-key")
try:
dataset = factories.Dataset()
demo.action.resource_create(
package_id=dataset['name'],
name='Test-File',
url='https://example.com]'
)
except ValidationError as e:
eq_(
e.error_dict['url'],
[u'Bitte eine valide URL angeben']
)
else:
raise AssertionError('ValidationError not raised')
|
Use TestAppCKAN in test instead of LocalCKAN
|
Use TestAppCKAN in test instead of LocalCKAN
|
Python
|
agpl-3.0
|
opendatazurich/ckanext-stadtzh-theme,opendatazurich/ckanext-stadtzh-theme,opendatazurich/ckanext-stadtzh-theme
|
import nose
from ckanapi import LocalCKAN, ValidationError
from ckan.tests import helpers, factories
eq_ = nose.tools.eq_
assert_true = nose.tools.assert_true
class TestValidation(helpers.FunctionalTestBase):
def test_invalid_url(self):
lc = LocalCKAN()
try:
dataset = factories.Dataset()
lc.call_action(
'resource_create',
{
'package_id': dataset['name'],
'name': 'Test-File',
'url': 'https://example.com]'
}
)
except ValidationError as e:
eq_(
e.error_dict['url'],
[u'Please provide a valid URL']
)
else:
raise AssertionError('ValidationError not raised')
Use TestAppCKAN in test instead of LocalCKAN
|
import nose
from ckanapi import TestAppCKAN, ValidationError
from ckan.tests import helpers, factories
eq_ = nose.tools.eq_
assert_true = nose.tools.assert_true
class TestValidation(helpers.FunctionalTestBase):
def test_invalid_url(self):
factories.Sysadmin(apikey="my-test-key")
app = self._get_test_app()
demo = TestAppCKAN(app, apikey="my-test-key")
try:
dataset = factories.Dataset()
demo.action.resource_create(
package_id=dataset['name'],
name='Test-File',
url='https://example.com]'
)
except ValidationError as e:
eq_(
e.error_dict['url'],
[u'Bitte eine valide URL angeben']
)
else:
raise AssertionError('ValidationError not raised')
|
<commit_before>import nose
from ckanapi import LocalCKAN, ValidationError
from ckan.tests import helpers, factories
eq_ = nose.tools.eq_
assert_true = nose.tools.assert_true
class TestValidation(helpers.FunctionalTestBase):
def test_invalid_url(self):
lc = LocalCKAN()
try:
dataset = factories.Dataset()
lc.call_action(
'resource_create',
{
'package_id': dataset['name'],
'name': 'Test-File',
'url': 'https://example.com]'
}
)
except ValidationError as e:
eq_(
e.error_dict['url'],
[u'Please provide a valid URL']
)
else:
raise AssertionError('ValidationError not raised')
<commit_msg>Use TestAppCKAN in test instead of LocalCKAN<commit_after>
|
import nose
from ckanapi import TestAppCKAN, ValidationError
from ckan.tests import helpers, factories
eq_ = nose.tools.eq_
assert_true = nose.tools.assert_true
class TestValidation(helpers.FunctionalTestBase):
def test_invalid_url(self):
factories.Sysadmin(apikey="my-test-key")
app = self._get_test_app()
demo = TestAppCKAN(app, apikey="my-test-key")
try:
dataset = factories.Dataset()
demo.action.resource_create(
package_id=dataset['name'],
name='Test-File',
url='https://example.com]'
)
except ValidationError as e:
eq_(
e.error_dict['url'],
[u'Bitte eine valide URL angeben']
)
else:
raise AssertionError('ValidationError not raised')
|
import nose
from ckanapi import LocalCKAN, ValidationError
from ckan.tests import helpers, factories
eq_ = nose.tools.eq_
assert_true = nose.tools.assert_true
class TestValidation(helpers.FunctionalTestBase):
def test_invalid_url(self):
lc = LocalCKAN()
try:
dataset = factories.Dataset()
lc.call_action(
'resource_create',
{
'package_id': dataset['name'],
'name': 'Test-File',
'url': 'https://example.com]'
}
)
except ValidationError as e:
eq_(
e.error_dict['url'],
[u'Please provide a valid URL']
)
else:
raise AssertionError('ValidationError not raised')
Use TestAppCKAN in test instead of LocalCKANimport nose
from ckanapi import TestAppCKAN, ValidationError
from ckan.tests import helpers, factories
eq_ = nose.tools.eq_
assert_true = nose.tools.assert_true
class TestValidation(helpers.FunctionalTestBase):
def test_invalid_url(self):
factories.Sysadmin(apikey="my-test-key")
app = self._get_test_app()
demo = TestAppCKAN(app, apikey="my-test-key")
try:
dataset = factories.Dataset()
demo.action.resource_create(
package_id=dataset['name'],
name='Test-File',
url='https://example.com]'
)
except ValidationError as e:
eq_(
e.error_dict['url'],
[u'Bitte eine valide URL angeben']
)
else:
raise AssertionError('ValidationError not raised')
|
<commit_before>import nose
from ckanapi import LocalCKAN, ValidationError
from ckan.tests import helpers, factories
eq_ = nose.tools.eq_
assert_true = nose.tools.assert_true
class TestValidation(helpers.FunctionalTestBase):
def test_invalid_url(self):
lc = LocalCKAN()
try:
dataset = factories.Dataset()
lc.call_action(
'resource_create',
{
'package_id': dataset['name'],
'name': 'Test-File',
'url': 'https://example.com]'
}
)
except ValidationError as e:
eq_(
e.error_dict['url'],
[u'Please provide a valid URL']
)
else:
raise AssertionError('ValidationError not raised')
<commit_msg>Use TestAppCKAN in test instead of LocalCKAN<commit_after>import nose
from ckanapi import TestAppCKAN, ValidationError
from ckan.tests import helpers, factories
eq_ = nose.tools.eq_
assert_true = nose.tools.assert_true
class TestValidation(helpers.FunctionalTestBase):
def test_invalid_url(self):
factories.Sysadmin(apikey="my-test-key")
app = self._get_test_app()
demo = TestAppCKAN(app, apikey="my-test-key")
try:
dataset = factories.Dataset()
demo.action.resource_create(
package_id=dataset['name'],
name='Test-File',
url='https://example.com]'
)
except ValidationError as e:
eq_(
e.error_dict['url'],
[u'Bitte eine valide URL angeben']
)
else:
raise AssertionError('ValidationError not raised')
|
15beb35fff1ea343dc42cf4acc0e9ad5e64cef33
|
abilian/testing/__init__.py
|
abilian/testing/__init__.py
|
"""Base stuff for testing.
"""
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
class BaseTestCase(TestCase):
config_class = TestConfig
def create_app(self):
config = self.config_class()
self.app = Application(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
|
"""Base stuff for testing.
"""
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
|
Add TESTING-True in test config.
|
Add TESTING-True in test config.
|
Python
|
lgpl-2.1
|
abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core
|
"""Base stuff for testing.
"""
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
class BaseTestCase(TestCase):
config_class = TestConfig
def create_app(self):
config = self.config_class()
self.app = Application(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
Add TESTING-True in test config.
|
"""Base stuff for testing.
"""
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
|
<commit_before>"""Base stuff for testing.
"""
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
class BaseTestCase(TestCase):
config_class = TestConfig
def create_app(self):
config = self.config_class()
self.app = Application(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
<commit_msg>Add TESTING-True in test config.<commit_after>
|
"""Base stuff for testing.
"""
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
|
"""Base stuff for testing.
"""
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
class BaseTestCase(TestCase):
config_class = TestConfig
def create_app(self):
config = self.config_class()
self.app = Application(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
Add TESTING-True in test config."""Base stuff for testing.
"""
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
|
<commit_before>"""Base stuff for testing.
"""
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
class BaseTestCase(TestCase):
config_class = TestConfig
def create_app(self):
config = self.config_class()
self.app = Application(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
<commit_msg>Add TESTING-True in test config.<commit_after>"""Base stuff for testing.
"""
from flask.ext.testing import TestCase
from abilian.application import Application
from abilian.core.entities import db
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = db.session
def tearDown(self):
db.session.remove()
db.drop_all()
db.engine.dispose()
|
6dc0540bef999ca26d32a24fec39f2b4dde77bb5
|
pjson/core.py
|
pjson/core.py
|
import json
import sys
if sys.version_info[0] == 2:
from StringIO import StringIO
else:
from io import StringIO
from pygments import highlight
from pygments.formatters import TerminalFormatter
from xml.etree import ElementTree as ET
import xmlformatter
def format_code(data, is_xml=False):
"""
Parses data and formats it
"""
if is_xml:
ET.fromstring(data) # Make sure XML is valid
formatter = xmlformatter.Formatter(indent=2, indent_char=' ',
encoding_output='UTF-8',
preserve=['literal'])
return formatter.format_string(data)
else:
obj = json.loads(data)
output = StringIO()
json.dump(obj, output, sort_keys=True, indent=2)
return output.getvalue()
def color_yo_shit(code, lexer):
"""
Calls pygments.highlight to color yo shit
"""
return highlight(code, lexer, TerminalFormatter())
|
import json
import sys
from pygments import highlight
from pygments.formatters import TerminalFormatter
from xml.etree import ElementTree as ET
import xmlformatter
def format_code(data, is_xml=False):
"""
Parses data and formats it
"""
if is_xml:
ET.fromstring(data) # Make sure XML is valid
formatter = xmlformatter.Formatter(indent=2, indent_char=' ',
encoding_output='UTF-8',
preserve=['literal'])
return formatter.format_string(data)
else:
obj = json.loads(data)
return json.dumps(obj, sort_keys=True, indent=2, ensure_ascii=False).encode('UTF-8')
def color_yo_shit(code, lexer):
"""
Calls pygments.highlight to color yo shit
"""
return highlight(unicode(code, 'UTF-8'), lexer, TerminalFormatter())
|
Add support for special characters
|
Add support for special characters
|
Python
|
mit
|
igorgue/pjson
|
import json
import sys
if sys.version_info[0] == 2:
from StringIO import StringIO
else:
from io import StringIO
from pygments import highlight
from pygments.formatters import TerminalFormatter
from xml.etree import ElementTree as ET
import xmlformatter
def format_code(data, is_xml=False):
"""
Parses data and formats it
"""
if is_xml:
ET.fromstring(data) # Make sure XML is valid
formatter = xmlformatter.Formatter(indent=2, indent_char=' ',
encoding_output='UTF-8',
preserve=['literal'])
return formatter.format_string(data)
else:
obj = json.loads(data)
output = StringIO()
json.dump(obj, output, sort_keys=True, indent=2)
return output.getvalue()
def color_yo_shit(code, lexer):
"""
Calls pygments.highlight to color yo shit
"""
return highlight(code, lexer, TerminalFormatter())
Add support for special characters
|
import json
import sys
from pygments import highlight
from pygments.formatters import TerminalFormatter
from xml.etree import ElementTree as ET
import xmlformatter
def format_code(data, is_xml=False):
"""
Parses data and formats it
"""
if is_xml:
ET.fromstring(data) # Make sure XML is valid
formatter = xmlformatter.Formatter(indent=2, indent_char=' ',
encoding_output='UTF-8',
preserve=['literal'])
return formatter.format_string(data)
else:
obj = json.loads(data)
return json.dumps(obj, sort_keys=True, indent=2, ensure_ascii=False).encode('UTF-8')
def color_yo_shit(code, lexer):
"""
Calls pygments.highlight to color yo shit
"""
return highlight(unicode(code, 'UTF-8'), lexer, TerminalFormatter())
|
<commit_before>import json
import sys
if sys.version_info[0] == 2:
from StringIO import StringIO
else:
from io import StringIO
from pygments import highlight
from pygments.formatters import TerminalFormatter
from xml.etree import ElementTree as ET
import xmlformatter
def format_code(data, is_xml=False):
"""
Parses data and formats it
"""
if is_xml:
ET.fromstring(data) # Make sure XML is valid
formatter = xmlformatter.Formatter(indent=2, indent_char=' ',
encoding_output='UTF-8',
preserve=['literal'])
return formatter.format_string(data)
else:
obj = json.loads(data)
output = StringIO()
json.dump(obj, output, sort_keys=True, indent=2)
return output.getvalue()
def color_yo_shit(code, lexer):
"""
Calls pygments.highlight to color yo shit
"""
return highlight(code, lexer, TerminalFormatter())
<commit_msg>Add support for special characters<commit_after>
|
import json
import sys
from pygments import highlight
from pygments.formatters import TerminalFormatter
from xml.etree import ElementTree as ET
import xmlformatter
def format_code(data, is_xml=False):
"""
Parses data and formats it
"""
if is_xml:
ET.fromstring(data) # Make sure XML is valid
formatter = xmlformatter.Formatter(indent=2, indent_char=' ',
encoding_output='UTF-8',
preserve=['literal'])
return formatter.format_string(data)
else:
obj = json.loads(data)
return json.dumps(obj, sort_keys=True, indent=2, ensure_ascii=False).encode('UTF-8')
def color_yo_shit(code, lexer):
"""
Calls pygments.highlight to color yo shit
"""
return highlight(unicode(code, 'UTF-8'), lexer, TerminalFormatter())
|
import json
import sys
if sys.version_info[0] == 2:
from StringIO import StringIO
else:
from io import StringIO
from pygments import highlight
from pygments.formatters import TerminalFormatter
from xml.etree import ElementTree as ET
import xmlformatter
def format_code(data, is_xml=False):
"""
Parses data and formats it
"""
if is_xml:
ET.fromstring(data) # Make sure XML is valid
formatter = xmlformatter.Formatter(indent=2, indent_char=' ',
encoding_output='UTF-8',
preserve=['literal'])
return formatter.format_string(data)
else:
obj = json.loads(data)
output = StringIO()
json.dump(obj, output, sort_keys=True, indent=2)
return output.getvalue()
def color_yo_shit(code, lexer):
"""
Calls pygments.highlight to color yo shit
"""
return highlight(code, lexer, TerminalFormatter())
Add support for special charactersimport json
import sys
from pygments import highlight
from pygments.formatters import TerminalFormatter
from xml.etree import ElementTree as ET
import xmlformatter
def format_code(data, is_xml=False):
"""
Parses data and formats it
"""
if is_xml:
ET.fromstring(data) # Make sure XML is valid
formatter = xmlformatter.Formatter(indent=2, indent_char=' ',
encoding_output='UTF-8',
preserve=['literal'])
return formatter.format_string(data)
else:
obj = json.loads(data)
return json.dumps(obj, sort_keys=True, indent=2, ensure_ascii=False).encode('UTF-8')
def color_yo_shit(code, lexer):
"""
Calls pygments.highlight to color yo shit
"""
return highlight(unicode(code, 'UTF-8'), lexer, TerminalFormatter())
|
<commit_before>import json
import sys
if sys.version_info[0] == 2:
from StringIO import StringIO
else:
from io import StringIO
from pygments import highlight
from pygments.formatters import TerminalFormatter
from xml.etree import ElementTree as ET
import xmlformatter
def format_code(data, is_xml=False):
"""
Parses data and formats it
"""
if is_xml:
ET.fromstring(data) # Make sure XML is valid
formatter = xmlformatter.Formatter(indent=2, indent_char=' ',
encoding_output='UTF-8',
preserve=['literal'])
return formatter.format_string(data)
else:
obj = json.loads(data)
output = StringIO()
json.dump(obj, output, sort_keys=True, indent=2)
return output.getvalue()
def color_yo_shit(code, lexer):
"""
Calls pygments.highlight to color yo shit
"""
return highlight(code, lexer, TerminalFormatter())
<commit_msg>Add support for special characters<commit_after>import json
import sys
from pygments import highlight
from pygments.formatters import TerminalFormatter
from xml.etree import ElementTree as ET
import xmlformatter
def format_code(data, is_xml=False):
"""
Parses data and formats it
"""
if is_xml:
ET.fromstring(data) # Make sure XML is valid
formatter = xmlformatter.Formatter(indent=2, indent_char=' ',
encoding_output='UTF-8',
preserve=['literal'])
return formatter.format_string(data)
else:
obj = json.loads(data)
return json.dumps(obj, sort_keys=True, indent=2, ensure_ascii=False).encode('UTF-8')
def color_yo_shit(code, lexer):
"""
Calls pygments.highlight to color yo shit
"""
return highlight(unicode(code, 'UTF-8'), lexer, TerminalFormatter())
|
4551a3b25cc7d9ceac37c8c11c6bb22d98e34bd7
|
test/test_code_format.py
|
test/test_code_format.py
|
import pep8
import os
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.input_dir(os.path.join('..', 'xylem'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
|
import pep8
import os
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.input_dir(os.path.join('xylem'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
|
Fix relative path to only test xylem source directory.
|
Fix relative path to only test xylem source directory.
|
Python
|
apache-2.0
|
catkin/xylem,catkin/xylem
|
import pep8
import os
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.input_dir(os.path.join('..', 'xylem'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
Fix relative path to only test xylem source directory.
|
import pep8
import os
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.input_dir(os.path.join('xylem'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
|
<commit_before>import pep8
import os
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.input_dir(os.path.join('..', 'xylem'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
<commit_msg>Fix relative path to only test xylem source directory.<commit_after>
|
import pep8
import os
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.input_dir(os.path.join('xylem'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
|
import pep8
import os
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.input_dir(os.path.join('..', 'xylem'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
Fix relative path to only test xylem source directory.import pep8
import os
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.input_dir(os.path.join('xylem'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
|
<commit_before>import pep8
import os
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.input_dir(os.path.join('..', 'xylem'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
<commit_msg>Fix relative path to only test xylem source directory.<commit_after>import pep8
import os
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.input_dir(os.path.join('xylem'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.