Dataset of Python commit records. Each record lists: commit, old_file, new_file, old_contents, new_contents, subject, message, lang, license, repos.

commit: 12afe43b0f2599b0c79fab8bb0af454ccf16e57f
old_file: gittip/orm/__init__.py
new_file: gittip/orm/__init__.py
old_contents:
from __future__ import unicode_literals

import os

from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session


class SQLAlchemy(object):

    def __init__(self):
        self.session = self.create_session()

    @property
    def engine(self):
        dburl = os.environ['DATABASE_URL']
        return create_engine(dburl)

    def create_session(self):
        session = scoped_session(sessionmaker())
        session.configure(bind=self.engine)
        return session


db = SQLAlchemy()


class Model(object):

    def __repr__(self):
        cols = self.__mapper__.c.keys()
        class_name = self.__class__.__name__
        items = ', '.join(['%s=%s' % (col, repr(getattr(self, col)))
                           for col in cols])
        return '%s(%s)' % (class_name, items)

    def attrs_dict(self):
        keys = self.__mapper__.c.keys()
        attrs = {}
        for key in keys:
            attrs[key] = getattr(self, key)
        return attrs

    def save(self):
        db.session.add(self)
        db.session.commit()

    def delete(self):
        db.session.delete(self)
        db.session.commit()


Base = declarative_base(cls=Model)
Base.metadata.bind = db.engine
Base.query = db.session.query_property()

metadata = MetaData()
metadata.bind = db.engine

all = [Base, db, metadata]


def rollback(*_):
    db.session.rollback()
new_contents:
from __future__ import unicode_literals

import os
import pdb

from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session


class Model(object):

    def __repr__(self):
        cols = self.__mapper__.c.keys()
        class_name = self.__class__.__name__
        items = ', '.join(['%s=%s' % (col, repr(getattr(self, col)))
                           for col in cols])
        return '%s(%s)' % (class_name, items)

    def attrs_dict(self):
        keys = self.__mapper__.c.keys()
        attrs = {}
        for key in keys:
            attrs[key] = getattr(self, key)
        return attrs


class SQLAlchemy(object):

    def __init__(self):
        self.session = self.create_session()
        self.Model = self.make_declarative_base()

    @property
    def engine(self):
        dburl = os.environ['DATABASE_URL']
        return create_engine(dburl)

    def create_session(self):
        session = scoped_session(sessionmaker())
        session.configure(bind=self.engine)
        return session

    def make_declarative_base(self):
        base = declarative_base(cls=Model)
        base.query = self.session.query_property()
        return base


db = SQLAlchemy()

all = [db]


def rollback(*_):
    db.session.rollback()
subject: Remove the convenience functions, reorganize around the SQLAlchemy class
message: Remove the convenience functions, reorganize around the SQLAlchemy class
lang: Python
license: cc0-1.0
repos: bountysource/www.gittip.com,eXcomm/gratipay.com,bountysource/www.gittip.com,studio666/gratipay.com,mccolgst/www.gittip.com,MikeFair/www.gittip.com,bountysource/www.gittip.com,gratipay/gratipay.com,studio666/gratipay.com,studio666/gratipay.com,gratipay/gratipay.com,MikeFair/www.gittip.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,eXcomm/gratipay.com,bountysource/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,gratipay/gratipay.com,MikeFair/www.gittip.com,studio666/gratipay.com
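note: The reorganization follows the Flask-SQLAlchemy pattern: a single db object now owns the session and builds the declarative base, instead of module-level Base/metadata globals. A hypothetical usage sketch against the new layout (the Participant model, its table name, and its columns are invented here for illustration; DATABASE_URL must be set):

from sqlalchemy import Column, Integer, Text

class Participant(db.Model):
    # db.Model is the declarative base built by make_declarative_base()
    __tablename__ = 'participants'  # assumed table name, illustration only
    id = Column(Integer, primary_key=True)
    username = Column(Text)

# query_property() lets any model be queried through the scoped session
first_participant = Participant.query.first()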

commit: 4e35b16b8aed2ccb9dbc34a2bb56ce129450546b
old_file: mode/formatter/format_server.py
new_file: mode/formatter/format_server.py
old_contents:
import socket
from struct import pack, unpack
import sys

import autopep8

PORT = 10011

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = ('localhost', 10011)
sock.bind(server_address)
sock.listen(1)

print >>sys.stderr, 'Format server up on %s port %s' % server_address

while True:
    connection, client_address = sock.accept()
    try:
        buf = connection.recv(4)
        (size,) = unpack('>i', buf)
        if size == -1:
            print >>sys.stderr, 'Format server exiting.'
            sys.exit(0)
        src = ''
        while len(src) < size:
            src += connection.recv(4096)
        src = src.decode('utf-8')
        reformatted = autopep8.fix_code(src)
        encoded = reformatted.encode('utf-8')
        connection.sendall(pack('>i', len(encoded)))
        connection.sendall(encoded)
    finally:
        connection.close()
new_contents:
import socket
from struct import pack, unpack
import sys

import autopep8

PORT = 10011

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = ('localhost', 10011)
sock.bind(server_address)
sock.listen(1)

print >>sys.stderr, 'Format server up on %s port %s' % server_address

while True:
    connection, client_address = sock.accept()
    try:
        buf = ''
        while len(buf) < 4:
            buf += connection.recv(4 - len(buf))
        (size,) = unpack('>i', buf)
        if size == -1:
            print >>sys.stderr, 'Format server exiting.'
            sys.exit(0)
        src = ''
        while len(src) < size:
            src += connection.recv(4096)
        src = src.decode('utf-8')
        reformatted = autopep8.fix_code(src)
        encoded = reformatted.encode('utf-8')
        connection.sendall(pack('>i', len(encoded)))
        connection.sendall(encoded)
    finally:
        connection.close()
subject: Fix a bug in format server not fully reading 4-byte length.
message: Fix a bug in format server not fully reading 4-byte length.
lang: Python
license: apache-2.0
repos: tildebyte/processing.py,mashrin/processing.py,Luxapodular/processing.py,tildebyte/processing.py,Luxapodular/processing.py,tildebyte/processing.py,mashrin/processing.py,mashrin/processing.py,jdf/processing.py,jdf/processing.py,Luxapodular/processing.py,jdf/processing.py
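note: The bug existed because socket.recv(n) may legally return fewer than n bytes, so the 4-byte length header has to be accumulated in a loop, as new_contents now does. A reusable sketch of the same pattern (the helper name recv_exactly is invented here and is not part of the commit):

def recv_exactly(sock, n):
    # Keep reading until exactly n bytes have arrived; recv may return short.
    buf = b''
    while len(buf) < n:
        chunk = sock.recv(n - len(buf))
        if not chunk:
            raise EOFError('socket closed with %d bytes unread' % (n - len(buf)))
        buf += chunk
    return buf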

commit: 52dba233e5ca27a1f5c1152683d33037b8362c10
old_file: tests/test_visitor.py
new_file: tests/test_visitor.py
old_contents: (empty; new file)
new_contents:
from __future__ import absolute_import

from datetime import datetime
import json
import unittest

from normalize.visitor import Visitor
from testclasses import wall_one

JSON_CAN_DUMP = (basestring, int, long, dict, list)


class SimpleDumper(Visitor):
    def apply(self, value, *args):
        if isinstance(value, JSON_CAN_DUMP):
            dumpable = value
        elif isinstance(value, datetime):
            dumpable = value.isoformat()
        else:
            raise Exception("Can't dump %r" % value)
        return dumpable


class TestVisitor(unittest.TestCase):
    def test_simple_dumper(self):
        dumper = SimpleDumper()
        dumpable = dumper.map(wall_one)
        json.dumps(dumpable)
subject: Add a missing visitor test case
message: Add a missing visitor test case
    Very basic, doesn't test reduction yet.
lang: Python
license: mit
repos: tomo-otsuka/normalize,samv/normalize,hearsaycorp/normalize

commit: 66a8c82f43de9ff8856bdbb1cbe16bad271d81ed
old_file: tests/test_fei_tiff.py
new_file: tests/test_fei_tiff.py
old_contents: (empty; new file)
new_contents:
"""Test FEI SEM image plugin functionality.
FEI TIFFs contain metadata as ASCII plaintext at the end of the file.
"""
from __future__ import unicode_literals
import os
import numpy as np
from imageio.testing import run_tests_if_main, get_test_dir, need_internet
from imageio.core import get_remote_file
import imageio
def test_fei_file_reading():
need_internet() # We keep a test image in the imageio-binaries repo
fei_filename = get_remote_file('images/fei-sem-rbc.tif')
reader = imageio.get_reader(fei_filename, format='fei')
image = reader.get_data(0) # imageio.Image object
assert image.shape == (1094, 1536)
assert np.round(np.mean(image)) == 137
assert len(image.meta) == 18
assert image.meta['EScan']['PixelHeight'] == '7.70833e-009'
def test_fei_file_fail():
normal_tif = os.path.join(get_test_dir(), 'test_tiff.tiff')
bad_reader = imageio.get_reader(normal_tif, format='fei')
np.testing.assert_raises(ValueError, bad_reader._get_meta_data)
subject: Add tests for FEI format
message: Add tests for FEI format
lang: Python
license: bsd-2-clause
repos: imageio/imageio

commit: 22ea1609db983af84c770af5e04dc0d4c5af080f
old_file: tools/rebuild_index.py
new_file: tools/rebuild_index.py
old_contents: (empty; new file)
new_contents:
#!/usr/bin/env python
import datetime
from flask_msearch import Search
from cineapp import app
from cineapp import models
search = Search()
search.init_app(app)
search.create_index(models.Movie)
subject: Add script for rebuilding FTS index
message: Add script for rebuilding FTS index
    Fixes: #116
lang: Python
license: mit
repos: ptitoliv/cineapp,ptitoliv/cineapp,ptitoliv/cineapp

commit: ba16688a656b96ea263d61cc3f8b9247b832ae82
old_file: tests/test_event.py
new_file: tests/test_event.py
old_contents: (empty; new file)
new_contents:
import unittest

from event import Event


class EventTest(unittest.TestCase):
    def test_a_listener_is_notified_when_event_is_raised(self):
        called = False

        def listener():
            nonlocal called
            called = True

        event = Event()
        event.connect(listener)
        event.fire()
        self.assertTrue(called)
subject: Add EventTest class and a test method.
message: Add EventTest class and a test method.
lang: Python
license: mit
repos: bsmukasa/stock_alerter
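note: The test pins down only the observer contract: connect() registers a callable and fire() invokes it. A minimal Event that would satisfy this test (a sketch; the actual event.Event in the repo may differ):

class Event:
    def __init__(self):
        self._listeners = []

    def connect(self, listener):
        # register a callable to be invoked when the event fires
        self._listeners.append(listener)

    def fire(self, *args, **kwargs):
        # notify every registered listener in registration order
        for listener in self._listeners:
            listener(*args, **kwargs)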

commit: eb38566389d907f8a1e6070ccfa76118b7dee85c
old_file: scripts/odom_noisy.py
new_file: scripts/odom_noisy.py
old_contents: (empty; new file)
new_contents:
#!/usr/bin/env python
import rospy
from nav_msgs.msg import Odometry

import numpy as np
from numpy.random import normal as noise

from utils import quaternion_to_heading, heading_to_quaternion


class NoisyOdom(object):
    def __init__(self):
        rospy.init_node('noisy_odom')
        self.odom = rospy.Subscriber('/base_pose_ground_truth', Odometry, self.process_position)
        self.noisy_odom = rospy.Publisher('/ekf_simulated', Odometry, queue_size=1)
        self.original_odom = rospy.Publisher('/ekf_clean', Odometry, queue_size=1)
        rospy.spin()

    def process_position(self, odom):
        odom.header.frame_id = 'map'
        self.original_odom.publish(odom)
        print('add noise')
        # noise = np.random.normal(0,1)
        ## noise params ##
        # pose.pose.position.[x,y] 2
        # pose.pose.orientation.[x,y,z,w] (heading) 3
        # twist.twist.linear.[x] 4
        # twist.twist.angular.[z] 5
        odom.pose.pose.position.x += noise(0, 1)  # 1
        odom.pose.pose.position.y += noise(0, 1)  # 2
        heading = quaternion_to_heading(odom.pose.pose.orientation)
        heading += noise(0, 1)
        odom.pose.pose.orientation = heading_to_quaternion(heading)  # 3
        self.noisy_odom.publish(odom)


if __name__ == '__main__':
    no = NoisyOdom()
subject: Create odom-modifier for simulating ekf noise
message: Create odom-modifier for simulating ekf noise
lang: Python
license: mit
repos: buckbaskin/drive_stack,buckbaskin/drive_stack,buckbaskin/drive_stack
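note: quaternion_to_heading and heading_to_quaternion come from the repo's own utils module. For a planar robot they are presumably the standard yaw conversions; a sketch of what they plausibly compute (an assumption, not the repo's actual code):

import math

def heading_to_quaternion_sketch(heading):
    # planar rotation about z: only the z and w components are non-trivial
    return (0.0, 0.0, math.sin(heading / 2.0), math.cos(heading / 2.0))

def quaternion_to_heading_sketch(q):
    # standard yaw extraction from an (x, y, z, w) quaternion
    x, y, z, w = q
    return math.atan2(2.0 * (w * z + x * y), 1.0 - 2.0 * (y * y + z * z))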

commit: 8c6586820449bebdb5f7a686d75c9587fdbbe2b3
old_file: Lib/test/crashers/infinite_loop_re.py
new_file: Lib/test/crashers/infinite_loop_re.py
old_contents: (empty; new file)
new_contents:
# This was taken from http://python.org/sf/1541697
# It's not technically a crasher. It may not even truly be infinite,
# however, I haven't waited a long time to see the result. It takes
# 100% of CPU while running this and should be fixed.

import re

starttag = re.compile(r'<[a-zA-Z][-_.:a-zA-Z0-9]*\s*('
                      r'\s*([a-zA-Z_][-:.a-zA-Z_0-9]*)(\s*=\s*'
                      r'(\'[^\']*\'|"[^"]*"|[-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~@]'
                      r'[][\-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~\'"@]*(?=[\s>/<])))?'
                      r')*\s*/?\s*(?=[<>])')

if __name__ == '__main__':
    foo = '<table cellspacing="0" cellpadding="0" style="border-collapse'
    starttag.match(foo)
subject: Add a "crasher" taken from the sgml bug report referenced in the comment
message: Add a "crasher" taken from the sgml bug report referenced in the comment
lang: Python
license: mit
repos: sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
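note: The 100% CPU behavior is catastrophic backtracking: alternations inside a repeated group give the engine exponentially many ways to re-partition the unterminated attribute value before failing. A smaller self-contained illustration of the same failure mode (not from the bug report; the input is kept short so the demonstration finishes quickly):

import re
import time

start = time.time()
re.match(r'(a+)+$', 'a' * 20 + 'b')  # the forced mismatch explores ~2**20 paths
print('took %.2fs' % (time.time() - start))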

commit: 7cd31a93f13a49a26cb4ab44173ec38d0218456f
old_file: tests/test_dk.py
new_file: tests/test_dk.py
old_contents: (empty; new file)
new_contents:
# coding: utf-8
# Copyright (c) 2003, Taro Ogawa. All Rights Reserved.
# Copyright (c) 2013, Savoir-faire Linux inc. All Rights Reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA

from __future__ import unicode_literals

from unittest import TestCase

from num2words import num2words


class Num2WordsDKTest(TestCase):
    def test_ordinal(self):
        self.assertEqual(num2words(1, to="ordinal", lang="dk"), "første")
        self.assertEqual(num2words(5, to="ordinal", lang="dk"), "femte")

    def test_cardinal(self):
        self.assertEqual(num2words(0, to="cardinal", lang="dk"), "nul")
        self.assertEqual(num2words(1, to="cardinal", lang="dk"), "et")
        self.assertEqual(num2words(2, to="cardinal", lang="dk"), "to")
        self.assertEqual(num2words(5, to="cardinal", lang="dk"), "fem")
        self.assertEqual(num2words(8, to="cardinal", lang="dk"), "otte")
        self.assertEqual(num2words(18, to="cardinal", lang="dk"), "atten")
        self.assertEqual(num2words(45, to="cardinal", lang="dk"), "femogfyrre")
subject: Add simple tests for lang_DK.py
message: Add simple tests for lang_DK.py
lang: Python
license: lgpl-2.1
repos: savoirfairelinux/num2words

commit: 36fd9a3bcbd84e6917e66a40df752084a818c83c
old_file: sockets/server_web.py
new_file: sockets/server_web.py
old_contents: (empty; new file)
new_contents:
# A basic web server using sockets
import socket

PORT = 8092
MAX_OPEN_REQUESTS = 5


def process_client(clientsocket):
    print(clientsocket)
    print(clientsocket.recv(1024))
    web_contents = "<h1>Received</h1>"
    web_headers = "HTTP/1.1 200"
    web_headers += "\n" + "Content-Type: text/html"
    web_headers += "\n" + "Content-Length: %i" % len(str.encode(web_contents))
    clientsocket.send(str.encode(web_headers + "\n\n" + web_contents))
    clientsocket.close()


# create an INET, STREAMing socket
serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

# bind the socket to a public host, and a well-known port
hostname = socket.gethostname()
# Let's use better the local interface name
hostname = "localhost"

try:
    serversocket.bind((hostname, PORT))
    # become a server socket
    # MAX_OPEN_REQUESTS connect requests before refusing outside connections
    serversocket.listen(MAX_OPEN_REQUESTS)

    while True:
        # accept connections from outside
        print("Waiting for connections at %s %i" % (hostname, PORT))
        (clientsocket, address) = serversocket.accept()
        # now do something with the clientsocket
        # in this case, we'll pretend this is a non threaded server
        process_client(clientsocket)
except socket.error:
    print("Problemas using port %i. Do you have permission?" % PORT)
subject: Add basic web server using directly TCP sockets
message: Add basic web server using directly TCP sockets
lang: Python
license: mit
repos: acs/python-red,acs/python-red
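note: Since the response bytes are assembled by hand, the simplest check is a raw TCP client. A minimal client sketch (assumes the server above is already listening on localhost:8092):

import socket

client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(('localhost', 8092))
client.send(str.encode('GET / HTTP/1.1\nHost: localhost\n\n'))
print(client.recv(1024))  # status line, headers, blank line, then the body
client.close()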
|
Add basic web server using directly TCP sockets
|
# A basic web server using sockets
import socket
PORT = 8092
MAX_OPEN_REQUESTS = 5
def process_client(clientsocket):
print(clientsocket)
print(clientsocket.recv(1024))
web_contents = "<h1>Received</h1>"
web_headers = "HTTP/1.1 200"
web_headers += "\n" + "Content-Type: text/html"
web_headers += "\n" + "Content-Length: %i" % len(str.encode(web_contents))
clientsocket.send(str.encode(web_headers + "\n\n" + web_contents))
clientsocket.close()
# create an INET, STREAMing socket
serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# bind the socket to a public host, and a well-known port
hostname = socket.gethostname()
# Let's use better the local interface name
hostname = "localhost"
try:
serversocket.bind((hostname, PORT))
# become a server socket
# MAX_OPEN_REQUESTS connect requests before refusing outside connections
serversocket.listen(MAX_OPEN_REQUESTS)
while True:
# accept connections from outside
print ("Waiting for connections at %s %i" % (hostname, PORT))
(clientsocket, address) = serversocket.accept()
# now do something with the clientsocket
# in this case, we'll pretend this is a non threaded server
process_client(clientsocket)
except socket.error:
print("Problemas using port %i. Do you have permission?" % PORT)
|
<commit_before><commit_msg>Add basic web server using directly TCP sockets<commit_after>
|
# A basic web server using sockets
import socket
PORT = 8092
MAX_OPEN_REQUESTS = 5
def process_client(clientsocket):
print(clientsocket)
print(clientsocket.recv(1024))
web_contents = "<h1>Received</h1>"
web_headers = "HTTP/1.1 200"
web_headers += "\n" + "Content-Type: text/html"
web_headers += "\n" + "Content-Length: %i" % len(str.encode(web_contents))
clientsocket.send(str.encode(web_headers + "\n\n" + web_contents))
clientsocket.close()
# create an INET, STREAMing socket
serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# bind the socket to a public host, and a well-known port
hostname = socket.gethostname()
# Let's use better the local interface name
hostname = "localhost"
try:
serversocket.bind((hostname, PORT))
# become a server socket
# MAX_OPEN_REQUESTS connect requests before refusing outside connections
serversocket.listen(MAX_OPEN_REQUESTS)
while True:
# accept connections from outside
print ("Waiting for connections at %s %i" % (hostname, PORT))
(clientsocket, address) = serversocket.accept()
# now do something with the clientsocket
# in this case, we'll pretend this is a non threaded server
process_client(clientsocket)
except socket.error:
print("Problemas using port %i. Do you have permission?" % PORT)
|
Add basic web server using directly TCP sockets# A basic web server using sockets
import socket
PORT = 8092
MAX_OPEN_REQUESTS = 5
def process_client(clientsocket):
print(clientsocket)
print(clientsocket.recv(1024))
web_contents = "<h1>Received</h1>"
web_headers = "HTTP/1.1 200"
web_headers += "\n" + "Content-Type: text/html"
web_headers += "\n" + "Content-Length: %i" % len(str.encode(web_contents))
clientsocket.send(str.encode(web_headers + "\n\n" + web_contents))
clientsocket.close()
# create an INET, STREAMing socket
serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# bind the socket to a public host, and a well-known port
hostname = socket.gethostname()
# Let's use better the local interface name
hostname = "localhost"
try:
serversocket.bind((hostname, PORT))
# become a server socket
# MAX_OPEN_REQUESTS connect requests before refusing outside connections
serversocket.listen(MAX_OPEN_REQUESTS)
while True:
# accept connections from outside
print ("Waiting for connections at %s %i" % (hostname, PORT))
(clientsocket, address) = serversocket.accept()
# now do something with the clientsocket
# in this case, we'll pretend this is a non threaded server
process_client(clientsocket)
except socket.error:
print("Problemas using port %i. Do you have permission?" % PORT)
|
<commit_before><commit_msg>Add basic web server using TCP sockets directly<commit_after># A basic web server using sockets
import socket
PORT = 8092
MAX_OPEN_REQUESTS = 5
def process_client(clientsocket):
print(clientsocket)
print(clientsocket.recv(1024))
web_contents = "<h1>Received</h1>"
web_headers = "HTTP/1.1 200"
web_headers += "\n" + "Content-Type: text/html"
web_headers += "\n" + "Content-Length: %i" % len(str.encode(web_contents))
clientsocket.send(str.encode(web_headers + "\n\n" + web_contents))
clientsocket.close()
# create an INET, STREAMing socket
serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# bind the socket to a public host, and a well-known port
hostname = socket.gethostname()
# Let's use better the local interface name
hostname = "localhost"
try:
serversocket.bind((hostname, PORT))
# become a server socket
# MAX_OPEN_REQUESTS connect requests before refusing outside connections
serversocket.listen(MAX_OPEN_REQUESTS)
while True:
# accept connections from outside
print ("Waiting for connections at %s %i" % (hostname, PORT))
(clientsocket, address) = serversocket.accept()
# now do something with the clientsocket
# in this case, we'll pretend this is a non threaded server
process_client(clientsocket)
except socket.error:
print("Problemas using port %i. Do you have permission?" % PORT)
|
|
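For illustration, a minimal client sketch (hypothetical, not part of the commit) that exercises the server above; it assumes the server is already listening on localhost:8092.
# Hypothetical client for the basic web server above.
import socket
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(("localhost", 8092))
# Send a minimal HTTP request; the server prints whatever it receives.
client.send(str.encode("GET / HTTP/1.1\r\nHost: localhost\r\n\r\n"))
print(client.recv(1024).decode())  # expect the "<h1>Received</h1>" page
client.close()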
d9e880f76e5b627455056becb3981b5492c7a947
|
parse-zmmailbox-ids.py
|
parse-zmmailbox-ids.py
|
import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
for an_id in ids:
print an_id
if __name__ == '__main__':
main()
|
Add parser of 'zmmailbox search' output
|
Add parser of 'zmmailbox search' output
|
Python
|
apache-2.0
|
hgdeoro/zimbra7-to-zimbra8-password-migrator
|
Add parser of 'zmmailbox search' output
|
import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
for an_id in ids:
print an_id
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add parser of 'zmmailbox search' output<commit_after>
|
import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
for an_id in ids:
print an_id
if __name__ == '__main__':
main()
|
Add parser of 'zmmailbox search' outputimport re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
for an_id in ids:
print an_id
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add parser of 'zmmailbox search' output<commit_after>import re
import sys
# $ zmmailbox -z -m username@domain.tld search -l 200 "in:/inbox (before:today)"
# num: 200, more: true
#
# Id Type From Subject Date
# ------- ---- -------------------- -------------------------------------------------- --------------
# 1. -946182 conv admin Daily mail report 09/24/15 23:57
# 2. 421345 conv John Some great news for you 09/24/15 23:57
REGEX_HEAD = re.compile(r'^Id')
REGEX_HEAD_SEP = re.compile(r'^---')
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
def main():
lines = [line.strip() for line in sys.stdin.readlines() if line.strip()]
while True:
line = lines.pop(0)
if REGEX_HEAD.search(line):
break
line = lines.pop(0)
assert REGEX_HEAD_SEP.search(line)
ids = []
for line in lines:
matched = REGEX_DATA.match(line)
if matched:
ids.append(matched.group(2))
else:
sys.stderr.write("Couldn't parse line: {0}\n".format(line))
sys.exit(1)
for an_id in ids:
print an_id
if __name__ == '__main__':
main()
|
|
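As a quick check (sample input taken from the script's own header comment, not new data), REGEX_DATA can be exercised on one documented line; note the leading minus sign is deliberately left out of the captured id.
# Sketch: exercise the parser's data regex on the documented sample line.
import re
REGEX_DATA = re.compile(r'^(\d+)\.\s+\-?(\d+)\s+(\S+)')
m = REGEX_DATA.match("1. -946182 conv admin Daily mail report 09/24/15 23:57")
print(m.group(2))  # "946182" -- the sign is stripped by the pattern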
c2da2500693105f0ce7c0e6430489d6b8d780bbf
|
preprocess_birdclef.py
|
preprocess_birdclef.py
|
import numpy as np
np.random.seed(1337)
import xml.etree.ElementTree as ET
import glob
import csv
import os
import shutil
import tqdm
from bird import preprocessing as pp
# Settings
segment_size_seconds = 3
# Paths
xml_paths = glob.glob("datasets/birdClef2016/xml/*.xml")
source_dir = "./datasets/birdClef2016/wav"
preprocessed_dir = "./datasets/birdClef2016Whole"
noise_dir = os.path.join(preprocessed_dir, "noise")
print("Loading xml roots... ")
progress = tqdm.tqdm(range(len(xml_paths)))
xml_roots = [ET.parse(f) for (p, f) in zip(progress, xml_paths)]
if not os.path.exists(preprocessed_dir):
print("Create diractory: ", preprocessed_dir)
os.makedirs(preprocessed_dir)
if not os.path.exists(noise_dir):
print("Create diractory: ", noise_dir)
os.makedirs(noise_dir)
print("Preprocessing random species subset...")
progress = tqdm.tqdm(range(len(xml_paths)))
for (p, r) in zip(progress, xml_paths):
species = r.find("ClassId").text
filename = r.find("FileName").text
filepath = os.path.join(source_dir, filename)
class_dir = os.path.join(preprocessed_dir, "train", species)
if not os.path.exists(class_dir):
os.makedirs(class_dir)
# preprocess the sound file, and save signal to class_dir, noise to
# noise_dir with specified segment size
pp.preprocess_sound_file(filepath, class_dir, noise_dir,
segment_size_seconds)
|
Add preprocessing script for birdclef data
|
Add preprocessing script for birdclef data
|
Python
|
mit
|
johnmartinsson/bird-species-classification,johnmartinsson/bird-species-classification
|
Add preprocessing script for birdclef data
|
import numpy as np
np.random.seed(1337)
import xml.etree.ElementTree as ET
import glob
import csv
import os
import shutil
import tqdm
from bird import preprocessing as pp
# Settings
segment_size_seconds = 3
# Paths
xml_paths = glob.glob("datasets/birdClef2016/xml/*.xml")
source_dir = "./datasets/birdClef2016/wav"
preprocessed_dir = "./datasets/birdClef2016Whole"
noise_dir = os.path.join(preprocessed_dir, "noise")
print("Loading xml roots... ")
progress = tqdm.tqdm(range(len(xml_paths)))
xml_roots = [ET.parse(f) for (p, f) in zip(progress, xml_paths)]
if not os.path.exists(preprocessed_dir):
print("Create diractory: ", preprocessed_dir)
os.makedirs(preprocessed_dir)
if not os.path.exists(noise_dir):
print("Create diractory: ", noise_dir)
os.makedirs(noise_dir)
print("Preprocessing random species subset...")
progress = tqdm.tqdm(range(len(xml_paths)))
for (p, r) in zip(progress, xml_paths):
species = r.find("ClassId").text
filename = r.find("FileName").text
filepath = os.path.join(source_dir, filename)
class_dir = os.path.join(preprocessed_dir, "train", species)
if not os.path.exists(class_dir):
os.makedirs(class_dir)
# preprocess the sound file, and save signal to class_dir, noise to
# noise_dir with specified segment size
pp.preprocess_sound_file(filepath, class_dir, noise_dir,
segment_size_seconds)
|
<commit_before><commit_msg>Add preprocessing script for birdclef data<commit_after>
|
import numpy as np
np.random.seed(1337)
import xml.etree.ElementTree as ET
import glob
import csv
import os
import shutil
import tqdm
from bird import preprocessing as pp
# Settings
segment_size_seconds = 3
# Paths
xml_paths = glob.glob("datasets/birdClef2016/xml/*.xml")
source_dir = "./datasets/birdClef2016/wav"
preprocessed_dir = "./datasets/birdClef2016Whole"
noise_dir = os.path.join(preprocessed_dir, "noise")
print("Loading xml roots... ")
progress = tqdm.tqdm(range(len(xml_paths)))
xml_roots = [ET.parse(f) for (p, f) in zip(progress, xml_paths)]
if not os.path.exists(preprocessed_dir):
print("Create diractory: ", preprocessed_dir)
os.makedirs(preprocessed_dir)
if not os.path.exists(noise_dir):
print("Create diractory: ", noise_dir)
os.makedirs(noise_dir)
print("Preprocessing random species subset...")
progress = tqdm.tqdm(range(len(xml_paths)))
for (p, r) in zip(progress, xml_paths):
species = r.find("ClassId").text
filename = r.find("FileName").text
filepath = os.path.join(source_dir, filename)
class_dir = os.path.join(preprocessed_dir, "train", species)
if not os.path.exists(class_dir):
os.makedirs(class_dir)
# preprocess the sound file, and save signal to class_dir, noise to
# noise_dir with specified segment size
pp.preprocess_sound_file(filepath, class_dir, noise_dir,
segment_size_seconds)
|
Add preprocessing script for birdclef dataimport numpy as np
np.random.seed(1337)
import xml.etree.ElementTree as ET
import glob
import csv
import os
import shutil
import tqdm
from bird import preprocessing as pp
# Settings
segment_size_seconds = 3
# Paths
xml_paths = glob.glob("datasets/birdClef2016/xml/*.xml")
source_dir = "./datasets/birdClef2016/wav"
preprocessed_dir = "./datasets/birdClef2016Whole"
noise_dir = os.path.join(preprocessed_dir, "noise")
print("Loading xml roots... ")
progress = tqdm.tqdm(range(len(xml_paths)))
xml_roots = [ET.parse(f) for (p, f) in zip(progress, xml_paths)]
if not os.path.exists(preprocessed_dir):
print("Create diractory: ", preprocessed_dir)
os.makedirs(preprocessed_dir)
if not os.path.exists(noise_dir):
print("Create diractory: ", noise_dir)
os.makedirs(noise_dir)
print("Preprocessing random species subset...")
progress = tqdm.tqdm(range(len(xml_paths)))
for (p, r) in zip(progress, xml_paths):
species = r.find("ClassId").text
filename = r.find("FileName").text
filepath = os.path.join(source_dir, filename)
class_dir = os.path.join(preprocessed_dir, "train", species)
if not os.path.exists(class_dir):
os.makedirs(class_dir)
# preprocess the sound file, and save signal to class_dir, noise to
# noise_dir with specified segment size
pp.preprocess_sound_file(filepath, class_dir, noise_dir,
segment_size_seconds)
|
<commit_before><commit_msg>Add preprocessing script for birdclef data<commit_after>import numpy as np
np.random.seed(1337)
import xml.etree.ElementTree as ET
import glob
import csv
import os
import shutil
import tqdm
from bird import preprocessing as pp
# Settings
segment_size_seconds = 3
# Paths
xml_paths = glob.glob("datasets/birdClef2016/xml/*.xml")
source_dir = "./datasets/birdClef2016/wav"
preprocessed_dir = "./datasets/birdClef2016Whole"
noise_dir = os.path.join(preprocessed_dir, "noise")
print("Loading xml roots... ")
progress = tqdm.tqdm(range(len(xml_paths)))
xml_roots = [ET.parse(f) for (p, f) in zip(progress, xml_paths)]
if not os.path.exists(preprocessed_dir):
print("Create diractory: ", preprocessed_dir)
os.makedirs(preprocessed_dir)
if not os.path.exists(noise_dir):
print("Create diractory: ", noise_dir)
os.makedirs(noise_dir)
print("Preprocessing random species subset...")
progress = tqdm.tqdm(range(len(xml_paths)))
for (p, r) in zip(progress, xml_paths):
species = r.find("ClassId").text
filename = r.find("FileName").text
filepath = os.path.join(source_dir, filename)
class_dir = os.path.join(preprocessed_dir, "train", species)
if not os.path.exists(class_dir):
os.makedirs(class_dir)
# preprocess the sound file, and save signal to class_dir, noise to
# noise_dir with specified segment size
pp.preprocess_sound_file(filepath, class_dir, noise_dir,
segment_size_seconds)
|
|
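A side note on the zip(progress, iterable) idiom used above: tqdm can wrap an iterable directly, which is the more common form. A small sketch with hypothetical paths:
# Sketch: tqdm wraps iterables directly, avoiding zip with a range.
import tqdm
xml_paths = ["a.xml", "b.xml", "c.xml"]  # hypothetical file list
for path in tqdm.tqdm(xml_paths):
    pass  # process each file; the bar advances automatically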
948eced60d8c97535cf98422a8242fa12cdd502c
|
backend/scripts/new-samples/search.py
|
backend/scripts/new-samples/search.py
|
#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
import sys
import json
def main(conn):
value = list(r.table("samples")
.eq_join("id", r.table("sample2attribute_set"),
index="sample_id")
.zip()
.merge(lambda aset: {
"attributes": r.table("attribute_set2attribute")
.get_all(aset["attribute_set_id"],
index="attribute_set_id")
.eq_join("attribute_id", r.table("attributes"))
.zip()
.merge(lambda attr: {
"best_measure": r.table("measurements")
.get(attr["best_measure_id"]),
"history": r.table("best_measure_history")
.get_all(attr["id"], index="attribute_id")
.merge(lambda best: {
"measurement": r.table("measurements")
.get(best["measurement_id"]).default("")
})
.coerce_to("array"),
"measurements": r.table("attribute2measurement")
.get_all(attr["id"], index="attribute_id")
.eq_join("measurement_id", r.table("measurements"))
.zip()
.coerce_to("array")
})
.coerce_to("array")
})
.filter(lambda entry: entry["attributes"].contains(
lambda element: element["best_measure"]["properties"]
["grain_size"]["value"].eq(1.09)))
.run(conn, time_format="raw"))
print json.dumps(value)
# http://stackoverflow.com/questions/23046809/filtering-based-on-key-value-in-all-objects-in-array-in-rethinkdb
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect("localhost", options.port, db="samplesdb")
main(conn)
|
Add python code to conduct queries.
|
Add python code to conduct queries.
|
Python
|
mit
|
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
|
Add python code to conduct queries.
|
#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
import sys
import json
def main(conn):
value = list(r.table("samples")
.eq_join("id", r.table("sample2attribute_set"),
index="sample_id")
.zip()
.merge(lambda aset: {
"attributes": r.table("attribute_set2attribute")
.get_all(aset["attribute_set_id"],
index="attribute_set_id")
.eq_join("attribute_id", r.table("attributes"))
.zip()
.merge(lambda attr: {
"best_measure": r.table("measurements")
.get(attr["best_measure_id"]),
"history": r.table("best_measure_history")
.get_all(attr["id"], index="attribute_id")
.merge(lambda best: {
"measurement": r.table("measurements")
.get(best["measurement_id"]).default("")
})
.coerce_to("array"),
"measurements": r.table("attribute2measurement")
.get_all(attr["id"], index="attribute_id")
.eq_join("measurement_id", r.table("measurements"))
.zip()
.coerce_to("array")
})
.coerce_to("array")
})
.filter(lambda entry: entry["attributes"].contains(
lambda element: element["best_measure"]["properties"]
["grain_size"]["value"].eq(1.09)))
.run(conn, time_format="raw"))
print json.dumps(value)
# http://stackoverflow.com/questions/23046809/filtering-based-on-key-value-in-all-objects-in-array-in-rethinkdb
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect("localhost", options.port, db="samplesdb")
main(conn)
|
<commit_before><commit_msg>Add python code to conduct queries.<commit_after>
|
#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
import sys
import json
def main(conn):
value = list(r.table("samples")
.eq_join("id", r.table("sample2attribute_set"),
index="sample_id")
.zip()
.merge(lambda aset: {
"attributes": r.table("attribute_set2attribute")
.get_all(aset["attribute_set_id"],
index="attribute_set_id")
.eq_join("attribute_id", r.table("attributes"))
.zip()
.merge(lambda attr: {
"best_measure": r.table("measurements")
.get(attr["best_measure_id"]),
"history": r.table("best_measure_history")
.get_all(attr["id"], index="attribute_id")
.merge(lambda best: {
"measurement": r.table("measurements")
.get(best["measurement_id"]).default("")
})
.coerce_to("array"),
"measurements": r.table("attribute2measurement")
.get_all(attr["id"], index="attribute_id")
.eq_join("measurement_id", r.table("measurements"))
.zip()
.coerce_to("array")
})
.coerce_to("array")
})
.filter(lambda entry: entry["attributes"].contains(
lambda element: element["best_measure"]["properties"]
["grain_size"]["value"].eq(1.09)))
.run(conn, time_format="raw"))
print json.dumps(value)
# http://stackoverflow.com/questions/23046809/filtering-based-on-key-value-in-all-objects-in-array-in-rethinkdb
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect("localhost", options.port, db="samplesdb")
main(conn)
|
Add python code to conduct queries.#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
import sys
import json
def main(conn):
value = list(r.table("samples")
.eq_join("id", r.table("sample2attribute_set"),
index="sample_id")
.zip()
.merge(lambda aset: {
"attributes": r.table("attribute_set2attribute")
.get_all(aset["attribute_set_id"],
index="attribute_set_id")
.eq_join("attribute_id", r.table("attributes"))
.zip()
.merge(lambda attr: {
"best_measure": r.table("measurements")
.get(attr["best_measure_id"]),
"history": r.table("best_measure_history")
.get_all(attr["id"], index="attribute_id")
.merge(lambda best: {
"measurement": r.table("measurements")
.get(best["measurement_id"]).default("")
})
.coerce_to("array"),
"measurements": r.table("attribute2measurement")
.get_all(attr["id"], index="attribute_id")
.eq_join("measurement_id", r.table("measurements"))
.zip()
.coerce_to("array")
})
.coerce_to("array")
})
.filter(lambda entry: entry["attributes"].contains(
lambda element: element["best_measure"]["properties"]
["grain_size"]["value"].eq(1.09)))
.run(conn, time_format="raw"))
print json.dumps(value)
# http://stackoverflow.com/questions/23046809/filtering-based-on-key-value-in-all-objects-in-array-in-rethinkdb
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect("localhost", options.port, db="samplesdb")
main(conn)
|
<commit_before><commit_msg>Add python code to conduct queries.<commit_after>#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
import sys
import json
def main(conn):
value = list(r.table("samples")
.eq_join("id", r.table("sample2attribute_set"),
index="sample_id")
.zip()
.merge(lambda aset: {
"attributes": r.table("attribute_set2attribute")
.get_all(aset["attribute_set_id"],
index="attribute_set_id")
.eq_join("attribute_id", r.table("attributes"))
.zip()
.merge(lambda attr: {
"best_measure": r.table("measurements")
.get(attr["best_measure_id"]),
"history": r.table("best_measure_history")
.get_all(attr["id"], index="attribute_id")
.merge(lambda best: {
"measurement": r.table("measurements")
.get(best["measurement_id"]).default("")
})
.coerce_to("array"),
"measurements": r.table("attribute2measurement")
.get_all(attr["id"], index="attribute_id")
.eq_join("measurement_id", r.table("measurements"))
.zip()
.coerce_to("array")
})
.coerce_to("array")
})
.filter(lambda entry: entry["attributes"].contains(
lambda element: element["best_measure"]["properties"]
["grain_size"]["value"].eq(1.09)))
.run(conn, time_format="raw"))
print json.dumps(value)
# http://stackoverflow.com/questions/23046809/filtering-based-on-key-value-in-all-objects-in-array-in-rethinkdb
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect("localhost", options.port, db="samplesdb")
main(conn)
|
|
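The nested merge/eq_join pattern is easier to see in a reduced form. A sketch under the same table names but the driver's assumed default port (not from the commit):
# Sketch: attach child rows from a join table onto each parent document.
import rethinkdb as r
conn = r.connect("localhost", 28015, db="samplesdb")  # assumed default port
parents = list(r.table("samples")
               .merge(lambda s: {
                   "attribute_sets": r.table("sample2attribute_set")
                                      .get_all(s["id"], index="sample_id")
                                      .coerce_to("array")})
               .run(conn))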
29b785da5882000758532e1467b5b7f10645193b
|
collect_cache_statistics.py
|
collect_cache_statistics.py
|
# Lab1 exercise 3.
#
# Sets up the cache and runs the benchmark. You should have the benchmark
# stopped at the magic breakpoint in your Simics configuration. Run with
# something like:
#
# ./simics -stall -no-stc -c vortex.conf -no-win -q -p collect_cache_statistics.py
conf.cpu0_0.instruction_fetch_mode = "instruction-cache-access-trace"
#
# 4Kbyte cache, random replacement policy
#
cache = pre_conf_object('cache', 'g-cache')
cache.cpus = [conf.cpu0_0]
cache.config_line_number = 128
cache.config_line_size = 32
cache.config_assoc = 1
cache.config_virtual_index = 0
cache.config_virtual_tag = 0
cache.config_replacement_policy = 'random'
cache.penalty_read = 0
cache.penalty_write = 0
cache.penalty_read_next = 0
cache.penalty_write_next = 0
SIM_add_configuration([cache], None)
conf.phys_mem.timing_model = conf.cache
run_command("continue 100000")
run_command("cache.reset-statistics")
run_command("continue 10000")
print "Lines used: %d" % conf.cache.config_line_size
print "Read hit rate: %f" % (1 - conf.cache.stat_data_read_miss / float(conf.cache.stat_data_read))
run_command("exit")
|
Add script for collecting cache statistics
|
Add script for collecting cache statistics
|
Python
|
mit
|
myrjola/comparch_labs,myrjola/comparch_labs,myrjola/comparch_labs,myrjola/comparch_labs
|
Add script for collecting cache statistics
|
# Lab1 exercise 3.
#
# Sets up the cache and runs the benchmark. You should have the benchmark
# stopped at the magic breakpoint in your Simics configuration. Run with
# something like:
#
# ./simics -stall -no-stc -c vortex.conf -no-win -q -p collect_cache_statistics.py
conf.cpu0_0.instruction_fetch_mode = "instruction-cache-access-trace"
#
# 4Kbyte cache, random replacement policy
#
cache = pre_conf_object('cache', 'g-cache')
cache.cpus = [conf.cpu0_0]
cache.config_line_number = 128
cache.config_line_size = 32
cache.config_assoc = 1
cache.config_virtual_index = 0
cache.config_virtual_tag = 0
cache.config_replacement_policy = 'random'
cache.penalty_read = 0
cache.penalty_write = 0
cache.penalty_read_next = 0
cache.penalty_write_next = 0
SIM_add_configuration([cache], None)
conf.phys_mem.timing_model = conf.cache
run_command("continue 100000")
run_command("cache.reset-statistics")
run_command("continue 10000")
print "Lines used: %d" % conf.cache.config_line_size
print "Read hit rate: %f" % (1 - conf.cache.stat_data_read_miss / float(conf.cache.stat_data_read))
run_command("exit")
|
<commit_before><commit_msg>Add script for collecting cache statistics<commit_after>
|
# Lab1 exercise 3.
#
# Sets up the cache and runs the benchmark. You should have the benchmark
# stopped at the magic breakpoint in your Simics configuration. Run with
# something like:
#
# ./simics -stall -no-stc -c vortex.conf -no-win -q -p collect_cache_statistics.py
conf.cpu0_0.instruction_fetch_mode = "instruction-cache-access-trace"
#
# 4Kbyte cache, random replacement policy
#
cache = pre_conf_object('cache', 'g-cache')
cache.cpus = [conf.cpu0_0]
cache.config_line_number = 128
cache.config_line_size = 32
cache.config_assoc = 1
cache.config_virtual_index = 0
cache.config_virtual_tag = 0
cache.config_replacement_policy = 'random'
cache.penalty_read = 0
cache.penalty_write = 0
cache.penalty_read_next = 0
cache.penalty_write_next = 0
SIM_add_configuration([cache], None)
conf.phys_mem.timing_model = conf.cache
run_command("continue 100000")
run_command("cache.reset-statistics")
run_command("continue 10000")
print "Lines used: %d" % conf.cache.config_line_size
print "Read hit rate: %f" % (1 - conf.cache.stat_data_read_miss / float(conf.cache.stat_data_read))
run_command("exit")
|
Add script for collecting cache statistics# Lab1 exercise 3.
#
# Sets up the cache and runs the benchmark. You should have the benchmark
# stopped at the magic breakpoint in your Simics configuration. Run with
# something like:
#
# ./simics -stall -no-stc -c vortex.conf -no-win -q -p collect_cache_statistics.py
conf.cpu0_0.instruction_fetch_mode = "instruction-cache-access-trace"
#
# 4Kbyte cache, random replacement policy
#
cache = pre_conf_object('cache', 'g-cache')
cache.cpus = [conf.cpu0_0]
cache.config_line_number = 128
cache.config_line_size = 32
cache.config_assoc = 1
cache.config_virtual_index = 0
cache.config_virtual_tag = 0
cache.config_replacement_policy = 'random'
cache.penalty_read = 0
cache.penalty_write = 0
cache.penalty_read_next = 0
cache.penalty_write_next = 0
SIM_add_configuration([cache], None)
conf.phys_mem.timing_model = conf.cache
run_command("continue 100000")
run_command("cache.reset-statistics")
run_command("continue 10000")
print "Lines used: %d" % conf.cache.config_line_size
print "Read hit rate: %f" % (1 - conf.cache.stat_data_read_miss / float(conf.cache.stat_data_read))
run_command("exit")
|
<commit_before><commit_msg>Add script for collecting cache statistics<commit_after># Lab1 exercise 3.
#
# Sets up the cache and runs the benchmark. You should have the benchmark
# stopped at the magic breakpoint in your Simics configuration. Run with
# something like:
#
# ./simics -stall -no-stc -c vortex.conf -no-win -q -p collect_cache_statistics.py
conf.cpu0_0.instruction_fetch_mode = "instruction-cache-access-trace"
#
# 4Kbyte cache, random replacement policy
#
cache = pre_conf_object('cache', 'g-cache')
cache.cpus = [conf.cpu0_0]
cache.config_line_number = 128
cache.config_line_size = 32
cache.config_assoc = 1
cache.config_virtual_index = 0
cache.config_virtual_tag = 0
cache.config_replacement_policy = 'random'
cache.penalty_read = 0
cache.penalty_write = 0
cache.penalty_read_next = 0
cache.penalty_write_next = 0
SIM_add_configuration([cache], None)
conf.phys_mem.timing_model = conf.cache
run_command("continue 100000")
run_command("cache.reset-statistics")
run_command("continue 10000")
print "Lines used: %d" % conf.cache.config_line_size
print "Read hit rate: %f" % (1 - conf.cache.stat_data_read_miss / float(conf.cache.stat_data_read))
run_command("exit")
|
|
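The final statistics line is just 1 - misses/reads; a standalone illustration with hypothetical counts (the Simics conf objects themselves exist only inside the simulator):
# Sketch: the read hit rate computed as in the script above.
stat_data_read = 10000       # hypothetical total reads
stat_data_read_miss = 1250   # hypothetical read misses
print("Read hit rate: %f" % (1 - stat_data_read_miss / float(stat_data_read)))  # 0.875000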
485b45333d1f3c21accfd074e75253073c37d60c
|
strings/palindrome/python/palindrome2.py
|
strings/palindrome/python/palindrome2.py
|
#We check if a string is a palindrome or not using slicing
#Accept a string input
inputString = input("Enter any string:")
#Caseless Comparison
inputString = inputString.casefold()
#check if the string is equal to its reverse
if inputString == inputString[::-1]:
print("Congrats! You typed in a PALINDROME!!")
else:
print("This is not a palindrome. Try Again.")
|
Add an alternative Python palindrome check
|
Add an alternative Python palindrome check
|
Python
|
cc0-1.0
|
ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms
|
Add an alternative Python palindrome check
|
#We check if a string is a palindrome or not using slicing
#Accept a string input
inputString = input("Enter any string:")
#Caseless Comparison
inputString = inputString.casefold()
#check if the string is equal to its reverse
if inputString == inputString[::-1]:
print("Congrats! You typed in a PALINDROME!!")
else:
print("This is not a palindrome. Try Again.")
|
<commit_before><commit_msg>Add an alternative Python palindrome check<commit_after>
|
#We check if a string is a palindrome or not using slicing
#Accept a string input
inputString = input("Enter any string:")
#Caseless Comparison
inputString = inputString.casefold()
#check if the string is equal to its reverse
if inputString == inputString[::-1]:
print("Congrats! You typed in a PALINDROME!!")
else:
print("This is not a palindrome. Try Again.")
|
Add an alternative Python palindrome check#We check if a string is a palindrome or not using slicing
#Accept a string input
inputString = input("Enter any string:")
#Caseless Comparison
inputString = inputString.casefold()
#check if the string is equal to its reverse
if inputString == inputString[::-1]:
print("Congrats! You typed in a PALINDROME!!")
else:
print("This is not a palindrome. Try Again.")
|
<commit_before><commit_msg>Add an alternative Python palindrome check<commit_after>#We check if a string is a palindrome or not using slicing
#Accept a string input
inputString = input("Enter any string:")
#Caseless Comparison
inputString = inputString.casefold()
#check if the string is equal to its reverse
if inputString == inputString[::-1]:
print("Congrats! You typed in a PALINDROME!!")
else:
print("This is not a palindrome. Try Again.")
|
|
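Why casefold() rather than lower() for the caseless comparison: a short illustration.
# Sketch: casefold() normalizes characters that lower() leaves alone.
print("straße".casefold() == "STRASSE".casefold())  # True: ß folds to ss
print("straße".lower() == "STRASSE".lower())        # False: lower() keeps ß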
be6e9ab206deaa4f114e4c8901100c12533be532
|
client/examples/record_camera_trajectory.py
|
client/examples/record_camera_trajectory.py
|
import sys, atexit, argparse, json, time
sys.path.append('..')
import ue4cv
trajectory = []
def message_handler(message):
if message == 'clicked':
rot = [float(v) for v in ue4cv.client.request('vget /camera/0/rotation').split(' ')]
loc = [float(v) for v in ue4cv.client.request('vget /camera/0/location').split(' ')]
trajectory.append(dict(rotation = rot, location = loc))
def save_to_file(filename):
if len(trajectory) != 0:
with open(filename, 'w') as f:
json.dump(trajectory, f, indent = 4)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filename', default='camera-trajectory.json')
args = parser.parse_args()
atexit.register(save_to_file, args.filename)
ue4cv.client.connect()
ue4cv.client.message_handler = message_handler
if not ue4cv.client.isconnected():
print 'Cannot connect to the game, please run the game downloaded from http://unrealcv.github.io first'
else:
time.sleep(60 * 60 * 24)
|
Add a script to record camera trajectory.
|
Add a script to record camera trajectory.
|
Python
|
mit
|
qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv
|
Add a script to record camera trajectory.
|
import sys, atexit, argparse, json, time
sys.path.append('..')
import ue4cv
trajectory = []
def message_handler(message):
if message == 'clicked':
rot = [float(v) for v in ue4cv.client.request('vget /camera/0/rotation').split(' ')]
loc = [float(v) for v in ue4cv.client.request('vget /camera/0/location').split(' ')]
trajectory.append(dict(rotation = rot, location = loc))
def save_to_file(filename):
if len(trajectory) != 0:
with open(filename, 'w') as f:
json.dump(trajectory, f, indent = 4)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filename', default='camera-trajectory.json')
args = parser.parse_args()
atexit.register(save_to_file, args.filename)
ue4cv.client.connect()
ue4cv.client.message_handler = message_handler
if not ue4cv.client.isconnected():
print 'Cannot connect to the game, please run the game downloaded from http://unrealcv.github.io first'
else:
time.sleep(60 * 60 * 24)
|
<commit_before><commit_msg>Add a script to record camera trajectory.<commit_after>
|
import sys, atexit, argparse, json, time
sys.path.append('..')
import ue4cv
trajectory = []
def message_handler(message):
if message == 'clicked':
rot = [float(v) for v in ue4cv.client.request('vget /camera/0/rotation').split(' ')]
loc = [float(v) for v in ue4cv.client.request('vget /camera/0/location').split(' ')]
trajectory.append(dict(rotation = rot, location = loc))
def save_to_file(filename):
if len(trajectory) != 0:
with open(filename, 'w') as f:
json.dump(trajectory, f, indent = 4)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filename', default='camera-trajectory.json')
args = parser.parse_args()
atexit.register(save_to_file, args.filename)
ue4cv.client.connect()
ue4cv.client.message_handler = message_handler
if not ue4cv.client.isconnected():
print 'Cannot connect to the game, please run the game downloaded from http://unrealcv.github.io first'
else:
time.sleep(60 * 60 * 24)
|
Add a script to record camera trajectory.import sys, atexit, argparse, json, time
sys.path.append('..')
import ue4cv
trajectory = []
def message_handler(message):
if message == 'clicked':
rot = [float(v) for v in ue4cv.client.request('vget /camera/0/rotation').split(' ')]
loc = [float(v) for v in ue4cv.client.request('vget /camera/0/location').split(' ')]
trajectory.append(dict(rotation = rot, location = loc))
def save_to_file(filename):
if len(trajectory) != 0:
with open(filename, 'w') as f:
json.dump(trajectory, f, indent = 4)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filename', default='camera-trajectory.json')
args = parser.parse_args()
atexit.register(save_to_file, args.filename)
ue4cv.client.connect()
ue4cv.client.message_handler = message_handler
if not ue4cv.client.isconnected():
print 'Cannot connect to the game, please run the game downloaded from http://unrealcv.github.io first'
else:
time.sleep(60 * 60 * 24)
|
<commit_before><commit_msg>Add a script to record camera trajectory.<commit_after>import sys, atexit, argparse, json, time
sys.path.append('..')
import ue4cv
trajectory = []
def message_handler(message):
if message == 'clicked':
rot = [float(v) for v in ue4cv.client.request('vget /camera/0/rotation').split(' ')]
loc = [float(v) for v in ue4cv.client.request('vget /camera/0/location').split(' ')]
trajectory.append(dict(rotation = rot, location = loc))
def save_to_file(filename):
if len(trajectory) != 0:
with open(filename, 'w') as f:
json.dump(trajectory, f, indent = 4)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--filename', default='camera-trajectory.json')
args = parser.parse_args()
atexit.register(save_to_file, args.filename)
ue4cv.client.connect()
ue4cv.client.message_handler = message_handler
if not ue4cv.client.isconnected():
print 'Cannot connect to the game, please run the game downloaded from http://unrealcv.github.io first'
else:
time.sleep(60 * 60 * 24)
|
|
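Reading the recorded file back is symmetric to save_to_file; a sketch assuming the default filename from the commit:
# Sketch: load the recorded trajectory back from disk.
import json
with open('camera-trajectory.json') as f:
    trajectory = json.load(f)
for pose in trajectory:
    print(pose['location'], pose['rotation'])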
d3c373ee886c78fe80494efdc088a099aa949655
|
account/management/commands/create_signups.py
|
account/management/commands/create_signups.py
|
# coding=utf8
from __future__ import print_function
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sites.models import Site
from django.core import urlresolvers
from account.models import SignupCode
class Command(BaseCommand):
args = '<count> <filename> [<expiry_in_days_from_now>]'
help = 'Generates signup codes and outputs url to file'
def handle(self, *args, **options):
site = Site.objects.get_current()
expiry = int(args[2]) if len(args) > 2 else 768
with open(args[1], mode="w") as fh:
for i in range(int(args[0])):
signup = SignupCode.create(max_uses=1, expiry=expiry, check_exists=False)
fh.write("http://{0}{1}?code={2},".format(site.domain,
urlresolvers.reverse('account_signup'),
signup.code))
signup.save()
|
Add create signups management command that will create a batch of signup codes and output to a file
|
Add create signups management command that will create a batch of signup
codes and output to a file
|
Python
|
mit
|
ntucker/django-user-accounts,ntucker/django-user-accounts
|
Add create signups management command that will create a batch of signup
codes and output to a file
|
# coding=utf8
from __future__ import print_function
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sites.models import Site
from django.core import urlresolvers
from account.models import SignupCode
class Command(BaseCommand):
args = '<count> <filename> [<expiry_in_days_from_now>]'
help = 'Generates signup codes and outputs url to file'
def handle(self, *args, **options):
site = Site.objects.get_current()
expiry = int(args[2]) if len(args) > 2 else 768
with open(args[1], mode="w") as fh:
for i in range(int(args[0])):
signup = SignupCode.create(max_uses=1, expiry=expiry, check_exists=False)
fh.write("http://{0}{1}?code={2},".format(site.domain,
urlresolvers.reverse('account_signup'),
signup.code))
signup.save()
|
<commit_before><commit_msg>Add create signups management command that will create a batch of signup
codes and output to a file<commit_after>
|
# coding=utf8
from __future__ import print_function
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sites.models import Site
from django.core import urlresolvers
from account.models import SignupCode
class Command(BaseCommand):
args = '<count> <filename> [<expiry_in_days_from_now>]'
help = 'Generates signup codes and outputs url to file'
def handle(self, *args, **options):
site = Site.objects.get_current()
expiry = int(args[2]) if len(args) > 2 else 768
with open(args[1], mode="w") as fh:
for i in range(int(args[0])):
signup = SignupCode.create(max_uses=1, expiry=expiry, check_exists=False)
fh.write("http://{0}{1}?code={2},".format(site.domain,
urlresolvers.reverse('account_signup'),
signup.code))
signup.save()
|
Add create signups management command that will create a batch of signup
codes and output to a file# coding=utf8
from __future__ import print_function
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sites.models import Site
from django.core import urlresolvers
from account.models import SignupCode
class Command(BaseCommand):
args = '<count> <filename> [<expiry_in_days_from_now>]'
help = 'Generates signup codes and outputs url to file'
def handle(self, *args, **options):
site = Site.objects.get_current()
expiry = int(args[2]) if len(args) > 2 else 768
with open(args[1], mode="w") as fh:
for i in range(int(args[0])):
signup = SignupCode.create(max_uses=1, expiry=expiry, check_exists=False)
fh.write("http://{0}{1}?code={2},".format(site.domain,
urlresolvers.reverse('account_signup'),
signup.code))
signup.save()
|
<commit_before><commit_msg>Add create signups management command that will create a batch of signup
codes and output to a file<commit_after># coding=utf8
from __future__ import print_function
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sites.models import Site
from django.core import urlresolvers
from account.models import SignupCode
class Command(BaseCommand):
args = '<count> <filename> [<expiry_in_days_from_now>]'
help = 'Generates signup codes and outputs url to file'
def handle(self, *args, **options):
site = Site.objects.get_current()
expiry = int(args[2]) if len(args) > 2 else 768
with open(args[1], mode="w") as fh:
for i in range(int(args[0])):
signup = SignupCode.create(max_uses=1, expiry=expiry, check_exists=False)
fh.write("http://{0}{1}?code={2},".format(site.domain,
urlresolvers.reverse('account_signup'),
signup.code))
signup.save()
|
|
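The command writes one comma-terminated URL per code on a single line, so splitting them back out is straightforward. A sketch with a hypothetical output filename:
# Sketch: recover the signup URLs written by the management command.
with open("signup_urls.txt") as fh:  # hypothetical output file
    urls = [u for u in fh.read().split(",") if u]
print(len(urls), "signup URLs")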
a60f858598a5b74ffa0238966249bbde612c2f70
|
HackerRank/ProjectEuler_plus/euler_021.py
|
HackerRank/ProjectEuler_plus/euler_021.py
|
#!/usr/bin/env python3
from math import sqrt
def factors(n):
results = set()
for i in range(1, int(sqrt(n)) + 1):
if n % i == 0:
results.add(i)
results.add(int(n / i))
return sum(results)
amicables = [220, 1184, 2620, 5020, 6232, 10744, 12285, 17296, 63020, 66928, 67095, 69615, 79750]
temp = []
for a in amicables:
temp.append(factors(a) - a)
amicables.extend(temp)
amicables = sorted(amicables)
for T in range(int(input())):
N = int(input())
print(sum([a for a in amicables if a <= N]))
|
Add project euler plus 21
|
Add project euler plus 21
|
Python
|
mit
|
byung-u/ProjectEuler
|
Add project euler plus 21
|
#!/usr/bin/env python3
from math import sqrt
def factors(n):
results = set()
for i in range(1, int(sqrt(n)) + 1):
if n % i == 0:
results.add(i)
results.add(int(n / i))
return sum(results)
amicables = [220, 1184, 2620, 5020, 6232, 10744, 12285, 17296, 63020, 66928, 67095, 69615, 79750]
temp = []
for a in amicables:
temp.append(factors(a) - a)
amicables.extend(temp)
amicables = sorted(amicables)
for T in range(int(input())):
N = int(input())
print(sum([a for a in amicables if a <= N]))
|
<commit_before><commit_msg>Add project euler plus 21<commit_after>
|
#!/usr/bin/env python3
from math import sqrt
def factors(n):
results = set()
for i in range(1, int(sqrt(n)) + 1):
if n % i == 0:
results.add(i)
results.add(int(n / i))
return sum(results)
amicables = [220, 1184, 2620, 5020, 6232, 10744, 12285, 17296, 63020, 66928, 67095, 69615, 79750]
temp = []
for a in amicables:
temp.append(factors(a) - a)
amicables.extend(temp)
amicables = sorted(amicables)
for T in range(int(input())):
N = int(input())
print(sum([a for a in amicables if a <= N]))
|
Add project euler plus 21#!/usr/bin/env python3
from math import sqrt
def factors(n):
results = set()
for i in range(1, int(sqrt(n)) + 1):
if n % i == 0:
results.add(i)
results.add(int(n / i))
return sum(results)
amicables = [220, 1184, 2620, 5020, 6232, 10744, 12285, 17296, 63020, 66928, 67095, 69615, 79750]
temp = []
for a in amicables:
temp.append(factors(a) - a)
amicables.extend(temp)
amicables = sorted(amicables)
for T in range(int(input())):
N = int(input())
print(sum([a for a in amicables if a <= N]))
|
<commit_before><commit_msg>Add project euler plus 21<commit_after>#!/usr/bin/env python3
from math import sqrt
def factors(n):
results = set()
for i in range(1, int(sqrt(n)) + 1):
if n % i == 0:
results.add(i)
results.add(int(n / i))
return sum(results)
amicables = [220, 1184, 2620, 5020, 6232, 10744, 12285, 17296, 63020, 66928, 67095, 69615, 79750]
temp = []
for a in amicables:
temp.append(factors(a) - a)
amicables.extend(temp)
amicables = sorted(amicables)
for T in range(int(input())):
N = int(input())
print(sum([a for a in amicables if a <= N]))
|
|
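Since factors(n) sums every divisor including n itself, factors(n) - n is the proper-divisor sum d(n), and the amicable property d(a) = b, d(b) = a can be checked directly with the same helper:
# Sketch: verify the amicable pair (220, 284).
from math import sqrt
def factors(n):
    results = set()
    for i in range(1, int(sqrt(n)) + 1):
        if n % i == 0:
            results.add(i)
            results.add(int(n / i))
    return sum(results)
print(factors(220) - 220, factors(284) - 284)  # 284 220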
90a43f24beae71c6c10b47e8a62f40a572f42d9c
|
scripts/rostype.py
|
scripts/rostype.py
|
'''
@returns(type)
def function_to_modify():
...
This will return the value from the function if it is an instance
of the type passed into the returns. Otherwise it will
throw a TypeError
The function modified with the decorator will also have a property
return_type that gives the class that is guaranteed (neglecting Exceptions)
to be returned from the function.
Example:
from nav_msgs.msg import Odometry
@returns(Odometry)
def ogian(g):
return g
>>> ogian.return_type
<class 'nav_msgs.msg._Odometry.Odometry'>
>>> ogian(Odometry())
returns the new Odometry instance
>>> ogian("This is a string")
throws TypeError
'''
def returns(type):
def wrap(f):
def wrapped_f(*args, **kwargs):
v = f(*args, **kwargs)
if isinstance(v, type):
# print 'return matches type'
return v
else:
# print 'return is not of type'
raise TypeError('rostype: returns: Function did not match forced type')
wrapped_f.return_type = type
return wrapped_f
return wrap
|
Create decorator that forces a return type
|
Create decorator that forces a return type
This will either return the value for the given function, or throw
an error if it tries to return a value that is not the proper type
I am using this to enforce a return type on service proxies being
used as callables, which, at least in Python, don't have a
definite return type.
|
Python
|
mit
|
buckbaskin/drive_stack,buckbaskin/drive_stack,buckbaskin/drive_stack
|
Create decorator that forces a return type
This will either return the value for the given function, or throw
an error if it tries to return a value that is not the proper type
I am using this to enforce a return type on service proxies being
used as callables, which, at least in Python, don't have a
definite return type.
|
'''
@returns(type)
def function_to_modify():
...
This will return the value from the function if it is an instance
of the type passed into the returns. Otherwise it will
throw a TypeError
The function modified with the decorator will also have a property
return_type that gives the class that is guaranteed (neglecting Exceptions)
to be returned from the function.
Example:
from nav_msgs.msg import Odometry
@returns(Odometry)
def ogian(g):
return g
>>> ogian.return_type
<class 'nav_msgs.msg._Odometry.Odometry'>
>>> ogian(Odometry())
returns the new Odometry instance
>>> ogian("This is a string")
throws TypeError
'''
def returns(type):
def wrap(f):
def wrapped_f(*args, **kwargs):
v = f(*args, **kwargs)
if isinstance(v, type):
# print 'return matches type'
return v
else:
# print 'return is not of type'
raise TypeError('rostype: returns: Function did not match forced type')
wrapped_f.return_type = type
return wrapped_f
return wrap
|
<commit_before><commit_msg>Create decorator that forces a return type
This will either return the value for the given function, or throw
an error if it tries to return a value that is not the proper type
I am using this to enforce a return type on service proxies being
used as callables, which, at least in Python, don't have a
definite return type.<commit_after>
|
'''
@returns(type)
def function_to_modify():
...
This will return the value from the function if it is an instance
of the type passed into the returns. Otherwise it will
throw a TypeError
The function modified with the decorator will also have a property
return_type that gives the class that is guaranteed (neglecting Exceptions)
to be returned from the function.
Example:
from nav_msgs.msg import Odometry
@returns(Odometry)
def ogian(g):
return g
>>> ogian.return_type
<class 'nav_msgs.msg._Odometry.Odometry'>
>>> ogian(Odometry())
returns the new Odometry instance
>>> ogian("This is a string")
throws TypeError
'''
def returns(type):
def wrap(f):
def wrapped_f(*args, **kwargs):
v = f(*args, **kwargs)
if isinstance(v, type):
# print 'return matches type'
return v
else:
# print 'return is not of type'
raise TypeError('rostype: returns: Function did not match forced type')
wrapped_f.return_type = type
return wrapped_f
return wrap
|
Create decorator that forces a return type
This will either return the value for the given function, or throw
an error if it tries to return a value that is not the proper type
I am using this to enforce a return type on service proxies being
used as callables, which, at least in Python, don't have a
definite return type.'''
@returns(type)
def function_to_modify():
...
This will return the value from the function if it is an instance
of the type passed into the returns. Otherwise it will
throw a TypeError
The function modified with the decorator will also have a property
return_type that gives the class that is guaranteed (neglecting Exceptions)
to be returned from the function.
Example:
from nav_msgs.msg import Odometry
@returns(Odometry)
def ogian(g):
return g
>>> ogian.return_type
<class 'nav_msgs.msg._Odometry.Odometry'>
>>> ogian(Odometry())
returns the new Odometry instance
>>> ogian("This is a string")
throws TypeError
'''
def returns(type):
def wrap(f):
def wrapped_f(*args, **kwargs):
v = f(*args, **kwargs)
if isinstance(v, type):
# print 'return matches type'
return v
else:
# print 'return is not of type'
raise TypeError('rostype: returns: Function did not match forced type')
wrapped_f.return_type = type
return wrapped_f
return wrap
|
<commit_before><commit_msg>Create decorator that forces a return type
This will either return the value for the given function, or throw
an error if it tries to return a value that is not the proper type
I am using this to enforce a return type on service proxies being
used as callables, which, at least in Python, don't have a
definite return type.<commit_after>'''
@returns(type)
def function_to_modify():
...
This will return the value from the function if it is an instance
of the type passed into the returns. Otherwise it will
throw a TypeError
The function modified with the decorator will also have a property
return_type that gives the class that is guaranteed (neglecting Exceptions)
to be returned from the function.
Example:
from nav_msgs.msg import Odometry
@returns(Odometry)
def ogian(g):
return g
>>> ogian.return_type
<class 'nav_msgs.msg._Odometry.Odometry'>
>>> ogian(Odometry())
returns the new Odometry instance
>>> ogian("This is a string")
throws TypeError
'''
def returns(type):
def wrap(f):
def wrapped_f(*args, **kwargs):
v = f(*args, **kwargs)
if isinstance(v, type):
# print 'return matches type'
return v
else:
# print 'return is not of type'
raise TypeError('rostype: returns: Function did not match forced type')
wrapped_f.return_type = type
return wrapped_f
return wrap
|
|
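One refinement worth noting: the plain closure above discards f's name and docstring; functools.wraps preserves them. A hedged variant of the same decorator (also avoiding shadowing the type() builtin):
# Sketch: same decorator, with functools.wraps keeping f's metadata.
import functools
def returns(expected_type):
    def wrap(f):
        @functools.wraps(f)
        def wrapped_f(*args, **kwargs):
            v = f(*args, **kwargs)
            if not isinstance(v, expected_type):
                raise TypeError('rostype: returns: Function did not match forced type')
            return v
        wrapped_f.return_type = expected_type
        return wrapped_f
    return wrap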
299a4356d49e9b694c1fb6340cda2669e8e30377
|
api/check_examples.py
|
api/check_examples.py
|
#! /usr/bin/env python
import sys
def import_error(module, package, debian, error):
sys.stderr.write((
"Error importing %(module)s: %(error)r\n"
"To install %(module)s run:\n"
" pip install %(package)s\n"
"or on Debian run:\n"
" sudo apt-get install python-%(debian)s\n"
) % locals())
if __name__=='__main__':
sys.exit(1)
try:
import jsonschema
except ImportError as e:
import_error("jsonschema", "jsonschema", "jsonschema", e)
raise
try:
import yaml
except ImportError as e:
import_error("yaml", "PyYAML", "yaml", e)
raise
import json
import os
def check_response(filepath, request, code, response):
try:
example = json.loads(
response.get('examples', {}).get('application/json', "null")
)
except Exception as e:
raise ValueError("Error parsing JSON example response for %r %r" % (
request, code
), e)
schema = response.get('schema')
fileurl = "file://" + os.path.abspath(filepath)
if example and schema:
try:
print ("Checking schema for: %r %r %r" % (filepath, request, code))
# Setting the 'id' tells jsonschema where the file is so that it
# can correctly resolve relative $ref references in the schema
schema['id'] = fileurl
jsonschema.validate(example, schema)
except Exception as e:
raise ValueError("Error validating JSON schema for %r %r" %(
request, code
), e)
def check_swagger_file(filepath):
with open(filepath) as f:
swagger = yaml.load(f)
for path, path_api in swagger['paths'].items():
for method, request_api in path_api.items():
request = "%s %s" % (method.upper(), path)
try:
responses = request_api['responses']
except KeyError:
raise ValueError("No responses for %r" % (request,))
for code, response in responses.items():
check_response(filepath, request, code, response)
if __name__=='__main__':
for path in sys.argv[1:]:
try:
check_swagger_file(path)
except Exception as e:
raise ValueError("Error checking file %r" % (path,), e)
|
Add script to check that the example responses in the swagger match the examples.
|
Add script to check that the example responses in the swagger match
the examples.
|
Python
|
apache-2.0
|
matrix-org/matrix-doc,matrix-org/matrix-doc,matrix-org/matrix-doc,matrix-org/matrix-doc
|
Add script to check that the example responses in the swagger match
the examples.
|
#! /usr/bin/env python
import sys
def import_error(module, package, debian, error):
sys.stderr.write((
"Error importing %(module)s: %(error)r\n"
"To install %(module)s run:\n"
" pip install %(package)s\n"
"or on Debian run:\n"
" sudo apt-get install python-%(debian)s\n"
) % locals())
if __name__=='__main__':
sys.exit(1)
try:
import jsonschema
except ImportError as e:
import_error("jsonschema", "jsonschema", "jsonschema", e)
raise
try:
import yaml
except ImportError as e:
import_error("yaml", "PyYAML", "yaml", e)
raise
import json
import os
def check_response(filepath, request, code, response):
try:
example = json.loads(
response.get('examples', {}).get('application/json', "null")
)
except Exception as e:
raise ValueError("Error parsing JSON example response for %r %r" % (
request, code
), e)
schema = response.get('schema')
fileurl = "file://" + os.path.abspath(filepath)
if example and schema:
try:
print ("Checking schema for: %r %r %r" % (filepath, request, code))
# Setting the 'id' tells jsonschema where the file is so that it
# can correctly resolve relative $ref references in the schema
schema['id'] = fileurl
jsonschema.validate(example, schema)
except Exception as e:
raise ValueError("Error validating JSON schema for %r %r" %(
request, code
), e)
def check_swagger_file(filepath):
with open(filepath) as f:
swagger = yaml.load(f)
for path, path_api in swagger['paths'].items():
for method, request_api in path_api.items():
request = "%s %s" % (method.upper(), path)
try:
responses = request_api['responses']
except KeyError:
raise ValueError("No responses for %r" % (request,))
for code, response in responses.items():
check_response(filepath, request, code, response)
if __name__=='__main__':
for path in sys.argv[1:]:
try:
check_swagger_file(path)
except Exception as e:
raise ValueError("Error checking file %r" % (path,), e)
|
<commit_before><commit_msg>Add script to check that the example responses in the swagger match
the examples.<commit_after>
|
#! /usr/bin/env python
import sys
def import_error(module, package, debian, error):
sys.stderr.write((
"Error importing %(module)s: %(error)r\n"
"To install %(module)s run:\n"
" pip install %(package)s\n"
"or on Debian run:\n"
" sudo apt-get install python-%(debian)s\n"
) % locals())
if __name__=='__main__':
sys.exit(1)
try:
import jsonschema
except ImportError as e:
import_error("jsonschema", "jsonschema", "jsonschema", e)
raise
try:
import yaml
except ImportError as e:
import_error("yaml", "PyYAML", "yaml", e)
raise
import json
import os
def check_response(filepath, request, code, response):
try:
example = json.loads(
response.get('examples', {}).get('application/json', "null")
)
except Exception as e:
raise ValueError("Error parsing JSON example response for %r %r" % (
request, code
), e)
schema = response.get('schema')
fileurl = "file://" + os.path.abspath(filepath)
if example and schema:
try:
print ("Checking schema for: %r %r %r" % (filepath, request, code))
# Setting the 'id' tells jsonschema where the file is so that it
# can correctly resolve relative $ref references in the schema
schema['id'] = fileurl
jsonschema.validate(example, schema)
except Exception as e:
raise ValueError("Error validating JSON schema for %r %r" %(
request, code
), e)
def check_swagger_file(filepath):
with open(filepath) as f:
swagger = yaml.load(f)
for path, path_api in swagger['paths'].items():
for method, request_api in path_api.items():
request = "%s %s" % (method.upper(), path)
try:
responses = request_api['responses']
except KeyError:
raise ValueError("No responses for %r" % (request,))
for code, response in responses.items():
check_response(filepath, request, code, response)
if __name__=='__main__':
for path in sys.argv[1:]:
try:
check_swagger_file(path)
except Exception as e:
raise ValueError("Error checking file %r" % (path,), e)
|
Add script to check that the example responses in the swagger match
the examples.#! /usr/bin/env python
import sys
def import_error(module, package, debian, error):
sys.stderr.write((
"Error importing %(module)s: %(error)r\n"
"To install %(module)s run:\n"
" pip install %(package)s\n"
"or on Debian run:\n"
" sudo apt-get install python-%(debian)s\n"
) % locals())
if __name__=='__main__':
sys.exit(1)
try:
import jsonschema
except ImportError as e:
import_error("jsonschema", "jsonschema", "jsonschema", e)
raise
try:
import yaml
except ImportError as e:
import_error("yaml", "PyYAML", "yaml", e)
raise
import json
import os
def check_response(filepath, request, code, response):
try:
example = json.loads(
response.get('examples', {}).get('application/json', "null")
)
except Exception as e:
raise ValueError("Error parsing JSON example response for %r %r" % (
request, code
), e)
schema = response.get('schema')
fileurl = "file://" + os.path.abspath(filepath)
if example and schema:
try:
print ("Checking schema for: %r %r %r" % (filepath, request, code))
# Setting the 'id' tells jsonschema where the file is so that it
# can correctly resolve relative $ref references in the schema
schema['id'] = fileurl
jsonschema.validate(example, schema)
except Exception as e:
raise ValueError("Error validating JSON schema for %r %r" %(
request, code
), e)
def check_swagger_file(filepath):
with open(filepath) as f:
swagger = yaml.load(f)
for path, path_api in swagger['paths'].items():
for method, request_api in path_api.items():
request = "%s %s" % (method.upper(), path)
try:
responses = request_api['responses']
except KeyError:
raise ValueError("No responses for %r" % (request,))
for code, response in responses.items():
check_response(filepath, request, code, response)
if __name__=='__main__':
for path in sys.argv[1:]:
try:
check_swagger_file(path)
except Exception as e:
raise ValueError("Error checking file %r" % (path,), e)
|
<commit_before><commit_msg>Add script to check that the example responses in the swagger match
the examples.<commit_after>#! /usr/bin/env python
import sys
def import_error(module, package, debian, error):
sys.stderr.write((
"Error importing %(module)s: %(error)r\n"
"To install %(module)s run:\n"
" pip install %(package)s\n"
"or on Debian run:\n"
" sudo apt-get install python-%(debian)s\n"
) % locals())
if __name__=='__main__':
sys.exit(1)
try:
import jsonschema
except ImportError as e:
import_error("jsonschema", "jsonschema", "jsonschema", e)
raise
try:
import yaml
except ImportError as e:
import_error("yaml", "PyYAML", "yaml", e)
raise
import json
import os
def check_response(filepath, request, code, response):
try:
example = json.loads(
response.get('examples', {}).get('application/json', "null")
)
except Exception as e:
raise ValueError("Error parsing JSON example response for %r %r" % (
request, code
), e)
schema = response.get('schema')
fileurl = "file://" + os.path.abspath(filepath)
if example and schema:
try:
print ("Checking schema for: %r %r %r" % (filepath, request, code))
# Setting the 'id' tells jsonschema where the file is so that it
# can correctly resolve relative $ref references in the schema
schema['id'] = fileurl
jsonschema.validate(example, schema)
except Exception as e:
raise ValueError("Error validating JSON schema for %r %r" %(
request, code
), e)
def check_swagger_file(filepath):
with open(filepath) as f:
swagger = yaml.load(f)
for path, path_api in swagger['paths'].items():
for method, request_api in path_api.items():
request = "%s %s" % (method.upper(), path)
try:
responses = request_api['responses']
except KeyError:
raise ValueError("No responses for %r" % (request,))
for code, response in responses.items():
check_response(filepath, request, code, response)
if __name__=='__main__':
for path in sys.argv[1:]:
try:
check_swagger_file(path)
except Exception as e:
raise ValueError("Error checking file %r" % (path,), e)
|
|
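A quick sketch of driving the checker above end to end; the script filename check_examples.py and the spec filename are hypothetical, chosen only to illustrate the expected input shape.
# Hypothetical usage sketch for the swagger example checker above.
import subprocess
import yaml
spec = {
    "paths": {
        "/ping": {
            "get": {
                "responses": {
                    "200": {
                        "schema": {
                            "type": "object",
                            "properties": {"ok": {"type": "boolean"}},
                        },
                        # Examples are stored as JSON strings, matching what
                        # check_response() feeds to json.loads().
                        "examples": {"application/json": '{"ok": true}'},
                    }
                }
            }
        }
    }
}
with open("example_api.yaml", "w") as f:
    yaml.dump(spec, f)
# The script validates every example against its schema and raises on mismatch.
subprocess.check_call(["python", "check_examples.py", "example_api.yaml"])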
d8d6c37293fc2a4963c83c6c34ff92a103c40334
|
tests/test_decompiler.py
|
tests/test_decompiler.py
|
import os
import angr
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests'))
def test_decompiling_all_x86_64():
bin_path = os.path.join(test_location, "x86_64", "all")
p = angr.Project(bin_path, auto_load_libs=False)
cfg = p.analyses.CFG(collect_data_references=True)
for f in cfg.functions.values():
dec = p.analyses.Decompiler(f, cfg=cfg)
if dec.codegen is not None:
print(dec.codegen.text)
else:
print("Failed to decompile function %s." % repr(f))
if __name__ == "__main__":
test_decompiling_all_x86_64()
|
Add a test for the decompiler analysis.
|
Add a test for the decompiler analysis.
|
Python
|
bsd-2-clause
|
iamahuman/angr,iamahuman/angr,schieb/angr,angr/angr,angr/angr,iamahuman/angr,schieb/angr,angr/angr,schieb/angr
|
Add a test for the decompiler analysis.
|
import os
import angr
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests'))
def test_decompiling_all_x86_64():
bin_path = os.path.join(test_location, "x86_64", "all")
p = angr.Project(bin_path, auto_load_libs=False)
cfg = p.analyses.CFG(collect_data_references=True)
for f in cfg.functions.values():
dec = p.analyses.Decompiler(f, cfg=cfg)
if dec.codegen is not None:
print(dec.codegen.text)
else:
print("Failed to decompile function %s." % repr(f))
if __name__ == "__main__":
test_decompiling_all_x86_64()
|
<commit_before><commit_msg>Add a test for the decompiler analysis.<commit_after>
|
import os
import angr
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests'))
def test_decompiling_all_x86_64():
bin_path = os.path.join(test_location, "x86_64", "all")
p = angr.Project(bin_path, auto_load_libs=False)
cfg = p.analyses.CFG(collect_data_references=True)
for f in cfg.functions.values():
dec = p.analyses.Decompiler(f, cfg=cfg)
if dec.codegen is not None:
print(dec.codegen.text)
else:
print("Failed to decompile function %s." % repr(f))
if __name__ == "__main__":
test_decompiling_all_x86_64()
|
Add a test for the decompiler analysis.
import os
import angr
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests'))
def test_decompiling_all_x86_64():
bin_path = os.path.join(test_location, "x86_64", "all")
p = angr.Project(bin_path, auto_load_libs=False)
cfg = p.analyses.CFG(collect_data_references=True)
for f in cfg.functions.values():
dec = p.analyses.Decompiler(f, cfg=cfg)
if dec.codegen is not None:
print(dec.codegen.text)
else:
print("Failed to decompile function %s." % repr(f))
if __name__ == "__main__":
test_decompiling_all_x86_64()
|
<commit_before><commit_msg>Add a test for the decompiler analysis.<commit_after>
import os
import angr
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests'))
def test_decompiling_all_x86_64():
bin_path = os.path.join(test_location, "x86_64", "all")
p = angr.Project(bin_path, auto_load_libs=False)
cfg = p.analyses.CFG(collect_data_references=True)
for f in cfg.functions.values():
dec = p.analyses.Decompiler(f, cfg=cfg)
if dec.codegen is not None:
print(dec.codegen.text)
else:
print("Failed to decompile function %s." % repr(f))
if __name__ == "__main__":
test_decompiling_all_x86_64()
|
|
c5b335500ab326b225626927ebadf5c584f58ac4
|
tests/unit/test_files.py
|
tests/unit/test_files.py
|
from isort import files
from isort.settings import DEFAULT_CONFIG
def test_find(tmpdir):
tmp_file = tmpdir.join("file.py")
tmp_file.write("import os, sys\n")
assert tuple(files.find((tmp_file,), DEFAULT_CONFIG, [], [])) == (tmp_file,)
|
Add test for new files module
|
Add test for new files module
|
Python
|
mit
|
PyCQA/isort,PyCQA/isort
|
Add test for new files module
|
from isort import files
from isort.settings import DEFAULT_CONFIG
def test_find(tmpdir):
tmp_file = tmpdir.join("file.py")
tmp_file.write("import os, sys\n")
assert tuple(files.find((tmp_file,), DEFAULT_CONFIG, [], [])) == (tmp_file,)
|
<commit_before><commit_msg>Add test for new files module<commit_after>
|
from isort import files
from isort.settings import DEFAULT_CONFIG
def test_find(tmpdir):
tmp_file = tmpdir.join("file.py")
tmp_file.write("import os, sys\n")
assert tuple(files.find((tmp_file,), DEFAULT_CONFIG, [], [])) == (tmp_file,)
|
Add test for new files modulefrom isort import files
from isort.settings import DEFAULT_CONFIG
def test_find(tmpdir):
tmp_file = tmpdir.join("file.py")
tmp_file.write("import os, sys\n")
assert tuple(files.find((tmp_file,), DEFAULT_CONFIG, [], [])) == (tmp_file,)
|
<commit_before><commit_msg>Add test for new files module<commit_after>from isort import files
from isort.settings import DEFAULT_CONFIG
def test_find(tmpdir):
tmp_file = tmpdir.join("file.py")
tmp_file.write("import os, sys\n")
assert tuple(files.find((tmp_file,), DEFAULT_CONFIG, [], [])) == (tmp_file,)
|
|
3dad0c186250f1ceb01be0a9dcae0831ef44d874
|
test/test_password.py
|
test/test_password.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2017:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.core.utils import init_config
from alignak_app.core.backend import AppBackend
from alignak_app.user.password import PasswordDialog
from alignak_app.core.locales import init_localization
from PyQt5.QtWidgets import QApplication, QWidget
class TestUserManager(unittest2.TestCase):
"""
    This file tests the PasswordDialog class.
"""
init_config()
init_localization()
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
cls.app_backend = AppBackend()
cls.app_backend.login()
except:
pass
def test_initialize(self):
"""Initialize PasswordDialog"""
under_test = PasswordDialog(self.app_backend)
self.assertTrue(under_test.app_backend)
self.assertIsNone(under_test.pass_edit)
self.assertIsNone(under_test.confirm_edit)
self.assertIsNone(under_test.help_label)
under_test.initialize()
self.assertTrue(under_test.app_backend)
self.assertIsNotNone(under_test.pass_edit)
self.assertIsNotNone(under_test.confirm_edit)
self.assertIsNotNone(under_test.help_label)
def test_center(self):
"""Center PasswordDialog"""
under_test = PasswordDialog(self.app_backend)
old_pos_test = under_test.pos()
self.assertFalse(old_pos_test)
under_test.center(under_test)
new_pos_test = under_test.pos()
self.assertTrue(new_pos_test)
|
Add Unit Tests for PasswordDialog
|
Add Unit Tests for PasswordDialog
|
Python
|
agpl-3.0
|
Alignak-monitoring-contrib/alignak-app,Alignak-monitoring-contrib/alignak-app
|
Add Unit Tests for PasswordDialog
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2017:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.core.utils import init_config
from alignak_app.core.backend import AppBackend
from alignak_app.user.password import PasswordDialog
from alignak_app.core.locales import init_localization
from PyQt5.QtWidgets import QApplication, QWidget
class TestUserManager(unittest2.TestCase):
"""
    This file tests the PasswordDialog class.
"""
init_config()
init_localization()
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
cls.app_backend = AppBackend()
cls.app_backend.login()
except:
pass
def test_initialize(self):
"""Initialize PasswordDialog"""
under_test = PasswordDialog(self.app_backend)
self.assertTrue(under_test.app_backend)
self.assertIsNone(under_test.pass_edit)
self.assertIsNone(under_test.confirm_edit)
self.assertIsNone(under_test.help_label)
under_test.initialize()
self.assertTrue(under_test.app_backend)
self.assertIsNotNone(under_test.pass_edit)
self.assertIsNotNone(under_test.confirm_edit)
self.assertIsNotNone(under_test.help_label)
def test_center(self):
"""Center PasswordDialog"""
under_test = PasswordDialog(self.app_backend)
old_pos_test = under_test.pos()
self.assertFalse(old_pos_test)
under_test.center(under_test)
new_pos_test = under_test.pos()
self.assertTrue(new_pos_test)
|
<commit_before><commit_msg>Add Unit Tests for PasswordDialog<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2017:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.core.utils import init_config
from alignak_app.core.backend import AppBackend
from alignak_app.user.password import PasswordDialog
from alignak_app.core.locales import init_localization
from PyQt5.QtWidgets import QApplication, QWidget
class TestUserManager(unittest2.TestCase):
"""
    This file tests the PasswordDialog class.
"""
init_config()
init_localization()
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
cls.app_backend = AppBackend()
cls.app_backend.login()
except:
pass
def test_initialize(self):
"""Initialize PasswordDialog"""
under_test = PasswordDialog(self.app_backend)
self.assertTrue(under_test.app_backend)
self.assertIsNone(under_test.pass_edit)
self.assertIsNone(under_test.confirm_edit)
self.assertIsNone(under_test.help_label)
under_test.initialize()
self.assertTrue(under_test.app_backend)
self.assertIsNotNone(under_test.pass_edit)
self.assertIsNotNone(under_test.confirm_edit)
self.assertIsNotNone(under_test.help_label)
def test_center(self):
"""Center PasswordDialog"""
under_test = PasswordDialog(self.app_backend)
old_pos_test = under_test.pos()
self.assertFalse(old_pos_test)
under_test.center(under_test)
new_pos_test = under_test.pos()
self.assertTrue(new_pos_test)
|
Add Unit Tests for PasswordDialog#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2017:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.core.utils import init_config
from alignak_app.core.backend import AppBackend
from alignak_app.user.password import PasswordDialog
from alignak_app.core.locales import init_localization
from PyQt5.QtWidgets import QApplication, QWidget
class TestUserManager(unittest2.TestCase):
"""
    This file tests the PasswordDialog class.
"""
init_config()
init_localization()
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
cls.app_backend = AppBackend()
cls.app_backend.login()
except:
pass
def test_initialize(self):
"""Initialize PasswordDialog"""
under_test = PasswordDialog(self.app_backend)
self.assertTrue(under_test.app_backend)
self.assertIsNone(under_test.pass_edit)
self.assertIsNone(under_test.confirm_edit)
self.assertIsNone(under_test.help_label)
under_test.initialize()
self.assertTrue(under_test.app_backend)
self.assertIsNotNone(under_test.pass_edit)
self.assertIsNotNone(under_test.confirm_edit)
self.assertIsNotNone(under_test.help_label)
def test_center(self):
"""Center PasswordDialog"""
under_test = PasswordDialog(self.app_backend)
old_pos_test = under_test.pos()
self.assertFalse(old_pos_test)
under_test.center(under_test)
new_pos_test = under_test.pos()
self.assertTrue(new_pos_test)
|
<commit_before><commit_msg>Add Unit Tests for PasswordDialog<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2017:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.core.utils import init_config
from alignak_app.core.backend import AppBackend
from alignak_app.user.password import PasswordDialog
from alignak_app.core.locales import init_localization
from PyQt5.QtWidgets import QApplication, QWidget
class TestUserManager(unittest2.TestCase):
"""
    This file tests the PasswordDialog class.
"""
init_config()
init_localization()
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
cls.app_backend = AppBackend()
cls.app_backend.login()
except:
pass
def test_initialize(self):
"""Initialize PasswordDialog"""
under_test = PasswordDialog(self.app_backend)
self.assertTrue(under_test.app_backend)
self.assertIsNone(under_test.pass_edit)
self.assertIsNone(under_test.confirm_edit)
self.assertIsNone(under_test.help_label)
under_test.initialize()
self.assertTrue(under_test.app_backend)
self.assertIsNotNone(under_test.pass_edit)
self.assertIsNotNone(under_test.confirm_edit)
self.assertIsNotNone(under_test.help_label)
def test_center(self):
"""Center PasswordDialog"""
under_test = PasswordDialog(self.app_backend)
old_pos_test = under_test.pos()
self.assertFalse(old_pos_test)
under_test.center(under_test)
new_pos_test = under_test.pos()
self.assertTrue(new_pos_test)
|
|
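One caveat in the tests above: the bare except in setUpClass silently swallows failures, so later tests can run with cls.app undefined. A minimal sketch of a more defensive setup, assuming only PyQt5 (the backend wiring is elided):
import sys
from PyQt5.QtWidgets import QApplication
class SaferQtTestMixin:
    @classmethod
    def setUpClass(cls):
        # Qt allows one QApplication per process; reuse an existing instance
        # from an earlier test module instead of masking errors with except: pass.
        cls.app = QApplication.instance() or QApplication(sys.argv)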
fe5a0508f97326e0be4fddca5db65a655607c2f9
|
examples/rate_limit_handling.py
|
examples/rate_limit_handling.py
|
import tweepy
consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
# Setting wait_on_rate_limit to True when initializing API will initialize an
# instance, called api here, that will automatically wait, using time.sleep,
# for the appropriate amount of time when a rate limit is encountered
api = tweepy.API(auth, wait_on_rate_limit=True)
# This will search for Tweets with the query "Twitter", returning up to the
# maximum of 100 Tweets per request to the Twitter API
# Once the rate limit is reached, it will automatically wait / sleep before
# continuing
for tweet in tweepy.Cursor(api.search_tweets, "Twitter", count=100).items():
print(tweet.id)
|
Add rate limit handling example
|
Add rate limit handling example
|
Python
|
mit
|
svven/tweepy,tweepy/tweepy
|
Add rate limit handling example
|
import tweepy
consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
# Setting wait_on_rate_limit to True when initializing API will initialize an
# instance, called api here, that will automatically wait, using time.sleep,
# for the appropriate amount of time when a rate limit is encountered
api = tweepy.API(auth, wait_on_rate_limit=True)
# This will search for Tweets with the query "Twitter", returning up to the
# maximum of 100 Tweets per request to the Twitter API
# Once the rate limit is reached, it will automatically wait / sleep before
# continuing
for tweet in tweepy.Cursor(api.search_tweets, "Twitter", count=100).items():
print(tweet.id)
|
<commit_before><commit_msg>Add rate limit handling example<commit_after>
|
import tweepy
consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
# Setting wait_on_rate_limit to True when initializing API will initialize an
# instance, called api here, that will automatically wait, using time.sleep,
# for the appropriate amount of time when a rate limit is encountered
api = tweepy.API(auth, wait_on_rate_limit=True)
# This will search for Tweets with the query "Twitter", returning up to the
# maximum of 100 Tweets per request to the Twitter API
# Once the rate limit is reached, it will automatically wait / sleep before
# continuing
for tweet in tweepy.Cursor(api.search_tweets, "Twitter", count=100).items():
print(tweet.id)
|
Add rate limit handling exampleimport tweepy
consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
# Setting wait_on_rate_limit to True when initializing API will initialize an
# instance, called api here, that will automatically wait, using time.sleep,
# for the appropriate amount of time when a rate limit is encountered
api = tweepy.API(auth, wait_on_rate_limit=True)
# This will search for Tweets with the query "Twitter", returning up to the
# maximum of 100 Tweets per request to the Twitter API
# Once the rate limit is reached, it will automatically wait / sleep before
# continuing
for tweet in tweepy.Cursor(api.search_tweets, "Twitter", count=100).items():
print(tweet.id)
|
<commit_before><commit_msg>Add rate limit handling example<commit_after>import tweepy
consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
# Setting wait_on_rate_limit to True when initializing API will initialize an
# instance, called api here, that will automatically wait, using time.sleep,
# for the appropriate amount of time when a rate limit is encountered
api = tweepy.API(auth, wait_on_rate_limit=True)
# This will search for Tweets with the query "Twitter", returning up to the
# maximum of 100 Tweets per request to the Twitter API
# Once the rate limit is reached, it will automatically wait / sleep before
# continuing
for tweet in tweepy.Cursor(api.search_tweets, "Twitter", count=100).items():
print(tweet.id)
|
|
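For contrast with wait_on_rate_limit, a hedged sketch of handling the limit by hand; it assumes tweepy 4.x, where the HTTP 429 error is exposed as tweepy.TooManyRequests.
import time
import tweepy
def items_with_backoff(api, query):
    cursor = tweepy.Cursor(api.search_tweets, query, count=100).items()
    while True:
        try:
            yield next(cursor)
        except tweepy.TooManyRequests:
            # Twitter rate-limit windows reset every 15 minutes.
            time.sleep(15 * 60)
        except StopIteration:
            return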
afadff60dfe8479196b77a86e058e412f42ef5ba
|
kubernetes/examples/kubeclient_simple.py
|
kubernetes/examples/kubeclient_simple.py
|
#!/usr/bin/env python
from kubernetes import client, config, watch
config.load_kube_config()
v1 = client.CoreV1Api()
# Watch pods in all namespaces.
w = watch.Watch()
for event in w.stream(v1.list_pod_for_all_namespaces):
print("Event: %s %s %s" % (event['type'],
event['object'].kind, event['object'].metadata.name))
|
Add a simple example of a python kubernetes client
|
Add a simple example of a python kubernetes client
|
Python
|
apache-2.0
|
bdastur/notes,bdastur/notes,bdastur/notes,bdastur/notes,bdastur/notes,bdastur/notes
|
Add a simple example of a python kubernetes client
|
#!/usr/bin/env python
from kubernetes import client, config, watch
config.load_kube_config()
v1 = client.CoreV1Api()
# Watch pods in all namespaces.
w = watch.Watch()
for event in w.stream(v1.list_pod_for_all_namespaces):
print("Event: %s %s %s" % (event['type'],
event['object'].kind, event['object'].metadata.name))
|
<commit_before><commit_msg>Add a simple example of a python kubernetes client<commit_after>
|
#!/usr/bin/env python
from kubernetes import client, config, watch
config.load_kube_config()
v1 = client.CoreV1Api()
# Watch pods in all namespaces.
w = watch.Watch()
for event in w.stream(v1.list_pod_for_all_namespaces):
print("Event: %s %s %s" % (event['type'],
event['object'].kind, event['object'].metadata.name))
|
Add a simple example of a python kubernetes client#!/usr/bin/env python
from kubernetes import client, config, watch
config.load_kube_config()
v1 = client.CoreV1Api()
# Watch pods in all namespaces.
w = watch.Watch()
for event in w.stream(v1.list_pod_for_all_namespaces):
print("Event: %s %s %s" % (event['type'],
event['object'].kind, event['object'].metadata.name))
|
<commit_before><commit_msg>Add a simple example of a python kubernetes client<commit_after>#!/usr/bin/env python
from kubernetes import client, config, watch
config.load_kube_config()
v1 = client.CoreV1Api()
# Watch pods in all namespaces.
w = watch.Watch()
for event in w.stream(v1.list_pod_for_all_namespaces):
print("Event: %s %s %s" % (event['type'],
event['object'].kind, event['object'].metadata.name))
|
|
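The stream above runs until interrupted; a short sketch of bounding it with the watch module's server-side timeout and an explicit stop():
from kubernetes import client, config, watch
config.load_kube_config()
v1 = client.CoreV1Api()
w = watch.Watch()
# timeout_seconds is forwarded to the API server, so the stream ends on its
# own after roughly 30 seconds instead of blocking forever.
for event in w.stream(v1.list_pod_for_all_namespaces, timeout_seconds=30):
    print("Event: %s %s" % (event['type'], event['object'].metadata.name))
    if event['type'] == 'DELETED':
        w.stop()  # exit the stream early once the interesting event arrives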
db02ae80c04023cb4d984f5df6932aaca7948588
|
examples/image_fromarray.py
|
examples/image_fromarray.py
|
"""Create a nifti image from a numpy array and an affine transform."""
from os import path
import numpy as np
from neuroimaging.core.api import fromarray, save_image, load_image, \
Affine, CoordinateMap
# Imports used just for development and testing. Users typically
# would not use these when creating an image.
from tempfile import NamedTemporaryFile
from neuroimaging.testing import assert_equal
# Load an image to get the array and affine
fn = path.join(path.expanduser('~'), '.nipy', 'tests', 'data',
'avg152T1.nii.gz')
if not path.exists(fn):
    raise IOError('file does not exist: %s\n' % fn)
# Use one of our test files to get an array and affine from.
img = load_image(fn)
arr = np.asarray(img)
affine = img.affine.copy()
# We use a temporary file for this example so as to not create junk
# files in the nipy directory.
tmpfile = NamedTemporaryFile(suffix='.nii.gz')
#
# START HERE
#
# 1) Create a CoordinateMap from the affine transform which specifies
# the mapping from input to output coordinates.
# Specify the axis order of the affine
axes_names = ['x', 'y', 'z']
# Build a CoordinateMap to create the image with
coordmap = CoordinateMap.from_affine(Affine(affine), names=axes_names,
shape=arr.shape)
# 2) Create a nipy image from the array and CoordinateMap
# Create new image
newimg = fromarray(arr, names=axes_names, coordmap=coordmap)
# 3) Save the nipy image to the specified filename
save_image(newimg, tmpfile.name)
#
# END HERE
#
# Reload and verify the affine was saved correctly.
tmpimg = load_image(tmpfile.name)
assert_equal(tmpimg.affine, affine)
assert_equal(np.mean(tmpimg), np.mean(img))
assert_equal(np.std(tmpimg), np.std(img))
assert_equal(np.asarray(tmpimg), np.asarray(img))
|
Add example for creating an image from an array and an affine.
|
Add example for creating an image from an array and an affine.
|
Python
|
bsd-3-clause
|
arokem/nipy,alexis-roche/nireg,arokem/nipy,alexis-roche/register,bthirion/nipy,alexis-roche/nireg,alexis-roche/nipy,alexis-roche/niseg,nipy/nireg,nipy/nipy-labs,bthirion/nipy,nipy/nipy-labs,alexis-roche/nipy,alexis-roche/register,bthirion/nipy,arokem/nipy,arokem/nipy,alexis-roche/register,alexis-roche/niseg,alexis-roche/nipy,alexis-roche/nipy,nipy/nireg,bthirion/nipy
|
Add example for creating an image from an array and an affine.
|
"""Create a nifti image from a numpy array and an affine transform."""
from os import path
import numpy as np
from neuroimaging.core.api import fromarray, save_image, load_image, \
Affine, CoordinateMap
# Imports used just for development and testing. Users typically
# would not use these when creating an image.
from tempfile import NamedTemporaryFile
from neuroimaging.testing import assert_equal
# Load an image to get the array and affine
fn = path.join(path.expanduser('~'), '.nipy', 'tests', 'data',
'avg152T1.nii.gz')
if not path.exists(fn):
    raise IOError('file does not exist: %s\n' % fn)
# Use one of our test files to get an array and affine from.
img = load_image(fn)
arr = np.asarray(img)
affine = img.affine.copy()
# We use a temporary file for this example so as to not create junk
# files in the nipy directory.
tmpfile = NamedTemporaryFile(suffix='.nii.gz')
#
# START HERE
#
# 1) Create a CoordinateMap from the affine transform which specifies
# the mapping from input to output coordinates.
# Specify the axis order of the affine
axes_names = ['x', 'y', 'z']
# Build a CoordinateMap to create the image with
coordmap = CoordinateMap.from_affine(Affine(affine), names=axes_names,
shape=arr.shape)
# 2) Create a nipy image from the array and CoordinateMap
# Create new image
newimg = fromarray(arr, names=axes_names, coordmap=coordmap)
# 3) Save the nipy image to the specified filename
save_image(newimg, tmpfile.name)
#
# END HERE
#
# Reload and verify the affine was saved correctly.
tmpimg = load_image(tmpfile.name)
assert_equal(tmpimg.affine, affine)
assert_equal(np.mean(tmpimg), np.mean(img))
assert_equal(np.std(tmpimg), np.std(img))
assert_equal(np.asarray(tmpimg), np.asarray(img))
|
<commit_before><commit_msg>Add example for creating an image from an array and an affine.<commit_after>
|
"""Create a nifti image from a numpy array and an affine transform."""
from os import path
import numpy as np
from neuroimaging.core.api import fromarray, save_image, load_image, \
Affine, CoordinateMap
# Imports used just for development and testing. Users typically
# would not use these when creating an image.
from tempfile import NamedTemporaryFile
from neuroimaging.testing import assert_equal
# Load an image to get the array and affine
fn = path.join(path.expanduser('~'), '.nipy', 'tests', 'data',
'avg152T1.nii.gz')
if not path.exists(fn):
    raise IOError('file does not exist: %s\n' % fn)
# Use one of our test files to get an array and affine from.
img = load_image(fn)
arr = np.asarray(img)
affine = img.affine.copy()
# We use a temporary file for this example so as to not create junk
# files in the nipy directory.
tmpfile = NamedTemporaryFile(suffix='.nii.gz')
#
# START HERE
#
# 1) Create a CoordinateMap from the affine transform which specifies
# the mapping from input to output coordinates.
# Specify the axis order of the affine
axes_names = ['x', 'y', 'z']
# Build a CoordinateMap to create the image with
coordmap = CoordinateMap.from_affine(Affine(affine), names=axes_names,
shape=arr.shape)
# 2) Create a nipy image from the array and CoordinateMap
# Create new image
newimg = fromarray(arr, names=axes_names, coordmap=coordmap)
# 3) Save the nipy image to the specified filename
save_image(newimg, tmpfile.name)
#
# END HERE
#
# Reload and verify the affine was saved correctly.
tmpimg = load_image(tmpfile.name)
assert_equal(tmpimg.affine, affine)
assert_equal(np.mean(tmpimg), np.mean(img))
assert_equal(np.std(tmpimg), np.std(img))
assert_equal(np.asarray(tmpimg), np.asarray(img))
|
Add example for creating an image from an array and an affine."""Create a nifti image from a numpy array and an affine transform."""
from os import path
import numpy as np
from neuroimaging.core.api import fromarray, save_image, load_image, \
Affine, CoordinateMap
# Imports used just for development and testing. Users typically
# would not use these when creating an image.
from tempfile import NamedTemporaryFile
from neuroimaging.testing import assert_equal
# Load an image to get the array and affine
fn = path.join(path.expanduser('~'), '.nipy', 'tests', 'data',
'avg152T1.nii.gz')
if not path.exists(fn):
    raise IOError('file does not exist: %s\n' % fn)
# Use one of our test files to get an array and affine from.
img = load_image(fn)
arr = np.asarray(img)
affine = img.affine.copy()
# We use a temporary file for this example so as to not create junk
# files in the nipy directory.
tmpfile = NamedTemporaryFile(suffix='.nii.gz')
#
# START HERE
#
# 1) Create a CoordinateMap from the affine transform which specifies
# the mapping from input to output coordinates.
# Specify the axis order of the affine
axes_names = ['x', 'y', 'z']
# Build a CoordinateMap to create the image with
coordmap = CoordinateMap.from_affine(Affine(affine), names=axes_names,
shape=arr.shape)
# 2) Create a nipy image from the array and CoordinateMap
# Create new image
newimg = fromarray(arr, names=axes_names, coordmap=coordmap)
# 3) Save the nipy image to the specified filename
save_image(newimg, tmpfile.name)
#
# END HERE
#
# Reload and verify the affine was saved correctly.
tmpimg = load_image(tmpfile.name)
assert_equal(tmpimg.affine, affine)
assert_equal(np.mean(tmpimg), np.mean(img))
assert_equal(np.std(tmpimg), np.std(img))
assert_equal(np.asarray(tmpimg), np.asarray(img))
|
<commit_before><commit_msg>Add example for creating an image from an array and an affine.<commit_after>"""Create a nifti image from a numpy array and an affine transform."""
from os import path
import numpy as np
from neuroimaging.core.api import fromarray, save_image, load_image, \
Affine, CoordinateMap
# Imports used just for development and testing. Users typically
# would not use these when creating an image.
from tempfile import NamedTemporaryFile
from neuroimaging.testing import assert_equal
# Load an image to get the array and affine
fn = path.join(path.expanduser('~'), '.nipy', 'tests', 'data',
'avg152T1.nii.gz')
if not path.exists(fn):
    raise IOError('file does not exist: %s\n' % fn)
# Use one of our test files to get an array and affine from.
img = load_image(fn)
arr = np.asarray(img)
affine = img.affine.copy()
# We use a temporary file for this example so as to not create junk
# files in the nipy directory.
tmpfile = NamedTemporaryFile(suffix='.nii.gz')
#
# START HERE
#
# 1) Create a CoordinateMap from the affine transform which specifies
# the mapping from input to output coordinates.
# Specify the axis order of the affine
axes_names = ['x', 'y', 'z']
# Build a CoordinateMap to create the image with
coordmap = CoordinateMap.from_affine(Affine(affine), names=axes_names,
shape=arr.shape)
# 2) Create a nipy image from the array and CoordinateMap
# Create new image
newimg = fromarray(arr, names=axes_names, coordmap=coordmap)
# 3) Save the nipy image to the specified filename
save_image(newimg, tmpfile.name)
#
# END HERE
#
# Reload and verify the affine was saved correctly.
tmpimg = load_image(tmpfile.name)
assert_equal(tmpimg.affine, affine)
assert_equal(np.mean(tmpimg), np.mean(img))
assert_equal(np.std(tmpimg), np.std(img))
assert_equal(np.asarray(tmpimg), np.asarray(img))
|
|
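The same array-plus-affine round trip can be sketched with nibabel for comparison; the library choice and the output filename are assumptions, not part of the nipy example above.
import numpy as np
import nibabel as nib
arr = np.zeros((91, 109, 91), dtype=np.float32)
affine = np.eye(4)
img = nib.Nifti1Image(arr, affine)       # array + affine -> NIfTI image
nib.save(img, "fromarray_demo.nii.gz")   # hypothetical output path
reloaded = nib.load("fromarray_demo.nii.gz")
assert np.allclose(reloaded.affine, affine)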
49af609f3fe3440cfff8ec00b42934530bbea3f9
|
bankinate.py
|
bankinate.py
|
import requests
from bs4 import BeautifulSoup
username = raw_input('Enter your username: ')
print username
payload = {'UserName': username, 'input': 'Go'}
r = requests.post("https://online.bbt.com/auth/pwd.tb", data = payload)
print (r.text)
|
Send request for bbt username
|
Send request for bbt username
|
Python
|
mit
|
GTmmiller/bankinator
|
Send request for bbt username
|
import requests
from bs4 import BeautifulSoup
username = raw_input('Enter your username: ')
print username
payload = {'UserName': username, 'input': 'Go'}
r = requests.post("https://online.bbt.com/auth/pwd.tb", data = payload)
print (r.text)
|
<commit_before><commit_msg>Send request for bbt username<commit_after>
|
import requests
from bs4 import BeautifulSoup
username = raw_input('Enter your username: ')
print username
payload = {'UserName': username, 'input': 'Go'}
r = requests.post("https://online.bbt.com/auth/pwd.tb", data = payload)
print (r.text)
|
Send request for bbt usernameimport requests
from bs4 import BeautifulSoup
username = raw_input('Enter your username: ')
print username
payload = {'UserName': username, 'input': 'Go'}
r = requests.post("https://online.bbt.com/auth/pwd.tb", data = payload)
print (r.text)
|
<commit_before><commit_msg>Send request for bbt username<commit_after>import requests
from bs4 import BeautifulSoup
username = raw_input('Enter your username: ')
print username
payload = {'UserName': username, 'input': 'Go'}
r = requests.post("https://online.bbt.com/auth/pwd.tb", data = payload)
print (r.text)
|
|
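BeautifulSoup is imported above but never used; a hedged guess at the intended next step is parsing the login response for hidden form fields (the selectors are illustrative, not the real BB&T markup).
import requests
from bs4 import BeautifulSoup
r = requests.post("https://online.bbt.com/auth/pwd.tb",
                  data={'UserName': 'example_user', 'input': 'Go'})
soup = BeautifulSoup(r.text, "html.parser")
# Hidden inputs usually carry state that must be echoed back with the password.
hidden = {tag['name']: tag.get('value', '')
          for tag in soup.find_all('input', type='hidden')
          if tag.get('name')}
print(hidden)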
99ca3372ceaaa28e9e925b86a9b8de3a3735b1df
|
notifications/district_points_updated.py
|
notifications/district_points_updated.py
|
from consts.district_type import DistrictType
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class DistrictPointsUpdatedNotification(BaseNotification):
    # district_key is like <year><enum>
# Example: 2014ne
def __init__(self, district_key):
self.district_key = district_key
self.district_enum = DistrictType.abbrevs[district_key[4:]]
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.DISTRICT_POINTS_UPDATED]
data['message_data'] = {}
data['message_data']['district_key'] = self.district_key
data['message_data']['district_name'] = DistrictType.names[self.district_enum]
return data
|
Add notification for district points being updated
|
Add notification for district points being updated
|
Python
|
mit
|
the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,1fish2/the-blue-alliance,1fish2/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,1fish2/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance
|
Add notification for district points being updated
|
from consts.district_type import DistrictType
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class DistrictPointsUpdatedNotification(BaseNotification):
    # district_key is like <year><enum>
# Example: 2014ne
def __init__(self, district_key):
self.district_key = district_key
self.district_enum = DistrictType.abbrevs[district_key[4:]]
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.DISTRICT_POINTS_UPDATED]
data['message_data'] = {}
data['message_data']['district_key'] = self.district_key
data['message_data']['district_name'] = DistrictType.names[self.district_enum]
return data
|
<commit_before><commit_msg>Add notification for district points being updated<commit_after>
|
from consts.district_type import DistrictType
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class DistrictPointsUpdatedNotification(BaseNotification):
    # district_key is like <year><enum>
# Example: 2014ne
def __init__(self, district_key):
self.district_key = district_key
self.district_enum = DistrictType.abbrevs[district_key[4:]]
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.DISTRICT_POINTS_UPDATED]
data['message_data'] = {}
data['message_data']['district_key'] = self.district_key
data['message_data']['district_name'] = DistrictType.names[self.district_enum]
return data
|
Add notification for district points being updatedfrom consts.district_type import DistrictType
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class DistrictPointsUpdatedNotification(BaseNotification):
    # district_key is like <year><enum>
# Example: 2014ne
def __init__(self, district_key):
self.district_key = district_key
self.district_enum = DistrictType.abbrevs[district_key[4:]]
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.DISTRICT_POINTS_UPDATED]
data['message_data'] = {}
data['message_data']['district_key'] = self.district_key
data['message_data']['district_name'] = DistrictType.names[self.district_enum]
return data
|
<commit_before><commit_msg>Add notification for district points being updated<commit_after>from consts.district_type import DistrictType
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class DistrictPointsUpdatedNotification(BaseNotification):
    # district_key is like <year><enum>
# Example: 2014ne
def __init__(self, district_key):
self.district_key = district_key
self.district_enum = DistrictType.abbrevs[district_key[4:]]
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.DISTRICT_POINTS_UPDATED]
data['message_data'] = {}
data['message_data']['district_key'] = self.district_key
data['message_data']['district_name'] = DistrictType.names[self.district_enum]
return data
|
|
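A minimal sketch of consuming the notification class above; '2014ne' follows the <year><enum> key format the comment describes.
from notifications.district_points_updated import DistrictPointsUpdatedNotification
notification = DistrictPointsUpdatedNotification('2014ne')
payload = notification._build_dict()
# payload['message_data'] carries both the raw key and the display name:
# {'district_key': '2014ne', 'district_name': <name from DistrictType.names>}
print(payload)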
7bddc0bc8cd9d5f9a4024b45aaae81b6aa840606
|
data/management/commands/approve_facilities.py
|
data/management/commands/approve_facilities.py
|
"""
Bulk creation does not call a model's save method.
Thus, some side effects, such as approvals and regulation, that are supposed
to take place in the save method are not applied.
This command is a workaround for ensuring those side effects are
performed on the facilities.
"""
from django.core.management import BaseCommand
from facilities.models import Facility
class Command(BaseCommand):
def handle(self, *args, **options):
for facility in Facility.objects.all():
facility.approved = True
facility.is_published = True
facility.regulated = True
facility.save(allow_save=True)
|
Add helper command for approving facilities
|
Add helper command for approving facilities
|
Python
|
mit
|
MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api
|
Add helper command for approving facilities
|
"""
Bulk creation does not call a model's save method.
Thus, some side effects, such as approvals and regulation, that are supposed
to take place in the save method are not applied.
This command is a workaround for ensuring those side effects are
performed on the facilities.
"""
from django.core.management import BaseCommand
from facilities.models import Facility
class Command(BaseCommand):
def handle(self, *args, **options):
for facility in Facility.objects.all():
facility.approved = True
facility.is_published = True
facility.regulated = True
facility.save(allow_save=True)
|
<commit_before><commit_msg>Add helper command for approving facilities<commit_after>
|
"""
Bulk creation does not call a model's save method.
Thus, some side effects, such as approvals and regulation, that are supposed
to take place in the save method are not applied.
This command is a workaround for ensuring those side effects are
performed on the facilities.
"""
from django.core.management import BaseCommand
from facilities.models import Facility
class Command(BaseCommand):
def handle(self, *args, **options):
for facility in Facility.objects.all():
facility.approved = True
facility.is_published = True
facility.regulated = True
facility.save(allow_save=True)
|
Add helper command for approving facilities"""
Bulk creation does not call a model's save method.
Thus, some side effects, such as approvals and regulation, that are supposed
to take place in the save method are not applied.
This command is a workaround for ensuring those side effects are
performed on the facilities.
"""
from django.core.management import BaseCommand
from facilities.models import Facility
class Command(BaseCommand):
def handle(self, *args, **options):
for facility in Facility.objects.all():
facility.approved = True
facility.is_published = True
facility.regulated = True
facility.save(allow_save=True)
|
<commit_before><commit_msg>Add helper command for approving facilities<commit_after>"""
Bulk creation does not call a model's save method.
Thus, some side effects, such as approvals and regulation, that are supposed
to take place in the save method are not applied.
This command is a workaround for ensuring those side effects are
performed on the facilities.
"""
from django.core.management import BaseCommand
from facilities.models import Facility
class Command(BaseCommand):
def handle(self, *args, **options):
for facility in Facility.objects.all():
facility.approved = True
facility.is_published = True
facility.regulated = True
facility.save(allow_save=True)
|
|
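To make the docstring's point concrete, a hedged sketch of the behavior the command compensates for; the field name 'name' is illustrative.
from facilities.models import Facility
# bulk_create issues a single INSERT and never calls Facility.save(), so any
# approval/regulation logic in save() is skipped for these rows:
Facility.objects.bulk_create([Facility(name='Clinic A'),
                              Facility(name='Clinic B')])
# Saving each instance, as the management command above does, runs that logic:
for facility in Facility.objects.filter(approved=False):
    facility.approved = True
    facility.save(allow_save=True)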
e4e3d9eb5a29b8917053cd506ec025b28363d27b
|
cltk/tokenize/utils.py
|
cltk/tokenize/utils.py
|
""" Tokenization utilities
"""
__author__ = ['Patrick J. Burns <patrick@diyclassics.org>']
__license__ = 'MIT License.'
import pickle
from abc import abstractmethod
from nltk.tokenize.punkt import PunktSentenceTokenizer, PunktTrainer
from nltk.tokenize.punkt import PunktLanguageVars
from cltk.corpus.latin.readers import latinlibrary
from cltk.tokenize.latin.params import ABBREVIATIONS
class BaseSentenceTokenizerTrainer(object):
""" Train sentence tokenizer
"""
def __init__(self, language=None):
""" Initialize stoplist builder with option for language specific parameters
:type language: str
:param language : text from which to build the stoplist
"""
if language:
self.language = language.lower()
def _tokenizer_setup(self):
self.punctuation = []
self.strict = []
def pickle_sentence_tokenizer(self, filename, tokenizer):
# Dump pickled tokenizer
with open(filename, 'wb') as f:
pickle.dump(tokenizer, f)
def train_sentence_tokenizer(self, text, punctuation=[], strict=[]):
"""
Train sentence tokenizer.
"""
self._tokenizer_setup()
if punctuation:
self.punctuation = punctuation
if strict:
self.strict = strict
# Set punctuation
language_punkt_vars = PunktLanguageVars
language_punkt_vars.sent_end_chars = self.punctuation+self.strict
# Set abbreviations
        trainer = PunktTrainer(text, lang_vars=language_punkt_vars())
trainer.INCLUDE_ALL_COLLOCS = True
trainer.INCLUDE_ABBREV_COLLOCS = True
tokenizer = PunktSentenceTokenizer(trainer.get_params())
for abbreviation in ABBREVIATIONS:
tokenizer._params.abbrev_types.add(abbreviation)
return tokenizer
|
Add utilities file for tokenize
|
Add utilities file for tokenize
|
Python
|
mit
|
cltk/cltk,kylepjohnson/cltk,TylerKirby/cltk,diyclassics/cltk,D-K-E/cltk,TylerKirby/cltk
|
Add utilities file for tokenize
|
""" Tokenization utilities
"""
__author__ = ['Patrick J. Burns <patrick@diyclassics.org>']
__license__ = 'MIT License.'
import pickle
from abc import abstractmethod
from nltk.tokenize.punkt import PunktSentenceTokenizer, PunktTrainer
from nltk.tokenize.punkt import PunktLanguageVars
from cltk.corpus.latin.readers import latinlibrary
from cltk.tokenize.latin.params import ABBREVIATIONS
class BaseSentenceTokenizerTrainer(object):
""" Train sentence tokenizer
"""
def __init__(self, language=None):
""" Initialize stoplist builder with option for language specific parameters
:type language: str
:param language : text from which to build the stoplist
"""
if language:
self.language = language.lower()
def _tokenizer_setup(self):
self.punctuation = []
self.strict = []
def pickle_sentence_tokenizer(self, filename, tokenizer):
# Dump pickled tokenizer
with open(filename, 'wb') as f:
pickle.dump(tokenizer, f)
def train_sentence_tokenizer(self, text, punctuation=[], strict=[]):
"""
Train sentence tokenizer.
"""
self._tokenizer_setup()
if punctuation:
self.punctuation = punctuation
if strict:
self.strict = strict
# Set punctuation
language_punkt_vars = PunktLanguageVars
language_punkt_vars.sent_end_chars = self.punctuation+self.strict
# Set abbreviations
        trainer = PunktTrainer(text, lang_vars=language_punkt_vars())
trainer.INCLUDE_ALL_COLLOCS = True
trainer.INCLUDE_ABBREV_COLLOCS = True
tokenizer = PunktSentenceTokenizer(trainer.get_params())
for abbreviation in ABBREVIATIONS:
tokenizer._params.abbrev_types.add(abbreviation)
return tokenizer
|
<commit_before><commit_msg>Add utilities file for tokenize<commit_after>
|
""" Tokenization utilities
"""
__author__ = ['Patrick J. Burns <patrick@diyclassics.org>']
__license__ = 'MIT License.'
import pickle
from abc import abstractmethod
from nltk.tokenize.punkt import PunktSentenceTokenizer, PunktTrainer
from nltk.tokenize.punkt import PunktLanguageVars
from cltk.corpus.latin.readers import latinlibrary
from cltk.tokenize.latin.params import ABBREVIATIONS
class BaseSentenceTokenizerTrainer(object):
""" Train sentence tokenizer
"""
def __init__(self, language=None):
""" Initialize stoplist builder with option for language specific parameters
:type language: str
:param language : text from which to build the stoplist
"""
if language:
self.language = language.lower()
def _tokenizer_setup(self):
self.punctuation = []
self.strict = []
def pickle_sentence_tokenizer(self, filename, tokenizer):
# Dump pickled tokenizer
with open(filename, 'wb') as f:
pickle.dump(tokenizer, f)
def train_sentence_tokenizer(self, text, punctuation=[], strict=[]):
"""
Train sentence tokenizer.
"""
self._tokenizer_setup()
if punctuation:
self.punctuation = punctuation
if strict:
self.strict = strict
# Set punctuation
language_punkt_vars = PunktLanguageVars
language_punkt_vars.sent_end_chars = self.punctuation+self.strict
# Set abbreviations
        trainer = PunktTrainer(text, lang_vars=language_punkt_vars())
trainer.INCLUDE_ALL_COLLOCS = True
trainer.INCLUDE_ABBREV_COLLOCS = True
tokenizer = PunktSentenceTokenizer(trainer.get_params())
for abbreviation in ABBREVIATIONS:
tokenizer._params.abbrev_types.add(abbreviation)
return tokenizer
|
Add utilities file for tokenize""" Tokenization utilities
"""
__author__ = ['Patrick J. Burns <patrick@diyclassics.org>']
__license__ = 'MIT License.'
import pickle
from abc import abstractmethod
from nltk.tokenize.punkt import PunktSentenceTokenizer, PunktTrainer
from nltk.tokenize.punkt import PunktLanguageVars
from cltk.corpus.latin.readers import latinlibrary
from cltk.tokenize.latin.params import ABBREVIATIONS
class BaseSentenceTokenizerTrainer(object):
""" Train sentence tokenizer
"""
def __init__(self, language=None):
""" Initialize stoplist builder with option for language specific parameters
:type language: str
:param language : text from which to build the stoplist
"""
if language:
self.language = language.lower()
def _tokenizer_setup(self):
self.punctuation = []
self.strict = []
def pickle_sentence_tokenizer(self, filename, tokenizer):
# Dump pickled tokenizer
with open(filename, 'wb') as f:
pickle.dump(tokenizer, f)
def train_sentence_tokenizer(self, text, punctuation=[], strict=[]):
"""
Train sentence tokenizer.
"""
self._tokenizer_setup()
if punctuation:
self.punctuation = punctuation
if strict:
self.strict = strict
# Set punctuation
language_punkt_vars = PunktLanguageVars
language_punkt_vars.sent_end_chars = self.punctuation+self.strict
# Set abbreviations
        trainer = PunktTrainer(text, lang_vars=language_punkt_vars())
trainer.INCLUDE_ALL_COLLOCS = True
trainer.INCLUDE_ABBREV_COLLOCS = True
tokenizer = PunktSentenceTokenizer(trainer.get_params())
for abbreviation in ABBREVIATIONS:
tokenizer._params.abbrev_types.add(abbreviation)
return tokenizer
|
<commit_before><commit_msg>Add utilities file for tokenize<commit_after>""" Tokenization utilities
"""
__author__ = ['Patrick J. Burns <patrick@diyclassics.org>']
__license__ = 'MIT License.'
import pickle
from abc import abstractmethod
from nltk.tokenize.punkt import PunktSentenceTokenizer, PunktTrainer
from nltk.tokenize.punkt import PunktLanguageVars
from cltk.corpus.latin.readers import latinlibrary
from cltk.tokenize.latin.params import ABBREVIATIONS
class BaseSentenceTokenizerTrainer(object):
""" Train sentence tokenizer
"""
def __init__(self, language=None):
""" Initialize stoplist builder with option for language specific parameters
:type language: str
:param language : text from which to build the stoplist
"""
if language:
self.language = language.lower()
def _tokenizer_setup(self):
self.punctuation = []
self.strict = []
def pickle_sentence_tokenizer(self, filename, tokenizer):
# Dump pickled tokenizer
with open(filename, 'wb') as f:
pickle.dump(tokenizer, f)
def train_sentence_tokenizer(self, text, punctuation=[], strict=[]):
"""
Train sentence tokenizer.
"""
self._tokenizer_setup()
if punctuation:
self.punctuation = punctuation
if strict:
self.strict = strict
# Set punctuation
language_punkt_vars = PunktLanguageVars
language_punkt_vars.sent_end_chars = self.punctuation+self.strict
# Set abbreviations
        trainer = PunktTrainer(text, lang_vars=language_punkt_vars())
trainer.INCLUDE_ALL_COLLOCS = True
trainer.INCLUDE_ABBREV_COLLOCS = True
tokenizer = PunktSentenceTokenizer(trainer.get_params())
for abbreviation in ABBREVIATIONS:
tokenizer._params.abbrev_types.add(abbreviation)
return tokenizer
|
|
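Rough usage of the trainer above; the training text and pickle path are placeholders.
from cltk.tokenize.utils import BaseSentenceTokenizerTrainer
trainer = BaseSentenceTokenizerTrainer(language='latin')
text = "Gallia est omnis divisa in partes tres. Quarum unam incolunt Belgae."
tokenizer = trainer.train_sentence_tokenizer(text, punctuation=['.', '?', '!'])
print(tokenizer.tokenize(text))
# Persist the trained tokenizer for reuse:
trainer.pickle_sentence_tokenizer('latin_punkt.pickle', tokenizer)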
34fe8eb97ccbdcc703293d6241c68bac03d3f531
|
db/migrations/migration2.py
|
db/migrations/migration2.py
|
#!/usr/bin/env python
#
# This library is free software, distributed under the terms of
# the GNU Lesser General Public License Version 3, or any later version.
# See the COPYING file included in this archive
#
# The docstrings in this module contain epytext markup; API documentation
# may be created by processing this file with epydoc: http://epydoc.sf.net
from os import path, remove
from pysqlcipher import dbapi2 as sqlite
import sys
sys.path.append('node/')
import constants
DB_PATH = constants.DB_PATH
def upgrade(db_path):
if path.isfile(db_path):
con = sqlite.connect(db_path)
with con:
cur = con.cursor()
# Use PRAGMA key to encrypt / decrypt database.
cur.execute("PRAGMA key = 'passphrase';")
cur.execute("ALTER TABLE products("
"ADD COLUMN deleted INT)")
def downgrade(db_path):
if path.isfile(db_path):
con = sqlite.connect(db_path)
with con:
cur = con.cursor()
# Use PRAGMA key to encrypt / decrypt database.
cur.execute("PRAGMA key = 'passphrase';")
cur.execute("ALTER TABLE products DROP COLUMN deleted")
if __name__ == "__main__":
if sys.argv[1:]:
DB_PATH = sys.argv[1]
if "downgrade" in sys.argv[2:]:
downgrade(DB_PATH)
else:
upgrade(DB_PATH)
|
Add deleted column to products table migration
|
Add deleted column to products table migration
|
Python
|
mit
|
mirrax/OpenBazaar,STRML/OpenBazaar,akhavr/OpenBazaar,matiasbastos/OpenBazaar,bglassy/OpenBazaar,saltduck/OpenBazaar,bankonme/OpenBazaar,akhavr/OpenBazaar,blakejakopovic/OpenBazaar,im0rtel/OpenBazaar,dlcorporation/openbazaar,must-/OpenBazaar,bglassy/OpenBazaar,atsuyim/OpenBazaar,dionyziz/OpenBazaar,STRML/OpenBazaar,tortxof/OpenBazaar,yagoulas/OpenBazaar,freebazaar/FreeBazaar,rllola/OpenBazaar,atsuyim/OpenBazaar,hoffmabc/OpenBazaar,habibmasuro/OpenBazaar,must-/OpenBazaar,Renelvon/OpenBazaar,habibmasuro/OpenBazaar,im0rtel/OpenBazaar,matiasbastos/OpenBazaar,mirrax/OpenBazaar,hoffmabc/OpenBazaar,saltduck/OpenBazaar,matiasbastos/OpenBazaar,freebazaar/FreeBazaar,NolanZhao/OpenBazaar,STRML/OpenBazaar,dlcorporation/openbazaar,blakejakopovic/OpenBazaar,Renelvon/OpenBazaar,tortxof/OpenBazaar,kordless/OpenBazaar,mirrax/OpenBazaar,Renelvon/OpenBazaar,yagoulas/OpenBazaar,habibmasuro/OpenBazaar,hoffmabc/OpenBazaar,freebazaar/FreeBazaar,dionyziz/OpenBazaar,rllola/OpenBazaar,akhavr/OpenBazaar,hoffmabc/OpenBazaar,must-/OpenBazaar,saltduck/OpenBazaar,dionyziz/OpenBazaar,dlcorporation/openbazaar,kordless/OpenBazaar,Renelvon/OpenBazaar,bankonme/OpenBazaar,blakejakopovic/OpenBazaar,atsuyim/OpenBazaar,hoffmabc/OpenBazaar,rllola/OpenBazaar,freebazaar/FreeBazaar,dlcorporation/openbazaar,akhavr/OpenBazaar,dlcorporation/openbazaar,tortxof/OpenBazaar,im0rtel/OpenBazaar,tortxof/OpenBazaar,saltduck/OpenBazaar,akhavr/OpenBazaar,must-/OpenBazaar,mirrax/OpenBazaar,STRML/OpenBazaar,rllola/OpenBazaar,matiasbastos/OpenBazaar,atsuyim/OpenBazaar,kordless/OpenBazaar,dlcorporation/openbazaar,hoffmabc/OpenBazaar,yagoulas/OpenBazaar,dionyziz/OpenBazaar,NolanZhao/OpenBazaar,NolanZhao/OpenBazaar,dionyziz/OpenBazaar,bankonme/OpenBazaar,habibmasuro/OpenBazaar,kordless/OpenBazaar,bglassy/OpenBazaar,freebazaar/FreeBazaar,im0rtel/OpenBazaar,bankonme/OpenBazaar,NolanZhao/OpenBazaar,yagoulas/OpenBazaar,bglassy/OpenBazaar,blakejakopovic/OpenBazaar
|
Add deleted column to products table migration
|
#!/usr/bin/env python
#
# This library is free software, distributed under the terms of
# the GNU Lesser General Public License Version 3, or any later version.
# See the COPYING file included in this archive
#
# The docstrings in this module contain epytext markup; API documentation
# may be created by processing this file with epydoc: http://epydoc.sf.net
from os import path, remove
from pysqlcipher import dbapi2 as sqlite
import sys
sys.path.append('node/')
import constants
DB_PATH = constants.DB_PATH
def upgrade(db_path):
if path.isfile(db_path):
con = sqlite.connect(db_path)
with con:
cur = con.cursor()
# Use PRAGMA key to encrypt / decrypt database.
cur.execute("PRAGMA key = 'passphrase';")
cur.execute("ALTER TABLE products("
"ADD COLUMN deleted INT)")
def downgrade(db_path):
if path.isfile(db_path):
con = sqlite.connect(db_path)
with con:
cur = con.cursor()
# Use PRAGMA key to encrypt / decrypt database.
cur.execute("PRAGMA key = 'passphrase';")
cur.execute("ALTER TABLE products DROP COLUMN deleted")
if __name__ == "__main__":
if sys.argv[1:]:
DB_PATH = sys.argv[1]
if "downgrade" in sys.argv[2:]:
downgrade(DB_PATH)
else:
upgrade(DB_PATH)
|
<commit_before><commit_msg>Add deleted column to products table migration<commit_after>
|
#!/usr/bin/env python
#
# This library is free software, distributed under the terms of
# the GNU Lesser General Public License Version 3, or any later version.
# See the COPYING file included in this archive
#
# The docstrings in this module contain epytext markup; API documentation
# may be created by processing this file with epydoc: http://epydoc.sf.net
from os import path, remove
from pysqlcipher import dbapi2 as sqlite
import sys
sys.path.append('node/')
import constants
DB_PATH = constants.DB_PATH
def upgrade(db_path):
if path.isfile(db_path):
con = sqlite.connect(db_path)
with con:
cur = con.cursor()
# Use PRAGMA key to encrypt / decrypt database.
cur.execute("PRAGMA key = 'passphrase';")
cur.execute("ALTER TABLE products("
"ADD COLUMN deleted INT)")
def downgrade(db_path):
if path.isfile(db_path):
con = sqlite.connect(db_path)
with con:
cur = con.cursor()
# Use PRAGMA key to encrypt / decrypt database.
cur.execute("PRAGMA key = 'passphrase';")
cur.execute("ALTER TABLE products DROP COLUMN deleted")
if __name__ == "__main__":
if sys.argv[1:]:
DB_PATH = sys.argv[1]
if "downgrade" in sys.argv[2:]:
downgrade(DB_PATH)
else:
upgrade(DB_PATH)
|
Add deleted column to products table migration#!/usr/bin/env python
#
# This library is free software, distributed under the terms of
# the GNU Lesser General Public License Version 3, or any later version.
# See the COPYING file included in this archive
#
# The docstrings in this module contain epytext markup; API documentation
# may be created by processing this file with epydoc: http://epydoc.sf.net
from os import path, remove
from pysqlcipher import dbapi2 as sqlite
import sys
sys.path.append('node/')
import constants
DB_PATH = constants.DB_PATH
def upgrade(db_path):
if path.isfile(db_path):
con = sqlite.connect(db_path)
with con:
cur = con.cursor()
# Use PRAGMA key to encrypt / decrypt database.
cur.execute("PRAGMA key = 'passphrase';")
cur.execute("ALTER TABLE products("
"ADD COLUMN deleted INT)")
def downgrade(db_path):
if path.isfile(db_path):
con = sqlite.connect(db_path)
with con:
cur = con.cursor()
# Use PRAGMA key to encrypt / decrypt database.
cur.execute("PRAGMA key = 'passphrase';")
cur.execute("ALTER TABLE products DROP COLUMN deleted")
if __name__ == "__main__":
if sys.argv[1:]:
DB_PATH = sys.argv[1]
if "downgrade" in sys.argv[2:]:
downgrade(DB_PATH)
else:
upgrade(DB_PATH)
|
<commit_before><commit_msg>Add deleted column to products table migration<commit_after>#!/usr/bin/env python
#
# This library is free software, distributed under the terms of
# the GNU Lesser General Public License Version 3, or any later version.
# See the COPYING file included in this archive
#
# The docstrings in this module contain epytext markup; API documentation
# may be created by processing this file with epydoc: http://epydoc.sf.net
from os import path, remove
from pysqlcipher import dbapi2 as sqlite
import sys
sys.path.append('node/')
import constants
DB_PATH = constants.DB_PATH
def upgrade(db_path):
if path.isfile(db_path):
con = sqlite.connect(db_path)
with con:
cur = con.cursor()
# Use PRAGMA key to encrypt / decrypt database.
cur.execute("PRAGMA key = 'passphrase';")
cur.execute("ALTER TABLE products("
"ADD COLUMN deleted INT)")
def downgrade(db_path):
if path.isfile(db_path):
con = sqlite.connect(db_path)
with con:
cur = con.cursor()
# Use PRAGMA key to encrypt / decrypt database.
cur.execute("PRAGMA key = 'passphrase';")
cur.execute("ALTER TABLE products DROP COLUMN deleted")
if __name__ == "__main__":
if sys.argv[1:]:
DB_PATH = sys.argv[1]
if "downgrade" in sys.argv[2:]:
downgrade(DB_PATH)
else:
upgrade(DB_PATH)
|
|
8c280ce30db32b6c8fc6f6b551b5c8fc72990b09
|
pyQuantuccia/tests/test_get_holiday_date.py
|
pyQuantuccia/tests/test_get_holiday_date.py
|
from pyQuantuccia import quantuccia
def test_get_holiday_date():
""" At the moment the only thing this function
can do is return None.
"""
assert(quantuccia.get_holiday_date() is None)
|
Add a very simple test.
|
Add a very simple test.
|
Python
|
bsd-3-clause
|
jwg4/pyQuantuccia,jwg4/pyQuantuccia
|
Add a very simple test.
|
from pyQuantuccia import quantuccia
def test_get_holiday_date():
""" At the moment the only thing this function
can do is return None.
"""
assert(quantuccia.get_holiday_date() is None)
|
<commit_before><commit_msg>Add a very simple test.<commit_after>
|
from pyQuantuccia import quantuccia
def test_get_holiday_date():
""" At the moment the only thing this function
can do is return None.
"""
assert(quantuccia.get_holiday_date() is None)
|
Add a very simple test.from pyQuantuccia import quantuccia
def test_get_holiday_date():
""" At the moment the only thing this function
can do is return None.
"""
assert(quantuccia.get_holiday_date() is None)
|
<commit_before><commit_msg>Add a very simple test.<commit_after>from pyQuantuccia import quantuccia
def test_get_holiday_date():
""" At the moment the only thing this function
can do is return None.
"""
assert(quantuccia.get_holiday_date() is None)
|
|
598911ebd93085926602a26e9bbf835df0bea0b6
|
test/test_rcsparse.py
|
test/test_rcsparse.py
|
import unittest
from rcsparse import rcsfile
from os.path import dirname, join
REV_NUMBER = 0
REV_STATE = 3
class Test(unittest.TestCase):
def test_rcsfile(self):
f = rcsfile(join(dirname(__file__), 'data', 'patch-copyin_c,v'))
self.assertEquals(f.head, '1.1')
self.assertEquals(f.revs[f.head][REV_NUMBER], '1.1')
self.assertEquals(f.revs[f.head][REV_STATE], 'dead')
if __name__ == '__main__':
unittest.main()
|
Add a test case for Simon Schubert's rcsparse library
|
Add a test case for Simon Schubert's rcsparse library
|
Python
|
isc
|
ustuehler/git-cvs,ustuehler/git-cvs
|
Add a test case for Simon Schubert's rcsparse library
|
import unittest
from rcsparse import rcsfile
from os.path import dirname, join
REV_NUMBER = 0
REV_STATE = 3
class Test(unittest.TestCase):
def test_rcsfile(self):
f = rcsfile(join(dirname(__file__), 'data', 'patch-copyin_c,v'))
self.assertEquals(f.head, '1.1')
self.assertEquals(f.revs[f.head][REV_NUMBER], '1.1')
self.assertEquals(f.revs[f.head][REV_STATE], 'dead')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test case for Simon Schubert's rcsparse library<commit_after>
|
import unittest
from rcsparse import rcsfile
from os.path import dirname, join
REV_NUMBER = 0
REV_STATE = 3
class Test(unittest.TestCase):
def test_rcsfile(self):
f = rcsfile(join(dirname(__file__), 'data', 'patch-copyin_c,v'))
self.assertEquals(f.head, '1.1')
self.assertEquals(f.revs[f.head][REV_NUMBER], '1.1')
self.assertEquals(f.revs[f.head][REV_STATE], 'dead')
if __name__ == '__main__':
unittest.main()
|
Add a test case for Simon Schubert's rcsparse libraryimport unittest
from rcsparse import rcsfile
from os.path import dirname, join
REV_NUMBER = 0
REV_STATE = 3
class Test(unittest.TestCase):
def test_rcsfile(self):
f = rcsfile(join(dirname(__file__), 'data', 'patch-copyin_c,v'))
self.assertEquals(f.head, '1.1')
self.assertEquals(f.revs[f.head][REV_NUMBER], '1.1')
self.assertEquals(f.revs[f.head][REV_STATE], 'dead')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test case for Simon Schubert's rcsparse library<commit_after>import unittest
from rcsparse import rcsfile
from os.path import dirname, join
REV_NUMBER = 0
REV_STATE = 3
class Test(unittest.TestCase):
def test_rcsfile(self):
f = rcsfile(join(dirname(__file__), 'data', 'patch-copyin_c,v'))
self.assertEquals(f.head, '1.1')
self.assertEquals(f.revs[f.head][REV_NUMBER], '1.1')
self.assertEquals(f.revs[f.head][REV_STATE], 'dead')
if __name__ == '__main__':
unittest.main()
|
|
e81d5778ed5f81d5d479bf828de1a854c24af75e
|
tests/test_domains.py
|
tests/test_domains.py
|
import unittest
import subprocess
import random
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return subprocess.check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, 65535)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
|
import unittest
import subprocess
import random
# copied from python 2.7 for python 2.6
def check_output(*popenargs, **kwargs):
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd, output=output)
return output
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, 65535)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
|
Fix tests on python 2.6
|
Fix tests on python 2.6
|
Python
|
mit
|
branall1/cli53,ftahmed/cli53,branall1/cli53,Collaborne/cli53,branall1/cli53,ftahmed/cli53,jefflaplante/cli53,Collaborne/cli53,jefflaplante/cli53
|
import unittest
import subprocess
import random
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return subprocess.check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, 65535)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
Fix tests on python 2.6
|
import unittest
import subprocess
import random
# copied from python 2.7 for python 2.6
def check_output(*popenargs, **kwargs):
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd, output=output)
return output
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, 65535)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
|
<commit_before>import unittest
import subprocess
import random
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return subprocess.check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, 65535)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
<commit_msg>Fix tests on python 2.6<commit_after>
|
import unittest
import subprocess
import random
# copied from python 2.7 for python 2.6
def check_output(*popenargs, **kwargs):
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd, output=output)
return output
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, 65535)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
|
import unittest
import subprocess
import random
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return subprocess.check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, 65535)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
Fix tests on python 2.6import unittest
import subprocess
import random
# copied from python 2.7 for python 2.6
def check_output(*popenargs, **kwargs):
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd, output=output)
return output
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, 65535)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
|
<commit_before>import unittest
import subprocess
import random
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return subprocess.check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, 65535)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
<commit_msg>Fix tests on python 2.6<commit_after>import unittest
import subprocess
import random
# copied from python 2.7 for python 2.6
def check_output(*popenargs, **kwargs):
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd, output=output)
return output
class DomainsTest(unittest.TestCase):
def _cmd(self, cmd, *args):
pargs = ('scripts/cli53', cmd) + args
return check_output(pargs, stderr=subprocess.STDOUT)
def _unique_name(self):
return 'temp%d.com' % random.randint(0, 65535)
def test_usage(self):
assert 'usage' in self._cmd('-h')
def test_create_delete(self):
name = self._unique_name()
self._cmd('create', name)
assert name in self._cmd('list')
self._cmd('delete', name)
assert name not in self._cmd('list')
|
5c047b97f2e41b51f8314f6ee06788b1d9246dbf
|
tests/float/math_fun_special.py
|
tests/float/math_fun_special.py
|
# test the special functions imported from math
try:
from math import *
except ImportError:
print("SKIP")
import sys
sys.exit()
test_values = [-8., -2.5, -1, -0.5, 0.0, 0.5, 2.5, 8.,]
pos_test_values = [0.001, 0.1, 0.5, 1.0, 1.5, 10.,]
functions = [
('erf', erf, test_values),
('erfc', erfc, test_values),
('gamma', gamma, pos_test_values),
('lgamma', lgamma, pos_test_values + [50., 100.,]),
]
for function_name, function, test_vals in functions:
print(function_name)
for value in test_vals:
print("{:.5g}".format(function(value)))
|
Add test for math special functions.
|
tests: Add test for math special functions.
|
Python
|
mit
|
redbear/micropython,cnoviello/micropython,kostyll/micropython,adafruit/circuitpython,ericsnowcurrently/micropython,feilongfl/micropython,chrisdearman/micropython,noahchense/micropython,puuu/micropython,feilongfl/micropython,trezor/micropython,praemdonck/micropython,KISSMonX/micropython,cwyark/micropython,tralamazza/micropython,feilongfl/micropython,praemdonck/micropython,cwyark/micropython,orionrobots/micropython,infinnovation/micropython,ChuckM/micropython,tobbad/micropython,blazewicz/micropython,SungEun-Steve-Kim/test-mp,redbear/micropython,ericsnowcurrently/micropython,xuxiaoxin/micropython,drrk/micropython,tdautc19841202/micropython,Timmenem/micropython,drrk/micropython,henriknelson/micropython,blmorris/micropython,MrSurly/micropython,alex-march/micropython,pozetroninc/micropython,lbattraw/micropython,xhat/micropython,alex-robbins/micropython,aethaniel/micropython,galenhz/micropython,lowRISC/micropython,MrSurly/micropython,dhylands/micropython,micropython/micropython-esp32,jmarcelino/pycom-micropython,adafruit/circuitpython,TDAbboud/micropython,adamkh/micropython,cloudformdesign/micropython,hiway/micropython,HenrikSolver/micropython,chrisdearman/micropython,emfcamp/micropython,AriZuu/micropython,pramasoul/micropython,dxxb/micropython,PappaPeppar/micropython,feilongfl/micropython,mianos/micropython,heisewangluo/micropython,drrk/micropython,bvernoux/micropython,MrSurly/micropython,firstval/micropython,cnoviello/micropython,paul-xxx/micropython,warner83/micropython,oopy/micropython,swegener/micropython,neilh10/micropython,MrSurly/micropython-esp32,mianos/micropython,rubencabrera/micropython,ganshun666/micropython,redbear/micropython,tuc-osg/micropython,trezor/micropython,matthewelse/micropython,ryannathans/micropython,pozetroninc/micropython,paul-xxx/micropython,swegener/micropython,torwag/micropython,toolmacher/micropython,tralamazza/micropython,dmazzella/micropython,omtinez/micropython,warner83/micropython,Timmenem/micropython,mianos/micropython,ryannathans/micropython,chrisdearman/micropython,xyb/micropython,ChuckM/micropython,alex-march/micropython,jimkmc/micropython,tdautc19841202/micropython,xhat/micropython,blmorris/micropython,SHA2017-badge/micropython-esp32,mhoffma/micropython,EcmaXp/micropython,SungEun-Steve-Kim/test-mp,supergis/micropython,tuc-osg/micropython,utopiaprince/micropython,praemdonck/micropython,jmarcelino/pycom-micropython,dmazzella/micropython,martinribelotta/micropython,oopy/micropython,neilh10/micropython,ernesto-g/micropython,danicampora/micropython,hosaka/micropython,tuc-osg/micropython,aethaniel/micropython,xuxiaoxin/micropython,jimkmc/micropython,pozetroninc/micropython,kerneltask/micropython,orionrobots/micropython,heisewangluo/micropython,stonegithubs/micropython,AriZuu/micropython,xyb/micropython,pramasoul/micropython,ganshun666/micropython,adafruit/circuitpython,HenrikSolver/micropython,warner83/micropython,emfcamp/micropython,noahchense/micropython,supergis/micropython,SHA2017-badge/micropython-esp32,aethaniel/micropython,dinau/micropython,suda/micropython,selste/micropython,MrSurly/micropython,dhylands/micropython,MrSurly/micropython,dhylands/micropython,danicampora/micropython,ahotam/micropython,HenrikSolver/micropython,MrSurly/micropython-esp32,selste/micropython,danicampora/micropython,lbattraw/micropython,jlillest/micropython,firstval/micropython,pfalcon/micropython,drrk/micropython,cwyark/micropython,alex-march/micropython,blmorris/micropython,deshipu/micropython,adamkh/micropython,jlillest/micropython,suda/micropython,ceramos/micropython,orionrobot
s/micropython,adafruit/circuitpython,vriera/micropython,infinnovation/micropython,bvernoux/micropython,adamkh/micropython,orionrobots/micropython,hosaka/micropython,dinau/micropython,matthewelse/micropython,vitiral/micropython,tralamazza/micropython,hosaka/micropython,skybird6672/micropython,paul-xxx/micropython,ruffy91/micropython,pozetroninc/micropython,paul-xxx/micropython,omtinez/micropython,utopiaprince/micropython,KISSMonX/micropython,martinribelotta/micropython,oopy/micropython,mhoffma/micropython,stonegithubs/micropython,dinau/micropython,micropython/micropython-esp32,cwyark/micropython,mgyenik/micropython,adafruit/micropython,jmarcelino/pycom-micropython,alex-robbins/micropython,pfalcon/micropython,ernesto-g/micropython,lowRISC/micropython,supergis/micropython,ganshun666/micropython,adafruit/circuitpython,xuxiaoxin/micropython,toolmacher/micropython,trezor/micropython,jimkmc/micropython,mianos/micropython,drrk/micropython,supergis/micropython,TDAbboud/micropython,neilh10/micropython,ruffy91/micropython,deshipu/micropython,stonegithubs/micropython,jimkmc/micropython,torwag/micropython,infinnovation/micropython,SHA2017-badge/micropython-esp32,cnoviello/micropython,martinribelotta/micropython,henriknelson/micropython,cloudformdesign/micropython,alex-robbins/micropython,kostyll/micropython,AriZuu/micropython,lowRISC/micropython,vitiral/micropython,henriknelson/micropython,ryannathans/micropython,redbear/micropython,dhylands/micropython,emfcamp/micropython,praemdonck/micropython,ChuckM/micropython,rubencabrera/micropython,galenhz/micropython,cnoviello/micropython,henriknelson/micropython,bvernoux/micropython,vitiral/micropython,ericsnowcurrently/micropython,vriera/micropython,noahwilliamsson/micropython,tobbad/micropython,pfalcon/micropython,toolmacher/micropython,misterdanb/micropython,bvernoux/micropython,ahotam/micropython,lbattraw/micropython,tobbad/micropython,ericsnowcurrently/micropython,micropython/micropython-esp32,tralamazza/micropython,mianos/micropython,xyb/micropython,Timmenem/micropython,ChuckM/micropython,firstval/micropython,kostyll/micropython,ryannathans/micropython,selste/micropython,dmazzella/micropython,bvernoux/micropython,ericsnowcurrently/micropython,misterdanb/micropython,chrisdearman/micropython,cloudformdesign/micropython,pfalcon/micropython,blmorris/micropython,omtinez/micropython,cloudformdesign/micropython,danicampora/micropython,SHA2017-badge/micropython-esp32,PappaPeppar/micropython,heisewangluo/micropython,oopy/micropython,puuu/micropython,KISSMonX/micropython,ernesto-g/micropython,hiway/micropython,mgyenik/micropython,lbattraw/micropython,jlillest/micropython,ahotam/micropython,torwag/micropython,warner83/micropython,feilongfl/micropython,ceramos/micropython,mpalomer/micropython,martinribelotta/micropython,dxxb/micropython,ruffy91/micropython,adafruit/circuitpython,pozetroninc/micropython,kerneltask/micropython,EcmaXp/micropython,pramasoul/micropython,ceramos/micropython,tdautc19841202/micropython,puuu/micropython,vriera/micropython,cloudformdesign/micropython,lbattraw/micropython,rubencabrera/micropython,adafruit/micropython,ahotam/micropython,swegener/micropython,redbear/micropython,torwag/micropython,KISSMonX/micropython,danicampora/micropython,xuxiaoxin/micropython,jimkmc/micropython,henriknelson/micropython,swegener/micropython,dhylands/micropython,paul-xxx/micropython,skybird6672/micropython,orionrobots/micropython,mhoffma/micropython,pramasoul/micropython,jlillest/micropython,toolmacher/micropython,mpalomer/micropython,turbinenreiter/micropython,
suda/micropython,blazewicz/micropython,adafruit/micropython,turbinenreiter/micropython,EcmaXp/micropython,skybird6672/micropython,oopy/micropython,neilh10/micropython,blmorris/micropython,suda/micropython,firstval/micropython,adamkh/micropython,supergis/micropython,alex-march/micropython,TDAbboud/micropython,xhat/micropython,matthewelse/micropython,dxxb/micropython,hiway/micropython,alex-robbins/micropython,matthewelse/micropython,AriZuu/micropython,heisewangluo/micropython,deshipu/micropython,skybird6672/micropython,trezor/micropython,swegener/micropython,Peetz0r/micropython-esp32,cnoviello/micropython,mhoffma/micropython,tuc-osg/micropython,tobbad/micropython,mpalomer/micropython,mgyenik/micropython,noahwilliamsson/micropython,cwyark/micropython,toolmacher/micropython,emfcamp/micropython,noahchense/micropython,aethaniel/micropython,utopiaprince/micropython,alex-robbins/micropython,lowRISC/micropython,matthewelse/micropython,pfalcon/micropython,tdautc19841202/micropython,xyb/micropython,kostyll/micropython,dinau/micropython,slzatz/micropython,vitiral/micropython,mpalomer/micropython,slzatz/micropython,TDAbboud/micropython,noahwilliamsson/micropython,jmarcelino/pycom-micropython,Peetz0r/micropython-esp32,blazewicz/micropython,chrisdearman/micropython,TDAbboud/micropython,ganshun666/micropython,utopiaprince/micropython,EcmaXp/micropython,slzatz/micropython,utopiaprince/micropython,micropython/micropython-esp32,SungEun-Steve-Kim/test-mp,neilh10/micropython,ceramos/micropython,misterdanb/micropython,slzatz/micropython,micropython/micropython-esp32,omtinez/micropython,praemdonck/micropython,slzatz/micropython,vriera/micropython,xuxiaoxin/micropython,vitiral/micropython,SHA2017-badge/micropython-esp32,ChuckM/micropython,trezor/micropython,stonegithubs/micropython,puuu/micropython,xyb/micropython,EcmaXp/micropython,skybird6672/micropython,heisewangluo/micropython,vriera/micropython,PappaPeppar/micropython,jlillest/micropython,jmarcelino/pycom-micropython,deshipu/micropython,alex-march/micropython,galenhz/micropython,rubencabrera/micropython,infinnovation/micropython,puuu/micropython,dxxb/micropython,warner83/micropython,blazewicz/micropython,omtinez/micropython,martinribelotta/micropython,selste/micropython,mgyenik/micropython,Peetz0r/micropython-esp32,adafruit/micropython,MrSurly/micropython-esp32,selste/micropython,ruffy91/micropython,xhat/micropython,hosaka/micropython,adafruit/micropython,ganshun666/micropython,SungEun-Steve-Kim/test-mp,hiway/micropython,kerneltask/micropython,torwag/micropython,KISSMonX/micropython,misterdanb/micropython,tuc-osg/micropython,noahchense/micropython,HenrikSolver/micropython,aethaniel/micropython,MrSurly/micropython-esp32,galenhz/micropython,infinnovation/micropython,stonegithubs/micropython,ryannathans/micropython,tobbad/micropython,hiway/micropython,noahchense/micropython,ahotam/micropython,MrSurly/micropython-esp32,dinau/micropython,Timmenem/micropython,SungEun-Steve-Kim/test-mp,ruffy91/micropython,tdautc19841202/micropython,Peetz0r/micropython-esp32,galenhz/micropython,blazewicz/micropython,turbinenreiter/micropython,ernesto-g/micropython,noahwilliamsson/micropython,rubencabrera/micropython,AriZuu/micropython,HenrikSolver/micropython,firstval/micropython,turbinenreiter/micropython,xhat/micropython,PappaPeppar/micropython,hosaka/micropython,PappaPeppar/micropython,kerneltask/micropython,noahwilliamsson/micropython,pramasoul/micropython,dmazzella/micropython,emfcamp/micropython,mpalomer/micropython,mgyenik/micropython,suda/micropython,ceramos/micropython,matth
ewelse/micropython,ernesto-g/micropython,mhoffma/micropython,lowRISC/micropython,deshipu/micropython,adamkh/micropython,dxxb/micropython,turbinenreiter/micropython,Peetz0r/micropython-esp32,misterdanb/micropython,kerneltask/micropython,kostyll/micropython,Timmenem/micropython
|
tests: Add test for math special functions.
|
# test the special functions imported from math
try:
from math import *
except ImportError:
print("SKIP")
import sys
sys.exit()
test_values = [-8., -2.5, -1, -0.5, 0.0, 0.5, 2.5, 8.,]
pos_test_values = [0.001, 0.1, 0.5, 1.0, 1.5, 10.,]
functions = [
('erf', erf, test_values),
('erfc', erfc, test_values),
('gamma', gamma, pos_test_values),
('lgamma', lgamma, pos_test_values + [50., 100.,]),
]
for function_name, function, test_vals in functions:
print(function_name)
for value in test_vals:
print("{:.5g}".format(function(value)))
|
<commit_before><commit_msg>tests: Add test for math special functions.<commit_after>
|
# test the special functions imported from math
try:
from math import *
except ImportError:
print("SKIP")
import sys
sys.exit()
test_values = [-8., -2.5, -1, -0.5, 0.0, 0.5, 2.5, 8.,]
pos_test_values = [0.001, 0.1, 0.5, 1.0, 1.5, 10.,]
functions = [
('erf', erf, test_values),
('erfc', erfc, test_values),
('gamma', gamma, pos_test_values),
('lgamma', lgamma, pos_test_values + [50., 100.,]),
]
for function_name, function, test_vals in functions:
print(function_name)
for value in test_vals:
print("{:.5g}".format(function(value)))
|
tests: Add test for math special functions.# test the special functions imported from math
try:
from math import *
except ImportError:
print("SKIP")
import sys
sys.exit()
test_values = [-8., -2.5, -1, -0.5, 0.0, 0.5, 2.5, 8.,]
pos_test_values = [0.001, 0.1, 0.5, 1.0, 1.5, 10.,]
functions = [
('erf', erf, test_values),
('erfc', erfc, test_values),
('gamma', gamma, pos_test_values),
('lgamma', lgamma, pos_test_values + [50., 100.,]),
]
for function_name, function, test_vals in functions:
print(function_name)
for value in test_vals:
print("{:.5g}".format(function(value)))
|
<commit_before><commit_msg>tests: Add test for math special functions.<commit_after># test the special functions imported from math
try:
from math import *
except ImportError:
print("SKIP")
import sys
sys.exit()
test_values = [-8., -2.5, -1, -0.5, 0.0, 0.5, 2.5, 8.,]
pos_test_values = [0.001, 0.1, 0.5, 1.0, 1.5, 10.,]
functions = [
('erf', erf, test_values),
('erfc', erfc, test_values),
('gamma', gamma, pos_test_values),
('lgamma', lgamma, pos_test_values + [50., 100.,]),
]
for function_name, function, test_vals in functions:
print(function_name)
for value in test_vals:
print("{:.5g}".format(function(value)))
|
|
96dce6296c238eaab42798c0b8a99fae3c30958f
|
test/test_Datac.py
|
test/test_Datac.py
|
# -*- coding: utf-8 -*-
import unittest
import datac
class Instantiation(unittest.TestCase):
"""
Test instantiation works according to spec
"""
def test_params_non_dict(self):
"""
Datac instantiation should fail if params is not a dict
"""
params = None
abscissae = [1,2]
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_non_iterable(self):
"""
Datac instantiation should fail if abscissae is not iterable
"""
params = {"fake": 1.}
abscissae = None
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_string(self):
"""
Datac instantiation should fail if abscissae is a string
"""
params = {"fake": 1.}
abscissae = "a string!"
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_name_non_string(self):
"""
Datac instantiation should fail if abscissa_name is not a string
"""
params = {"fake": 1.}
abscissae = [1, 2]
abscissa_name = None
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
|
Add some tests of instantiation
|
Add some tests of instantiation
|
Python
|
mit
|
jrsmith3/datac,jrsmith3/datac
|
Add some tests of instantiation
|
# -*- coding: utf-8 -*-
import unittest
import datac
class Instantiation(unittest.TestCase):
"""
Test instantiation works according to spec
"""
def test_params_non_dict(self):
"""
Datac instantiation should fail if params is not a dict
"""
params = None
abscissae = [1,2]
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_non_iterable(self):
"""
Datac instantiation should fail if abscissae is not iterable
"""
params = {"fake": 1.}
abscissae = None
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_string(self):
"""
Datac instantiation should fail if abscissae is a string
"""
params = {"fake": 1.}
abscissae = "a string!"
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_name_non_string(self):
"""
Datac instantiation should fail if abscissa_name is not a string
"""
params = {"fake": 1.}
abscissae = [1, 2]
abscissa_name = None
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
|
<commit_before><commit_msg>Add some tests of instantiation<commit_after>
|
# -*- coding: utf-8 -*-
import unittest
import datac
class Instantiation(unittest.TestCase):
"""
Test instantiation works according to spec
"""
def test_params_non_dict(self):
"""
Datac instantiation should fail if params is not a dict
"""
params = None
abscissae = [1,2]
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_non_iterable(self):
"""
Datac instantiation should fail if abscissae is not iterable
"""
params = {"fake": 1.}
abscissae = None
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_string(self):
"""
Datac instantiation should fail if abscissae is a string
"""
params = {"fake": 1.}
abscissae = "a string!"
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_name_non_string(self):
"""
Datac instantiation should fail if abscissa_name is not a string
"""
params = {"fake": 1.}
abscissae = [1, 2]
abscissa_name = None
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
|
Add some tests of instantiation# -*- coding: utf-8 -*-
import unittest
import datac
class Instantiation(unittest.TestCase):
"""
Test instantiation works according to spec
"""
def test_params_non_dict(self):
"""
Datac instantiation should fail if params is not a dict
"""
params = None
abscissae = [1,2]
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_non_iterable(self):
"""
Datac instantiation should fail if abscissae is not iterable
"""
params = {"fake": 1.}
abscissae = None
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_string(self):
"""
Datac instantiation should fail if abscissae is a string
"""
params = {"fake": 1.}
abscissae = "a string!"
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_name_non_string(self):
"""
Datac instantiation should fail if abscissa_name is not a string
"""
params = {"fake": 1.}
abscissae = [1, 2]
abscissa_name = None
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
|
<commit_before><commit_msg>Add some tests of instantiation<commit_after># -*- coding: utf-8 -*-
import unittest
import datac
class Instantiation(unittest.TestCase):
"""
Test instantiation works according to spec
"""
def test_params_non_dict(self):
"""
Datac instantiation should fail if params is not a dict
"""
params = None
abscissae = [1,2]
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_non_iterable(self):
"""
Datac instantiation should fail if abscissae is not iterable
"""
params = {"fake": 1.}
abscissae = None
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_string(self):
"""
Datac instantiation should fail if abscissae is a string
"""
params = {"fake": 1.}
abscissae = "a string!"
abscissa_name = "abscissa"
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
def test_abscissae_name_non_string(self):
"""
Datac instantiation should fail if abscissa_name is not a string
"""
params = {"fake": 1.}
abscissae = [1, 2]
abscissa_name = None
self.assertRaises(TypeError, datac.Datac, params, abscissae, abscissa_name)
|
|
81ae4a91b2dddaf64f7d722971224acb5682de67
|
examples/flowy_workflows/not_found_act.py
|
examples/flowy_workflows/not_found_act.py
|
from flowy import Workflow, ActivityProxy, WorkflowProxy
from flowy import make_config, workflow_config
@workflow_config('SimpleWorkflow', 2, 'constant_list', 60, 60)
class SimpleWorkflow(Workflow):
"""
Does nothing
"""
div = ActivityProxy(
name='InexistentActivity',
version=1,
task_list='constant_list',
)
def run(self, remote):
r = remote.div()
print(r.result())
return True
if __name__ == '__main__':
my_config = make_config('RolisTest')
# f = open("/home/local/3PILLAR/rszabo/flowy/mocks_output.txt", "w")
# f.close()
# f = open("/home/local/3PILLAR/rszabo/flowy/mocks.txt", "w")
# f.close()
# Start a workflow
SimpleWorkflowID = my_config.workflow_starter('SimpleWorkflow', 2)
print 'Starting: ', SimpleWorkflowID()
# Start the workflow loop
my_config.scan()
my_config.start_workflow_loop(task_list='constant_list')
|
Add wf for inexistent activity
|
Add wf for inexistent activity
|
Python
|
mit
|
severb/flowy
|
Add wf for inexistent activity
|
from flowy import Workflow, ActivityProxy, WorkflowProxy
from flowy import make_config, workflow_config
@workflow_config('SimpleWorkflow', 2, 'constant_list', 60, 60)
class SimpleWorkflow(Workflow):
"""
Does nothing
"""
div = ActivityProxy(
name='InexistentActivity',
version=1,
task_list='constant_list',
)
def run(self, remote):
r = remote.div()
print(r.result())
return True
if __name__ == '__main__':
my_config = make_config('RolisTest')
# f = open("/home/local/3PILLAR/rszabo/flowy/mocks_output.txt", "w")
# f.close()
# f = open("/home/local/3PILLAR/rszabo/flowy/mocks.txt", "w")
# f.close()
# Start a workflow
SimpleWorkflowID = my_config.workflow_starter('SimpleWorkflow', 2)
print 'Starting: ', SimpleWorkflowID()
# Start the workflow loop
my_config.scan()
my_config.start_workflow_loop(task_list='constant_list')
|
<commit_before><commit_msg>Add wf for inexistent activity<commit_after>
|
from flowy import Workflow, ActivityProxy, WorkflowProxy
from flowy import make_config, workflow_config
@workflow_config('SimpleWorkflow', 2, 'constant_list', 60, 60)
class SimpleWorkflow(Workflow):
"""
Does nothing
"""
div = ActivityProxy(
name='InexistentActivity',
version=1,
task_list='constant_list',
)
def run(self, remote):
r = remote.div()
print(r.result())
return True
if __name__ == '__main__':
my_config = make_config('RolisTest')
# f = open("/home/local/3PILLAR/rszabo/flowy/mocks_output.txt", "w")
# f.close()
# f = open("/home/local/3PILLAR/rszabo/flowy/mocks.txt", "w")
# f.close()
# Start a workflow
SimpleWorkflowID = my_config.workflow_starter('SimpleWorkflow', 2)
print 'Starting: ', SimpleWorkflowID()
# Start the workflow loop
my_config.scan()
my_config.start_workflow_loop(task_list='constant_list')
|
Add wf for inexistent activityfrom flowy import Workflow, ActivityProxy, WorkflowProxy
from flowy import make_config, workflow_config
@workflow_config('SimpleWorkflow', 2, 'constant_list', 60, 60)
class SimpleWorkflow(Workflow):
"""
Does nothing
"""
div = ActivityProxy(
name='InexistentActivity',
version=1,
task_list='constant_list',
)
def run(self, remote):
r = remote.div()
print(r.result())
return True
if __name__ == '__main__':
my_config = make_config('RolisTest')
# f = open("/home/local/3PILLAR/rszabo/flowy/mocks_output.txt", "w")
# f.close()
# f = open("/home/local/3PILLAR/rszabo/flowy/mocks.txt", "w")
# f.close()
# Start a workflow
SimpleWorkflowID = my_config.workflow_starter('SimpleWorkflow', 2)
print 'Starting: ', SimpleWorkflowID()
# Start the workflow loop
my_config.scan()
my_config.start_workflow_loop(task_list='constant_list')
|
<commit_before><commit_msg>Add wf for inexistent activity<commit_after>from flowy import Workflow, ActivityProxy, WorkflowProxy
from flowy import make_config, workflow_config
@workflow_config('SimpleWorkflow', 2, 'constant_list', 60, 60)
class SimpleWorkflow(Workflow):
"""
Does nothing
"""
div = ActivityProxy(
name='InexistentActivity',
version=1,
task_list='constant_list',
)
def run(self, remote):
r = remote.div()
print(r.result())
return True
if __name__ == '__main__':
my_config = make_config('RolisTest')
# f = open("/home/local/3PILLAR/rszabo/flowy/mocks_output.txt", "w")
# f.close()
# f = open("/home/local/3PILLAR/rszabo/flowy/mocks.txt", "w")
# f.close()
# Start a workflow
SimpleWorkflowID = my_config.workflow_starter('SimpleWorkflow', 2)
print 'Starting: ', SimpleWorkflowID()
# Start the workflow loop
my_config.scan()
my_config.start_workflow_loop(task_list='constant_list')
|
|
b1e7846b77a8260079632804d4628352e532bd42
|
src/data_set_builder/get_demographics.py
|
src/data_set_builder/get_demographics.py
|
from sqlalchemy.orm import sessionmaker
# scraping modules
from scrapy.crawler import CrawlerProcess, Crawler
from demographic_scraper.demographic_scraper.spiders.alexa_spider import AlexaSpider
from scrapy.utils.project import get_project_settings
from models import db_connect, WebsitesContent
def main():
"""Index alexa demographics
"""
engine = db_connect()
Session = sessionmaker(bind=engine)
session = Session()
settings = get_project_settings()
settings.set('ITEM_PIPELINES',
{'demographic_scraper.demographic_scraper.pipelines.WebsiteDemographicPipeline': 300})
settings.set('EXTENSIONS',
{'scrapy.telnet.TelnetConsole': None,})
process = CrawlerProcess(settings)
for website in session.query(WebsitesContent).all():
url = website.link
print website.link
AlexaSpider.name = url
process.crawl(AlexaSpider, url=url, db_session=session)
process.start()
process.stop()
session.close()
if __name__ == '__main__':
main()
|
Add script to fetch demographics.
|
Add script to fetch demographics.
|
Python
|
mit
|
piatra/ssl-project
|
Add script to fetch demographics.
|
from sqlalchemy.orm import sessionmaker
# scraping modules
from scrapy.crawler import CrawlerProcess, Crawler
from demographic_scraper.demographic_scraper.spiders.alexa_spider import AlexaSpider
from scrapy.utils.project import get_project_settings
from models import db_connect, WebsitesContent
def main():
"""Index alexa demographics
"""
engine = db_connect()
Session = sessionmaker(bind=engine)
session = Session()
settings = get_project_settings()
settings.set('ITEM_PIPELINES',
{'demographic_scraper.demographic_scraper.pipelines.WebsiteDemographicPipeline': 300})
settings.set('EXTENSIONS',
{'scrapy.telnet.TelnetConsole': None,})
process = CrawlerProcess(settings)
for website in session.query(WebsitesContent).all():
url = website.link
print website.link
AlexaSpider.name = url
process.crawl(AlexaSpider, url=url, db_session=session)
process.start()
process.stop()
session.close()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to fetch demographics.<commit_after>
|
from sqlalchemy.orm import sessionmaker
# scraping modules
from scrapy.crawler import CrawlerProcess, Crawler
from demographic_scraper.demographic_scraper.spiders.alexa_spider import AlexaSpider
from scrapy.utils.project import get_project_settings
from models import db_connect, WebsitesContent
def main():
"""Index alexa demographics
"""
engine = db_connect()
Session = sessionmaker(bind=engine)
session = Session()
settings = get_project_settings()
settings.set('ITEM_PIPELINES',
{'demographic_scraper.demographic_scraper.pipelines.WebsiteDemographicPipeline': 300})
settings.set('EXTENSIONS',
{'scrapy.telnet.TelnetConsole': None,})
process = CrawlerProcess(settings)
for website in session.query(WebsitesContent).all():
url = website.link
print website.link
AlexaSpider.name = url
process.crawl(AlexaSpider, url=url, db_session=session)
process.start()
process.stop()
session.close()
if __name__ == '__main__':
main()
|
Add script to fetch demographics.from sqlalchemy.orm import sessionmaker
# scraping modules
from scrapy.crawler import CrawlerProcess, Crawler
from demographic_scraper.demographic_scraper.spiders.alexa_spider import AlexaSpider
from scrapy.utils.project import get_project_settings
from models import db_connect, WebsitesContent
def main():
"""Index alexa demographics
"""
engine = db_connect()
Session = sessionmaker(bind=engine)
session = Session()
settings = get_project_settings()
settings.set('ITEM_PIPELINES',
{'demographic_scraper.demographic_scraper.pipelines.WebsiteDemographicPipeline': 300})
settings.set('EXTENSIONS',
{'scrapy.telnet.TelnetConsole': None,})
process = CrawlerProcess(settings)
for website in session.query(WebsitesContent).all():
url = website.link
print website.link
AlexaSpider.name = url
process.crawl(AlexaSpider, url=url, db_session=session)
process.start()
process.stop()
session.close()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to fetch demographics.<commit_after>from sqlalchemy.orm import sessionmaker
# scraping modules
from scrapy.crawler import CrawlerProcess, Crawler
from demographic_scraper.demographic_scraper.spiders.alexa_spider import AlexaSpider
from scrapy.utils.project import get_project_settings
from models import db_connect, WebsitesContent
def main():
"""Index alexa demographics
"""
engine = db_connect()
Session = sessionmaker(bind=engine)
session = Session()
settings = get_project_settings()
settings.set('ITEM_PIPELINES',
{'demographic_scraper.demographic_scraper.pipelines.WebsiteDemographicPipeline': 300})
settings.set('EXTENSIONS',
{'scrapy.telnet.TelnetConsole': None,})
process = CrawlerProcess(settings)
for website in session.query(WebsitesContent).all():
url = website.link
print website.link
AlexaSpider.name = url
process.crawl(AlexaSpider, url=url, db_session=session)
process.start()
process.stop()
session.close()
if __name__ == '__main__':
main()
|
|
03e494dda74ca6bd7efb43e77b2b0de80e7cba4e
|
osf/migrations/0097_auto_20180503_0947.py
|
osf/migrations/0097_auto_20180503_0947.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-05-03 14:47
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.models import Permission
from osf.models import OSFUser
# Old permission groups
add_preprintservice = Permission.objects.get(codename='add_preprintservice')
change_preprintservice = Permission.objects.get(codename='change_preprintservice')
delete_preprintservice = Permission.objects.get(codename='delete_preprintservice')
view_preprintservice = Permission.objects.get(codename='view_preprintservice')
# New permission groups
add_preprint = Permission.objects.get(codename='add_preprint')
change_preprint = Permission.objects.get(codename='change_preprint')
delete_preprint = Permission.objects.get(codename='delete_preprint')
osf_admin_view_preprint = Permission.objects.get(codename='osf_admin_view_preprint')
def unmigrate_preprint_service_permissions(state, schema):
remove_users_from_permission(add_preprint)
remove_users_from_permission(change_preprint)
remove_users_from_permission(delete_preprint)
remove_users_from_permission(osf_admin_view_preprint)
def remove_users_from_permission(perm):
for user in OSFUser.objects.filter(user_permissions=perm):
user.user_permissions.remove(perm)
def add_users_to_renamed_permission(old_perm, new_perm):
for user in OSFUser.objects.filter(user_permissions=old_perm):
user.user_permissions.add(new_perm)
def migrate_preprint_service_permissions(state, schema):
add_users_to_renamed_permission(add_preprintservice, add_preprint)
add_users_to_renamed_permission(change_preprintservice, change_preprint)
add_users_to_renamed_permission(delete_preprintservice, delete_preprint)
add_users_to_renamed_permission(view_preprintservice, osf_admin_view_preprint)
class Migration(migrations.Migration):
dependencies = [
('osf', '0096_preprint_node_divorce'),
]
operations = [
migrations.RunPython(migrate_preprint_service_permissions, unmigrate_preprint_service_permissions),
]
|
Add a data migration for adding the users from the old preprintservice permissions to the newly named preprint permissions.
|
Add a data migration for adding the users from the old preprintservice permissions to the newly named preprint permissions.
|
Python
|
apache-2.0
|
felliott/osf.io,adlius/osf.io,felliott/osf.io,baylee-d/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,mfraezz/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,mfraezz/osf.io,aaxelb/osf.io,felliott/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,adlius/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,cslzchen/osf.io,adlius/osf.io,pattisdr/osf.io,felliott/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,saradbowman/osf.io,mattclark/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,mattclark/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,adlius/osf.io,cslzchen/osf.io,pattisdr/osf.io
|
Add a data migration for adding the users from the old preprintservice permissions to the newly named preprint permissions.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-05-03 14:47
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.models import Permission
from osf.models import OSFUser
# Old permission groups
add_preprintservice = Permission.objects.get(codename='add_preprintservice')
change_preprintservice = Permission.objects.get(codename='change_preprintservice')
delete_preprintservice = Permission.objects.get(codename='delete_preprintservice')
view_preprintservice = Permission.objects.get(codename='view_preprintservice')
# New permission groups
add_preprint = Permission.objects.get(codename='add_preprint')
change_preprint = Permission.objects.get(codename='change_preprint')
delete_preprint = Permission.objects.get(codename='delete_preprint')
osf_admin_view_preprint = Permission.objects.get(codename='osf_admin_view_preprint')
def unmigrate_preprint_service_permissions(state, schema):
remove_users_from_permission(add_preprint)
remove_users_from_permission(change_preprint)
remove_users_from_permission(delete_preprint)
remove_users_from_permission(osf_admin_view_preprint)
def remove_users_from_permission(perm):
for user in OSFUser.objects.filter(user_permissions=perm):
user.user_permissions.remove(perm)
def add_users_to_renamed_permission(old_perm, new_perm):
for user in OSFUser.objects.filter(user_permissions=old_perm):
user.user_permissions.add(new_perm)
def migrate_preprint_service_permissions(state, schema):
add_users_to_renamed_permission(add_preprintservice, add_preprint)
add_users_to_renamed_permission(change_preprintservice, change_preprint)
add_users_to_renamed_permission(delete_preprintservice, delete_preprint)
add_users_to_renamed_permission(view_preprintservice, osf_admin_view_preprint)
class Migration(migrations.Migration):
dependencies = [
('osf', '0096_preprint_node_divorce'),
]
operations = [
migrations.RunPython(migrate_preprint_service_permissions, unmigrate_preprint_service_permissions),
]
|
<commit_before><commit_msg>Add a data migration for adding the users from the old preprintservice permissions to the newly named preprint permissions.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-05-03 14:47
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.models import Permission
from osf.models import OSFUser
# Old permission groups
add_preprintservice = Permission.objects.get(codename='add_preprintservice')
change_preprintservice = Permission.objects.get(codename='change_preprintservice')
delete_preprintservice = Permission.objects.get(codename='delete_preprintservice')
view_preprintservice = Permission.objects.get(codename='view_preprintservice')
# New permission groups
add_preprint = Permission.objects.get(codename='add_preprint')
change_preprint = Permission.objects.get(codename='change_preprint')
delete_preprint = Permission.objects.get(codename='delete_preprint')
osf_admin_view_preprint = Permission.objects.get(codename='osf_admin_view_preprint')
def unmigrate_preprint_service_permissions(state, schema):
remove_users_from_permission(add_preprint)
remove_users_from_permission(change_preprint)
remove_users_from_permission(delete_preprint)
remove_users_from_permission(osf_admin_view_preprint)
def remove_users_from_permission(perm):
for user in OSFUser.objects.filter(user_permissions=perm):
user.user_permissions.remove(perm)
def add_users_to_renamed_permission(old_perm, new_perm):
for user in OSFUser.objects.filter(user_permissions=old_perm):
user.user_permissions.add(new_perm)
def migrate_preprint_service_permissions(state, schema):
add_users_to_renamed_permission(add_preprintservice, add_preprint)
add_users_to_renamed_permission(change_preprintservice, change_preprint)
add_users_to_renamed_permission(delete_preprintservice, delete_preprint)
add_users_to_renamed_permission(view_preprintservice, osf_admin_view_preprint)
class Migration(migrations.Migration):
dependencies = [
('osf', '0096_preprint_node_divorce'),
]
operations = [
migrations.RunPython(migrate_preprint_service_permissions, unmigrate_preprint_service_permissions),
]
|
Add a data migration for adding the users from the old preprintservice permissions to the newly named preprint permissions.# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-05-03 14:47
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.models import Permission
from osf.models import OSFUser
# Old permission groups
add_preprintservice = Permission.objects.get(codename='add_preprintservice')
change_preprintservice = Permission.objects.get(codename='change_preprintservice')
delete_preprintservice = Permission.objects.get(codename='delete_preprintservice')
view_preprintservice = Permission.objects.get(codename='view_preprintservice')
# New permission groups
add_preprint = Permission.objects.get(codename='add_preprint')
change_preprint = Permission.objects.get(codename='change_preprint')
delete_preprint = Permission.objects.get(codename='delete_preprint')
osf_admin_view_preprint = Permission.objects.get(codename='osf_admin_view_preprint')
def unmigrate_preprint_service_permissions(state, schema):
remove_users_from_permission(add_preprint)
remove_users_from_permission(change_preprint)
remove_users_from_permission(delete_preprint)
remove_users_from_permission(osf_admin_view_preprint)
def remove_users_from_permission(perm):
for user in OSFUser.objects.filter(user_permissions=perm):
user.user_permissions.remove(perm)
def add_users_to_renamed_permission(old_perm, new_perm):
for user in OSFUser.objects.filter(user_permissions=old_perm):
user.user_permissions.add(new_perm)
def migrate_preprint_service_permissions(state, schema):
add_users_to_renamed_permission(add_preprintservice, add_preprint)
add_users_to_renamed_permission(change_preprintservice, change_preprint)
add_users_to_renamed_permission(delete_preprintservice, delete_preprint)
add_users_to_renamed_permission(view_preprintservice, osf_admin_view_preprint)
class Migration(migrations.Migration):
dependencies = [
('osf', '0096_preprint_node_divorce'),
]
operations = [
migrations.RunPython(migrate_preprint_service_permissions, unmigrate_preprint_service_permissions),
]
|
<commit_before><commit_msg>Add a data migration for adding the users from the old preprintservice permissions to the newly named preprint permissions.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-05-03 14:47
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.models import Permission
from osf.models import OSFUser
# Old permission groups
add_preprintservice = Permission.objects.get(codename='add_preprintservice')
change_preprintservice = Permission.objects.get(codename='change_preprintservice')
delete_preprintservice = Permission.objects.get(codename='delete_preprintservice')
view_preprintservice = Permission.objects.get(codename='view_preprintservice')
# New permission groups
add_preprint = Permission.objects.get(codename='add_preprint')
change_preprint = Permission.objects.get(codename='change_preprint')
delete_preprint = Permission.objects.get(codename='delete_preprint')
osf_admin_view_preprint = Permission.objects.get(codename='osf_admin_view_preprint')
def unmigrate_preprint_service_permissions(state, schema):
remove_users_from_permission(add_preprint)
remove_users_from_permission(change_preprint)
remove_users_from_permission(delete_preprint)
remove_users_from_permission(osf_admin_view_preprint)
def remove_users_from_permission(perm):
for user in OSFUser.objects.filter(user_permissions=perm):
user.user_permissions.remove(perm)
def add_users_to_renamed_permission(old_perm, new_perm):
for user in OSFUser.objects.filter(user_permissions=old_perm):
user.user_permissions.add(new_perm)
def migrate_preprint_service_permissions(state, schema):
add_users_to_renamed_permission(add_preprintservice, add_preprint)
add_users_to_renamed_permission(change_preprintservice, change_preprint)
add_users_to_renamed_permission(delete_preprintservice, delete_preprint)
add_users_to_renamed_permission(view_preprintservice, osf_admin_view_preprint)
class Migration(migrations.Migration):
dependencies = [
('osf', '0096_preprint_node_divorce'),
]
operations = [
migrations.RunPython(migrate_preprint_service_permissions, unmigrate_preprint_service_permissions),
]
|
|
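Note on the migration above: the module-level Permission.objects.get(...) lookups run as soon as Django imports the migration file, which fails on any database where those permissions do not exist yet. Django's convention is to resolve models inside the RunPython callables through the historical app registry they receive. A minimal sketch of that pattern, assuming the registry can resolve both models at this point in the graph (a sketch, not the code the commit shipped):

def migrate_preprint_service_permissions(state, schema):
    Permission = state.get_model('auth', 'Permission')
    OSFUser = state.get_model('osf', 'OSFUser')
    renames = [
        ('add_preprintservice', 'add_preprint'),
        ('change_preprintservice', 'change_preprint'),
        ('delete_preprintservice', 'delete_preprint'),
        ('view_preprintservice', 'osf_admin_view_preprint'),
    ]
    for old_codename, new_codename in renames:
        old_perm = Permission.objects.get(codename=old_codename)
        new_perm = Permission.objects.get(codename=new_codename)
        # Grant the renamed permission to every holder of the old one.
        for user in OSFUser.objects.filter(user_permissions=old_perm):
            user.user_permissions.add(new_perm)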
5d773799784ab945bb4cd9ae66042d13890be3b6
|
ocradmin/core/templatetags/formatattr.py
|
ocradmin/core/templatetags/formatattr.py
|
import re
from django.template import Library
from django.conf import settings
register = Library()
@register.filter
def formatattr(value):
"""Formats an object's nested attribute for display"""
value = value.split(".")[-1]
return " ".join([p.capitalize() for p in value.split("_")])
|
Add a template tag for pretty printing nested attr names
|
Add a template tag for pretty printing nested attr names
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
Add a template tag for pretty printing nested attr names
|
import re
from django.template import Library
from django.conf import settings
register = Library()
@register.filter
def formatattr(value):
"""Formats an object's nested attribute for display"""
value = value.split(".")[-1]
return " ".join([p.capitalize() for p in value.split("_")])
|
<commit_before><commit_msg>Add a template tag for pretty printing nested attr names<commit_after>
|
import re
from django.template import Library
from django.conf import settings
register = Library()
@register.filter
def formatattr(value):
"""Formats an object's nested attribute for display"""
value = value.split(".")[-1]
return " ".join([p.capitalize() for p in value.split("_")])
|
Add a template tag for pretty printing nested attr namesimport re
from django.template import Library
from django.conf import settings
register = Library()
@register.filter
def formatattr(value):
"""Formats an object's nested attribute for display"""
value = value.split(".")[-1]
return " ".join([p.capitalize() for p in value.split("_")])
|
<commit_before><commit_msg>Add a template tag for pretty printing nested attr names<commit_after>import re
from django.template import Library
from django.conf import settings
register = Library()
@register.filter
def formatattr(value):
"""Formats an object's nested attribute for display"""
value = value.split(".")[-1]
return " ".join([p.capitalize() for p in value.split("_")])
|
|
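For reference, the formatattr filter above maps a dotted, snake_case attribute path to a title-cased label: the last dot-separated component is split on underscores and each piece is capitalized. In a template it would be loaded with {% load formatattr %} and applied as {{ value|formatattr }}. A quick check of the pure function (the import path is assumed from the file location):

from ocradmin.core.templatetags.formatattr import formatattr

assert formatattr("task.page_name") == "Page Name"
assert formatattr("binarize_threshold") == "Binarize Threshold"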
effc09edd607d7975a01b3652b4932e40fb0f7f9
|
bin/combine-examples.py
|
bin/combine-examples.py
|
#!/usr/bin/python
import re
import sys
def main(argv):
examples = {}
requires = set()
for filename in argv[1:]:
lines = open(filename).readlines()
if len(lines) > 0 and lines[0].startswith('// NOCOMPILE'):
continue
requires.update(line for line in lines if line.startswith('goog.require'))
examples[filename] = [line for line in lines if not line.startswith('goog.require')]
for require in sorted(requires):
print require,
for filename in sorted(examples.keys()):
print '// ', filename
print '(function(){'
for line in examples[filename]:
print line,
print '})();'
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add script to combine examples
|
Add script to combine examples
|
Python
|
bsd-2-clause
|
kkuunnddaannkk/ol3,landonb/ol3,pmlrsg/ol3,tamarmot/ol3,elemoine/ol3,alexbrault/ol3,Distem/ol3,t27/ol3,Andrey-Pavlov/ol3,bogdanvaduva/ol3,fblackburn/ol3,jacmendt/ol3,bogdanvaduva/ol3,ahocevar/ol3,Distem/ol3,thhomas/ol3,jmiller-boundless/ol3,klokantech/ol3raster,geekdenz/openlayers,stweil/openlayers,antonio83moura/ol3,alexbrault/ol3,jmiller-boundless/ol3,Morgul/ol3,ahocevar/openlayers,alvinlindstam/ol3,Antreasgr/ol3,fredj/ol3,gingerik/ol3,wlerner/ol3,xiaoqqchen/ol3,klokantech/ol3raster,adube/ol3,geekdenz/ol3,oterral/ol3,Morgul/ol3,geekdenz/openlayers,fblackburn/ol3,landonb/ol3,mzur/ol3,NOAA-ORR-ERD/ol3,stweil/openlayers,ahocevar/openlayers,geonux/ol3,mechdrew/ol3,fredj/ol3,ahocevar/openlayers,freylis/ol3,itayod/ol3,gingerik/ol3,fperucic/ol3,thomasmoelhave/ol3,pmlrsg/ol3,itayod/ol3,Andrey-Pavlov/ol3,CandoImage/ol3,tsauerwein/ol3,wlerner/ol3,Andrey-Pavlov/ol3,openlayers/openlayers,gingerik/ol3,elemoine/ol3,stweil/ol3,fredj/ol3,bartvde/ol3,NOAA-ORR-ERD/ol3,planetlabs/ol3,kkuunnddaannkk/ol3,bartvde/ol3,klokantech/ol3,geonux/ol3,jacmendt/ol3,thhomas/ol3,ahocevar/ol3,bogdanvaduva/ol3,bogdanvaduva/ol3,epointal/ol3,geekdenz/ol3,thomasmoelhave/ol3,thomasmoelhave/ol3,thomasmoelhave/ol3,freylis/ol3,richstoner/ol3,adube/ol3,tschaub/ol3,kjelderg/ol3,planetlabs/ol3,llambanna/ol3,kjelderg/ol3,das-peter/ol3,jacmendt/ol3,fblackburn/ol3,mechdrew/ol3,elemoine/ol3,hafenr/ol3,klokantech/ol3,epointal/ol3,tschaub/ol3,xiaoqqchen/ol3,bill-chadwick/ol3,tsauerwein/ol3,aisaacs/ol3,ahocevar/ol3,fperucic/ol3,stweil/ol3,tsauerwein/ol3,aisaacs/ol3,kjelderg/ol3,planetlabs/ol3,wlerner/ol3,fblackburn/ol3,richstoner/ol3,jmiller-boundless/ol3,freylis/ol3,jacmendt/ol3,klokantech/ol3raster,alexbrault/ol3,tamarmot/ol3,pmlrsg/ol3,mzur/ol3,adube/ol3,Antreasgr/ol3,ahocevar/ol3,mzur/ol3,bjornharrtell/ol3,llambanna/ol3,kjelderg/ol3,t27/ol3,klokantech/ol3,itayod/ol3,tamarmot/ol3,t27/ol3,hafenr/ol3,alvinlindstam/ol3,pmlrsg/ol3,jmiller-boundless/ol3,yinxiaomei/ol3,epointal/ol3,alvinlindstam/ol3,alvinlindstam/ol3,xiaoqqchen/ol3,das-peter/ol3,klokantech/ol3raster,richstoner/ol3,yinxiaomei/ol3,kkuunnddaannkk/ol3,alexbrault/ol3,stweil/ol3,antonio83moura/ol3,elemoine/ol3,richstoner/ol3,Andrey-Pavlov/ol3,thhomas/ol3,wlerner/ol3,aisaacs/ol3,oterral/ol3,geekdenz/ol3,t27/ol3,mechdrew/ol3,tschaub/ol3,bjornharrtell/ol3,epointal/ol3,fperucic/ol3,denilsonsa/ol3,kkuunnddaannkk/ol3,Morgul/ol3,Antreasgr/ol3,Distem/ol3,gingerik/ol3,geekdenz/ol3,fperucic/ol3,Distem/ol3,aisaacs/ol3,stweil/openlayers,bill-chadwick/ol3,landonb/ol3,denilsonsa/ol3,bjornharrtell/ol3,openlayers/openlayers,oterral/ol3,bartvde/ol3,mechdrew/ol3,CandoImage/ol3,CandoImage/ol3,hafenr/ol3,denilsonsa/ol3,das-peter/ol3,fredj/ol3,xiaoqqchen/ol3,planetlabs/ol3,llambanna/ol3,antonio83moura/ol3,tsauerwein/ol3,denilsonsa/ol3,tamarmot/ol3,Morgul/ol3,NOAA-ORR-ERD/ol3,klokantech/ol3,thhomas/ol3,freylis/ol3,bartvde/ol3,geonux/ol3,jmiller-boundless/ol3,stweil/ol3,tschaub/ol3,yinxiaomei/ol3,yinxiaomei/ol3,llambanna/ol3,geekdenz/openlayers,hafenr/ol3,antonio83moura/ol3,mzur/ol3,Antreasgr/ol3,NOAA-ORR-ERD/ol3,bill-chadwick/ol3,CandoImage/ol3,geonux/ol3,das-peter/ol3,bill-chadwick/ol3,openlayers/openlayers,landonb/ol3,itayod/ol3
|
Add script to combine examples
|
#!/usr/bin/python
import re
import sys
def main(argv):
examples = {}
requires = set()
for filename in argv[1:]:
lines = open(filename).readlines()
if len(lines) > 0 and lines[0].startswith('// NOCOMPILE'):
continue
requires.update(line for line in lines if line.startswith('goog.require'))
examples[filename] = [line for line in lines if not line.startswith('goog.require')]
for require in sorted(requires):
print require,
for filename in sorted(examples.keys()):
print '// ', filename
print '(function(){'
for line in examples[filename]:
print line,
print '})();'
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add script to combine examples<commit_after>
|
#!/usr/bin/python
import re
import sys
def main(argv):
examples = {}
requires = set()
for filename in argv[1:]:
lines = open(filename).readlines()
if len(lines) > 0 and lines[0].startswith('// NOCOMPILE'):
continue
requires.update(line for line in lines if line.startswith('goog.require'))
examples[filename] = [line for line in lines if not line.startswith('goog.require')]
for require in sorted(requires):
print require,
for filename in sorted(examples.keys()):
print '// ', filename
print '(function(){'
for line in examples[filename]:
print line,
print '})();'
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add script to combine examples#!/usr/bin/python
import re
import sys
def main(argv):
examples = {}
requires = set()
for filename in argv[1:]:
lines = open(filename).readlines()
if len(lines) > 0 and lines[0].startswith('// NOCOMPILE'):
continue
requires.update(line for line in lines if line.startswith('goog.require'))
examples[filename] = [line for line in lines if not line.startswith('goog.require')]
for require in sorted(requires):
print require,
for filename in sorted(examples.keys()):
print '// ', filename
print '(function(){'
for line in examples[filename]:
print line,
print '})();'
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add script to combine examples<commit_after>#!/usr/bin/python
import re
import sys
def main(argv):
examples = {}
requires = set()
for filename in argv[1:]:
lines = open(filename).readlines()
if len(lines) > 0 and lines[0].startswith('// NOCOMPILE'):
continue
requires.update(line for line in lines if line.startswith('goog.require'))
examples[filename] = [line for line in lines if not line.startswith('goog.require')]
for require in sorted(requires):
print require,
for filename in sorted(examples.keys()):
print '// ', filename
print '(function(){'
for line in examples[filename]:
print line,
print '})();'
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
|
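Usage note for the script above: it takes example file paths on the command line, skips any file whose first line starts with // NOCOMPILE, hoists every goog.require line to the top so each appears only once, and wraps each remaining file body in an immediately-invoked function so top-level names cannot collide; the combined source goes to stdout. A hypothetical invocation (paths assumed): python bin/combine-examples.py examples/*.js > build/examples-combined.js. The print statements are Python 2; a Python 3 port would need, for example:

print(require, end='')   # replaces: print require,
print('// ', filename)   # replaces: print '// ', filename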
f2bf31385f9a2969ed7a1f1f082ade54c4d3a72b
|
dataactcore/migrations/versions/c3a3389bda57_merge_9dded_5c594.py
|
dataactcore/migrations/versions/c3a3389bda57_merge_9dded_5c594.py
|
"""Merge 9dded 5c594
Revision ID: c3a3389bda57
Revises: 9dded6e6bf79, 5c594d23709b
Create Date: 2016-11-04 15:10:00.221900
"""
# revision identifiers, used by Alembic.
revision = 'c3a3389bda57'
down_revision = ('9dded6e6bf79', '5c594d23709b')
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
pass
def downgrade_data_broker():
pass
|
Add merge commit for migrations
|
Add merge commit for migrations
|
Python
|
cc0-1.0
|
fedspendingtransparency/data-act-broker-backend,fedspendingtransparency/data-act-broker-backend
|
Add merge commit for migrations
|
"""Merge 9dded 5c594
Revision ID: c3a3389bda57
Revises: 9dded6e6bf79, 5c594d23709b
Create Date: 2016-11-04 15:10:00.221900
"""
# revision identifiers, used by Alembic.
revision = 'c3a3389bda57'
down_revision = ('9dded6e6bf79', '5c594d23709b')
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
pass
def downgrade_data_broker():
pass
|
<commit_before><commit_msg>Add merge commit for migrations<commit_after>
|
"""Merge 9dded 5c594
Revision ID: c3a3389bda57
Revises: 9dded6e6bf79, 5c594d23709b
Create Date: 2016-11-04 15:10:00.221900
"""
# revision identifiers, used by Alembic.
revision = 'c3a3389bda57'
down_revision = ('9dded6e6bf79', '5c594d23709b')
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
pass
def downgrade_data_broker():
pass
|
Add merge commit for migrations"""Merge 9dded 5c594
Revision ID: c3a3389bda57
Revises: 9dded6e6bf79, 5c594d23709b
Create Date: 2016-11-04 15:10:00.221900
"""
# revision identifiers, used by Alembic.
revision = 'c3a3389bda57'
down_revision = ('9dded6e6bf79', '5c594d23709b')
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
pass
def downgrade_data_broker():
pass
|
<commit_before><commit_msg>Add merge commit for migrations<commit_after>"""Merge 9dded 5c594
Revision ID: c3a3389bda57
Revises: 9dded6e6bf79, 5c594d23709b
Create Date: 2016-11-04 15:10:00.221900
"""
# revision identifiers, used by Alembic.
revision = 'c3a3389bda57'
down_revision = ('9dded6e6bf79', '5c594d23709b')
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
pass
def downgrade_data_broker():
pass
|
|
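A merge revision like the one above is normally generated rather than hand-written: with two divergent heads, a command along the lines of alembic merge -m "Merge 9dded 5c594" 9dded6e6bf79 5c594d23709b emits a file whose down_revision is the tuple of both parent revisions, and the empty upgrade/downgrade bodies are expected, since a merge point only joins the graph. The globals()["upgrade_%s" % engine_name]() indirection simply dispatches to the per-engine function, so upgrade('data_broker') resolves to upgrade_data_broker().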
0cbf03210468e07ba425ff80a48a1786607b2ce6
|
updatorr/tracker_handlers/handler_rutor.py
|
updatorr/tracker_handlers/handler_rutor.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 16 14:17:04 2013
@author: lucky
"""
from updatorr.handler_base import BaseTrackerHandler
from updatorr.utils import register_tracker_handler
class RutorHandler(BaseTrackerHandler):
"""This class implements .torrent files downloads
for http://rutor.org tracker."""
login_required = False
def get_torrent_file(self):
"""This is the main method which returns
a filepath to the downloaded file."""
torrent_file = None
download_link = self.get_download_link()
if download_link is None:
self.dump_error('Cannot find torrent file download link at %s' % self.resource_url)
else:
self.debug('Torrent download link found: %s' % download_link)
torrent_file = self.download_torrent(download_link)
return torrent_file
def get_id_from_link(self):
"""Returns forum thread identifier from full thread URL."""
lst = self.resource_url.split('/')
lst.reverse()
return lst[0]
def get_download_link(self):
"""Tries to find .torrent file download link at forum thread page
and return that one."""
linkToFind = "d.rutor.org/download/" + self.get_id_from_link()
response, page_html = self.get_resource(self.resource_url)
page_links = self.find_links(page_html)
download_link = None
for page_link in page_links:
if linkToFind in page_link:
download_link = page_link
break
return download_link
def download_torrent(self, url):
"""Gets .torrent file contents from given URL and
stores that in a temporary file within a filesystem.
Returns a path to that file.
"""
self.debug('Downloading torrent file from %s ...' % url)
# That was a check that user himself visited torrent's page ;)
response, contents = self.get_resource(url)
return self.store_tmp_torrent(contents)
# With that one we tell Updatorr to handle links to the `rutor.org` domain with the RutorHandler class.
register_tracker_handler('rutor.org', RutorHandler)
|
Add rutor.org torrent tracker support
|
Add rutor.org torrent tracker support
|
Python
|
bsd-3-clause
|
idlesign/deluge-updatorr,idlesign/deluge-updatorr
|
Add rutor.org torrent tracker support
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 16 14:17:04 2013
@author: lucky
"""
from updatorr.handler_base import BaseTrackerHandler
from updatorr.utils import register_tracker_handler
class RutorHandler(BaseTrackerHandler):
"""This class implements .torrent files downloads
for http://rutor.org tracker."""
login_required = False
def get_torrent_file(self):
"""This is the main method which returns
a filepath to the downloaded file."""
torrent_file = None
download_link = self.get_download_link()
if download_link is None:
self.dump_error('Cannot find torrent file download link at %s' % self.resource_url)
else:
self.debug('Torrent download link found: %s' % download_link)
torrent_file = self.download_torrent(download_link)
return torrent_file
def get_id_from_link(self):
"""Returns forum thread identifier from full thread URL."""
lst = self.resource_url.split('/')
lst.reverse()
return lst[0]
def get_download_link(self):
"""Tries to find .torrent file download link at forum thread page
and return that one."""
linkToFind = "d.rutor.org/download/" + self.get_id_from_link()
response, page_html = self.get_resource(self.resource_url)
page_links = self.find_links(page_html)
download_link = None
for page_link in page_links:
if linkToFind in page_link:
download_link = page_link
break
return download_link
def download_torrent(self, url):
"""Gets .torrent file contents from given URL and
stores that in a temporary file within a filesystem.
Returns a path to that file.
"""
self.debug('Downloading torrent file from %s ...' % url)
# That was a check that user himself visited torrent's page ;)
response, contents = self.get_resource(url)
return self.store_tmp_torrent(contents)
# With that one we tell Updatorr to handle links to the `rutor.org` domain with the RutorHandler class.
register_tracker_handler('rutor.org', RutorHandler)
|
<commit_before><commit_msg>Add rutor.org torrent tracker support<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 16 14:17:04 2013
@author: lucky
"""
from updatorr.handler_base import BaseTrackerHandler
from updatorr.utils import register_tracker_handler
class RutorHandler(BaseTrackerHandler):
"""This class implements .torrent files downloads
for http://rutor.org tracker."""
login_required = False
def get_torrent_file(self):
"""This is the main method which returns
a filepath to the downloaded file."""
torrent_file = None
download_link = self.get_download_link()
if download_link is None:
self.dump_error('Cannot find torrent file download link at %s' % self.resource_url)
else:
self.debug('Torrent download link found: %s' % download_link)
torrent_file = self.download_torrent(download_link)
return torrent_file
def get_id_from_link(self):
"""Returns forum thread identifier from full thread URL."""
lst = self.resource_url.split('/')
lst.reverse()
return lst[0]
def get_download_link(self):
"""Tries to find .torrent file download link at forum thread page
and return that one."""
linkToFind = "d.rutor.org/download/" + self.get_id_from_link()
response, page_html = self.get_resource(self.resource_url)
page_links = self.find_links(page_html)
download_link = None
for page_link in page_links:
if linkToFind in page_link:
download_link = page_link
break
return download_link
def download_torrent(self, url):
"""Gets .torrent file contents from given URL and
stores that in a temporary file within a filesystem.
Returns a path to that file.
"""
self.debug('Downloading torrent file from %s ...' % url)
# That was a check that user himself visited torrent's page ;)
response, contents = self.get_resource(url)
return self.store_tmp_torrent(contents)
# With that one we tell Updatorr to handle links to the `rutor.org` domain with the RutorHandler class.
register_tracker_handler('rutor.org', RutorHandler)
|
Add rutor.org torrent tracker support# -*- coding: utf-8 -*-
"""
Created on Sat Feb 16 14:17:04 2013
@author: lucky
"""
from updatorr.handler_base import BaseTrackerHandler
from updatorr.utils import register_tracker_handler
class RutorHandler(BaseTrackerHandler):
"""This class implements .torrent files downloads
for http://rutor.org tracker."""
login_required = False
def get_torrent_file(self):
"""This is the main method which returns
a filepath to the downloaded file."""
torrent_file = None
download_link = self.get_download_link()
if download_link is None:
self.dump_error('Cannot find torrent file download link at %s' % self.resource_url)
else:
self.debug('Torrent download link found: %s' % download_link)
torrent_file = self.download_torrent(download_link)
return torrent_file
def get_id_from_link(self):
"""Returns forum thread identifier from full thread URL."""
lst = self.resource_url.split('/')
lst.reverse()
return lst[0]
def get_download_link(self):
"""Tries to find .torrent file download link at forum thread page
and return that one."""
linkToFind = "d.rutor.org/download/" + self.get_id_from_link()
response, page_html = self.get_resource(self.resource_url)
page_links = self.find_links(page_html)
download_link = None
for page_link in page_links:
if linkToFind in page_link:
download_link = page_link
break
return download_link
def download_torrent(self, url):
"""Gets .torrent file contents from given URL and
stores that in a temporary file within a filesystem.
Returns a path to that file.
"""
self.debug('Downloading torrent file from %s ...' % url)
# That was a check that user himself visited torrent's page ;)
response, contents = self.get_resource(url)
return self.store_tmp_torrent(contents)
# With that one we tell updatetorr to handle links to `rutracker.org` domain with RutrackerHandler class.
register_tracker_handler('rutor.org', RutorHandler)
|
<commit_before><commit_msg>Add rutor.org torrent tracker support<commit_after># -*- coding: utf-8 -*-
"""
Created on Sat Feb 16 14:17:04 2013
@author: lucky
"""
from updatorr.handler_base import BaseTrackerHandler
from updatorr.utils import register_tracker_handler
class RutorHandler(BaseTrackerHandler):
"""This class implements .torrent files downloads
for http://rutor.org tracker."""
login_required = False
def get_torrent_file(self):
"""This is the main method which returns
a filepath to the downloaded file."""
torrent_file = None
download_link = self.get_download_link()
if download_link is None:
self.dump_error('Cannot find torrent file download link at %s' % self.resource_url)
else:
self.debug('Torrent download link found: %s' % download_link)
torrent_file = self.download_torrent(download_link)
return torrent_file
def get_id_from_link(self):
"""Returns forum thread identifier from full thread URL."""
lst = self.resource_url.split('/')
lst.reverse()
return lst[0]
def get_download_link(self):
"""Tries to find .torrent file download link at forum thread page
and return that one."""
linkToFind = "d.rutor.org/download/" + self.get_id_from_link()
response, page_html = self.get_resource(self.resource_url)
page_links = self.find_links(page_html)
download_link = None
for page_link in page_links:
if linkToFind in page_link:
download_link = page_link
break
return download_link
def download_torrent(self, url):
"""Gets .torrent file contents from given URL and
stores that in a temporary file within a filesystem.
Returns a path to that file.
"""
self.debug('Downloading torrent file from %s ...' % url)
# That was a check that user himself visited torrent's page ;)
response, contents = self.get_resource(url)
return self.store_tmp_torrent(contents)
# With that one we tell Updatorr to handle links to the `rutor.org` domain with the RutorHandler class.
register_tracker_handler('rutor.org', RutorHandler)
|
|
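A small design note on get_id_from_link above: reversing a copy of the split URL just to read its first element is an indirect way to take the last path component. A more direct sketch, with a guard for a possible trailing slash (whether rutor thread URLs carry one is an assumption here):

def get_id_from_link(self):
    """Returns the torrent identifier, i.e. the last URL path component."""
    return self.resource_url.rstrip('/').split('/')[-1]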
bed4f189cd6bf9534ef152b780de0fca9e9c546a
|
tests/util.py
|
tests/util.py
|
from unittest import TestCase
from evesrp import create_app, db
class TestApp(TestCase):
def setUp(self):
self.app = create_app()
self.app.testing = True
self.app.config['SECRET_KEY'] = 'testing'
self.app.config['USER_AGENT_EMAIL'] = 'testing@example.com'
db.create_all(app=self.app)
|
from unittest import TestCase
from evesrp import create_app, db
class TestApp(TestCase):
def setUp(self):
self.app = create_app()
self.app.testing = True
self.app.config['SECRET_KEY'] = 'testing'
self.app.config['USER_AGENT_EMAIL'] = 'testing@example.com'
self.app.config['WTF_CSRF_ENABLED'] = False
db.create_all(app=self.app)
|
Disable all CSRF protection while testing
|
Disable all CSRF protection while testing
|
Python
|
bsd-2-clause
|
eskwire/evesrp,eskwire/evesrp,paxswill/evesrp,eskwire/evesrp,eskwire/evesrp,paxswill/evesrp,paxswill/evesrp
|
from unittest import TestCase
from evesrp import create_app, db
class TestApp(TestCase):
def setUp(self):
self.app = create_app()
self.app.testing = True
self.app.config['SECRET_KEY'] = 'testing'
self.app.config['USER_AGENT_EMAIL'] = 'testing@example.com'
db.create_all(app=self.app)
Disable all CSRF protection while testing
|
from unittest import TestCase
from evesrp import create_app, db
class TestApp(TestCase):
def setUp(self):
self.app = create_app()
self.app.testing = True
self.app.config['SECRET_KEY'] = 'testing'
self.app.config['USER_AGENT_EMAIL'] = 'testing@example.com'
self.app.config['WTF_CSRF_ENABLED'] = False
db.create_all(app=self.app)
|
<commit_before>from unittest import TestCase
from evesrp import create_app, db
class TestApp(TestCase):
def setUp(self):
self.app = create_app()
self.app.testing = True
self.app.config['SECRET_KEY'] = 'testing'
self.app.config['USER_AGENT_EMAIL'] = 'testing@example.com'
db.create_all(app=self.app)
<commit_msg>Disable all CSRF protection while testing<commit_after>
|
from unittest import TestCase
from evesrp import create_app, db
class TestApp(TestCase):
def setUp(self):
self.app = create_app()
self.app.testing = True
self.app.config['SECRET_KEY'] = 'testing'
self.app.config['USER_AGENT_EMAIL'] = 'testing@example.com'
self.app.config['WTF_CSRF_ENABLED'] = False
db.create_all(app=self.app)
|
from unittest import TestCase
from evesrp import create_app, db
class TestApp(TestCase):
def setUp(self):
self.app = create_app()
self.app.testing = True
self.app.config['SECRET_KEY'] = 'testing'
self.app.config['USER_AGENT_EMAIL'] = 'testing@example.com'
db.create_all(app=self.app)
Disable all CSRF protection while testingfrom unittest import TestCase
from evesrp import create_app, db
class TestApp(TestCase):
def setUp(self):
self.app = create_app()
self.app.testing = True
self.app.config['SECRET_KEY'] = 'testing'
self.app.config['USER_AGENT_EMAIL'] = 'testing@example.com'
self.app.config['WTF_CSRF_ENABLED'] = False
db.create_all(app=self.app)
|
<commit_before>from unittest import TestCase
from evesrp import create_app, db
class TestApp(TestCase):
def setUp(self):
self.app = create_app()
self.app.testing = True
self.app.config['SECRET_KEY'] = 'testing'
self.app.config['USER_AGENT_EMAIL'] = 'testing@example.com'
db.create_all(app=self.app)
<commit_msg>Disable all CSRF protection while testing<commit_after>from unittest import TestCase
from evesrp import create_app, db
class TestApp(TestCase):
def setUp(self):
self.app = create_app()
self.app.testing = True
self.app.config['SECRET_KEY'] = 'testing'
self.app.config['USER_AGENT_EMAIL'] = 'testing@example.com'
self.app.config['WTF_CSRF_ENABLED'] = False
db.create_all(app=self.app)
|
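Context for the change above: with WTF_CSRF_ENABLED set to False, Flask-WTF skips CSRF validation entirely, so tests can POST form data without first rendering a page to harvest a csrf_token field. A hypothetical test request (the endpoint and form fields are assumptions, not part of this app):

def test_post_without_csrf_token(self):
    client = self.app.test_client()
    # No csrf_token field is required once WTF_CSRF_ENABLED is False.
    response = client.post('/login', data={'name': 'tester'})
    self.assertNotEqual(response.status_code, 400)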
199943deb4da7c68f08f578b404dbc6208cc41ac
|
spacy/tests/regression/util_add_marker.py
|
spacy/tests/regression/util_add_marker.py
|
import re
from pathlib import Path
from typing import Optional
import typer
def main(
filename: Path, out_file: Optional[Path] = typer.Option(None), dry_run: bool = False
):
"""Add pytest issue markers on regression tests
If --out-file is not used, it will overwrite the original file. You can set
the --dry-run flag to just see the changeset and not write to disk.
"""
lines = []
with filename.open() as f:
lines = f.readlines()
# Regex pattern for matching common regression formats (e.g. test_issue1234)
pattern = r"def test_issue\d{1,4}"
regex = re.compile(pattern)
new_lines = []
for line_text in lines:
if regex.search(line_text): # if match, append marker first
issue_num = int(re.findall(r"\d+", line_text)[0]) # Simple heuristic
typer.echo(f"Found: {line_text} with issue number: {issue_num}")
new_lines.append(f"@pytest.mark.issue({issue_num})\n")
new_lines.append(line_text)
# Save to file
if not dry_run:
out = out_file or filename
with out.open("w") as f:
for new_line in new_lines:
f.write(new_line)
if __name__ == "__main__":
typer.run(main)
|
Add simple script to add pytest marks
|
Add simple script to add pytest marks
|
Python
|
mit
|
explosion/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,honnibal/spaCy,explosion/spaCy,honnibal/spaCy,honnibal/spaCy
|
Add simple script to add pytest marks
|
import re
from pathlib import Path
from typing import Optional
import typer
def main(
filename: Path, out_file: Optional[Path] = typer.Option(None), dry_run: bool = False
):
"""Add pytest issue markers on regression tests
If --out-file is not used, it will overwrite the original file. You can set
the --dry-run flag to just see the changeset and not write to disk.
"""
lines = []
with filename.open() as f:
lines = f.readlines()
# Regex pattern for matching common regression formats (e.g. test_issue1234)
pattern = r"def test_issue\d{1,4}"
regex = re.compile(pattern)
new_lines = []
for line_text in lines:
if regex.search(line_text): # if match, append marker first
issue_num = int(re.findall(r"\d+", line_text)[0]) # Simple heuristic
typer.echo(f"Found: {line_text} with issue number: {issue_num}")
new_lines.append(f"@pytest.mark.issue({issue_num})\n")
new_lines.append(line_text)
# Save to file
if not dry_run:
out = out_file or filename
with out.open("w") as f:
for new_line in new_lines:
f.write(new_line)
if __name__ == "__main__":
typer.run(main)
|
<commit_before><commit_msg>Add simple script to add pytest marks<commit_after>
|
import re
from pathlib import Path
from typing import Optional
import typer
def main(
filename: Path, out_file: Optional[Path] = typer.Option(None), dry_run: bool = False
):
"""Add pytest issue markers on regression tests
If --out-file is not used, it will overwrite the original file. You can set
the --dry-run flag to just see the changeset and not write to disk.
"""
lines = []
with filename.open() as f:
lines = f.readlines()
# Regex pattern for matching common regression formats (e.g. test_issue1234)
pattern = r"def test_issue\d{1,4}"
regex = re.compile(pattern)
new_lines = []
for line_text in lines:
if regex.search(line_text): # if match, append marker first
issue_num = int(re.findall(r"\d+", line_text)[0]) # Simple heuristic
typer.echo(f"Found: {line_text} with issue number: {issue_num}")
new_lines.append(f"@pytest.mark.issue({issue_num})\n")
new_lines.append(line_text)
# Save to file
if not dry_run:
out = out_file or filename
with out.open("w") as f:
for new_line in new_lines:
f.write(new_line)
if __name__ == "__main__":
typer.run(main)
|
Add simple script to add pytest marksimport re
from pathlib import Path
from typing import Optional
import typer
def main(
filename: Path, out_file: Optional[Path] = typer.Option(None), dry_run: bool = False
):
"""Add pytest issue markers on regression tests
If --out-file is not used, it will overwrite the original file. You can set
the --dry-run flag to just see the changeset and not write to disk.
"""
lines = []
with filename.open() as f:
lines = f.readlines()
# Regex pattern for matching common regression formats (e.g. test_issue1234)
pattern = r"def test_issue\d{1,4}"
regex = re.compile(pattern)
new_lines = []
for line_text in lines:
if regex.search(line_text): # if match, append marker first
issue_num = int(re.findall(r"\d+", line_text)[0]) # Simple heuristic
typer.echo(f"Found: {line_text} with issue number: {issue_num}")
new_lines.append(f"@pytest.mark.issue({issue_num})\n")
new_lines.append(line_text)
# Save to file
if not dry_run:
out = out_file or filename
with out.open("w") as f:
for new_line in new_lines:
f.write(new_line)
if __name__ == "__main__":
typer.run(main)
|
<commit_before><commit_msg>Add simple script to add pytest marks<commit_after>import re
from pathlib import Path
from typing import Optional
import typer
def main(
filename: Path, out_file: Optional[Path] = typer.Option(None), dry_run: bool = False
):
"""Add pytest issue markers on regression tests
If --out-file is not used, it will overwrite the original file. You can set
the --dry-run flag to just see the changeset and not write to disk.
"""
lines = []
with filename.open() as f:
lines = f.readlines()
# Regex pattern for matching common regression formats (e.g. test_issue1234)
pattern = r"def test_issue\d{1,4}"
regex = re.compile(pattern)
new_lines = []
for line_text in lines:
if regex.search(line_text): # if match, append marker first
issue_num = int(re.findall(r"\d+", line_text)[0]) # Simple heuristic
typer.echo(f"Found: {line_text} with issue number: {issue_num}")
new_lines.append(f"@pytest.mark.issue({issue_num})\n")
new_lines.append(line_text)
# Save to file
if not dry_run:
out = out_file or filename
with out.open("w") as f:
for new_line in new_lines:
f.write(new_line)
if __name__ == "__main__":
typer.run(main)
|
|
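Usage note for the script above: Typer turns main's signature into a CLI with one positional file path plus --out-file and --dry-run options, so a trial run over a single regression file would look like python util_add_marker.py test_issue1234.py --dry-run. The transformation it performs, sketched as before/after:

# Before:
#     def test_issue1234():
# After (marker line written immediately above the match):
#     @pytest.mark.issue(1234)
#     def test_issue1234():

One caveat: the script inserts @pytest.mark.issue(...) decorators but never adds an import pytest line, so it assumes the target files already import pytest, as regression test files typically do.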
1342a1a6bf37018092ec949eb7476b3d8b75971c
|
tests/mock_api/test_mock_api_columns.py
|
tests/mock_api/test_mock_api_columns.py
|
# pylint: disable=C0103,W0232
from parameterized import parameterized, param
from smartsheet.models import Column
from mock_api_test_helper import MockApiTestHelper, clean_api_error
class TestMockApiColumns(MockApiTestHelper):
@parameterized([
param(
scenario='Update Column - Change Type - Picklist',
get_method=lambda client: client.Sheets.update_column,
params=[
123,
234,
Column({
'index': 2,
'title': 'Updated Column',
'type': 'PICKLIST',
'options': [
'An',
'updated',
'column'
],
'width': 200
})
],
get_validation_field=lambda response: response.result.title,
expected_validation_val='Updated Column'
),
param(
scenario='Update Column - Change Type - Contact List',
get_method=lambda client: client.Sheets.update_column,
params=[
123,
234,
Column({
"index": 2,
"title": "Updated Column",
"type": "CONTACT_LIST",
"contactOptions": [
{
"name": "Some Contact",
"email": "some.contact@smartsheet.com"
},
{
"name": "Some Other Contact",
"email": "some.other.contact@smartsheet.com"
}
],
"width": 200
})
],
get_validation_field=lambda response: response.result.title,
expected_validation_val='Updated Column'
)
])
@clean_api_error
def test_successful(
self,
scenario,
get_method,
params,
get_validation_field,
expected_validation_val):
self.client.as_test_scenario(scenario)
response = get_method(self.client)(*params)
assert get_validation_field(response) == expected_validation_val
|
Add tests for updating column types.
|
Add tests for updating column types.
|
Python
|
apache-2.0
|
smartsheet-platform/smartsheet-python-sdk,smartsheet-platform/smartsheet-python-sdk
|
Add tests for updating column types.
|
# pylint: disable=C0103,W0232
from parameterized import parameterized, param
from smartsheet.models import Column
from mock_api_test_helper import MockApiTestHelper, clean_api_error
class TestMockApiColumns(MockApiTestHelper):
@parameterized([
param(
scenario='Update Column - Change Type - Picklist',
get_method=lambda client: client.Sheets.update_column,
params=[
123,
234,
Column({
'index': 2,
'title': 'Updated Column',
'type': 'PICKLIST',
'options': [
'An',
'updated',
'column'
],
'width': 200
})
],
get_validation_field=lambda response: response.result.title,
expected_validation_val='Updated Column'
),
param(
scenario='Update Column - Change Type - Contact List',
get_method=lambda client: client.Sheets.update_column,
params=[
123,
234,
Column({
"index": 2,
"title": "Updated Column",
"type": "CONTACT_LIST",
"contactOptions": [
{
"name": "Some Contact",
"email": "some.contact@smartsheet.com"
},
{
"name": "Some Other Contact",
"email": "some.other.contact@smartsheet.com"
}
],
"width": 200
})
],
get_validation_field=lambda response: response.result.title,
expected_validation_val='Updated Column'
)
])
@clean_api_error
def test_successful(
self,
scenario,
get_method,
params,
get_validation_field,
expected_validation_val):
self.client.as_test_scenario(scenario)
response = get_method(self.client)(*params)
assert get_validation_field(response) == expected_validation_val
|
<commit_before><commit_msg>Add tests for updating column types.<commit_after>
|
# pylint: disable=C0103,W0232
from parameterized import parameterized, param
from smartsheet.models import Column
from mock_api_test_helper import MockApiTestHelper, clean_api_error
class TestMockApiColumns(MockApiTestHelper):
@parameterized([
param(
scenario='Update Column - Change Type - Picklist',
get_method=lambda client: client.Sheets.update_column,
params=[
123,
234,
Column({
'index': 2,
'title': 'Updated Column',
'type': 'PICKLIST',
'options': [
'An',
'updated',
'column'
],
'width': 200
})
],
get_validation_field=lambda response: response.result.title,
expected_validation_val='Updated Column'
),
param(
scenario='Update Column - Change Type - Contact List',
get_method=lambda client: client.Sheets.update_column,
params=[
123,
234,
Column({
"index": 2,
"title": "Updated Column",
"type": "CONTACT_LIST",
"contactOptions": [
{
"name": "Some Contact",
"email": "some.contact@smartsheet.com"
},
{
"name": "Some Other Contact",
"email": "some.other.contact@smartsheet.com"
}
],
"width": 200
})
],
get_validation_field=lambda response: response.result.title,
expected_validation_val='Updated Column'
)
])
@clean_api_error
def test_successful(
self,
scenario,
get_method,
params,
get_validation_field,
expected_validation_val):
self.client.as_test_scenario(scenario)
response = get_method(self.client)(*params)
assert get_validation_field(response) == expected_validation_val
|
Add tests for updating column types.# pylint: disable=C0103,W0232
from parameterized import parameterized, param
from smartsheet.models import Column
from mock_api_test_helper import MockApiTestHelper, clean_api_error
class TestMockApiColumns(MockApiTestHelper):
@parameterized([
param(
scenario='Update Column - Change Type - Picklist',
get_method=lambda client: client.Sheets.update_column,
params=[
123,
234,
Column({
'index': 2,
'title': 'Updated Column',
'type': 'PICKLIST',
'options': [
'An',
'updated',
'column'
],
'width': 200
})
],
get_validation_field=lambda response: response.result.title,
expected_validation_val='Updated Column'
),
param(
scenario='Update Column - Change Type - Contact List',
get_method=lambda client: client.Sheets.update_column,
params=[
123,
234,
Column({
"index": 2,
"title": "Updated Column",
"type": "CONTACT_LIST",
"contactOptions": [
{
"name": "Some Contact",
"email": "some.contact@smartsheet.com"
},
{
"name": "Some Other Contact",
"email": "some.other.contact@smartsheet.com"
}
],
"width": 200
})
],
get_validation_field=lambda response: response.result.title,
expected_validation_val='Updated Column'
)
])
@clean_api_error
def test_successful(
self,
scenario,
get_method,
params,
get_validation_field,
expected_validation_val):
self.client.as_test_scenario(scenario)
response = get_method(self.client)(*params)
assert get_validation_field(response) == expected_validation_val
|
<commit_before><commit_msg>Add tests for updating column types.<commit_after># pylint: disable=C0103,W0232
from parameterized import parameterized, param
from smartsheet.models import Column
from mock_api_test_helper import MockApiTestHelper, clean_api_error
class TestMockApiColumns(MockApiTestHelper):
@parameterized([
param(
scenario='Update Column - Change Type - Picklist',
get_method=lambda client: client.Sheets.update_column,
params=[
123,
234,
Column({
'index': 2,
'title': 'Updated Column',
'type': 'PICKLIST',
'options': [
'An',
'updated',
'column'
],
'width': 200
})
],
get_validation_field=lambda response: response.result.title,
expected_validation_val='Updated Column'
),
param(
scenario='Update Column - Change Type - Contact List',
get_method=lambda client: client.Sheets.update_column,
params=[
123,
234,
Column({
"index": 2,
"title": "Updated Column",
"type": "CONTACT_LIST",
"contactOptions": [
{
"name": "Some Contact",
"email": "some.contact@smartsheet.com"
},
{
"name": "Some Other Contact",
"email": "some.other.contact@smartsheet.com"
}
],
"width": 200
})
],
get_validation_field=lambda response: response.result.title,
expected_validation_val='Updated Column'
)
])
@clean_api_error
def test_successful(
self,
scenario,
get_method,
params,
get_validation_field,
expected_validation_val):
self.client.as_test_scenario(scenario)
response = get_method(self.client)(*params)
assert get_validation_field(response) == expected_validation_val
|
|
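One note on the decorator above: the parameterized library documents bare @parameterized([...]) for plain test functions, while test methods on unittest.TestCase subclasses are expected to use @parameterized.expand([...]). Whether that applies here depends on what MockApiTestHelper inherits from, which this record does not show. A self-contained sketch of the expand form:

from unittest import TestCase
from parameterized import parameterized, param

class ExpandExample(TestCase):
    @parameterized.expand([
        param(value=1, expected=2),
        param(value=2, expected=4),
    ])
    def test_double(self, value, expected):
        self.assertEqual(value * 2, expected)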
4223233a301657afb16536e4e589fd6ab0bec464
|
sparqllib/querycomponent/tests/test_triple.py
|
sparqllib/querycomponent/tests/test_triple.py
|
import unittest
import sparqllib
from rdflib import BNode, Literal
class TestTriple(unittest.TestCase):
def setUp(self):
self.subject = BNode("subject")
self.relation = BNode("relation")
self.object = Literal("Cats")
self.triple = sparqllib.Triple(self.subject,
self.relation,
self.object)
def test_init(self):
with self.assertRaises(TypeError):
sparqllib.Triple()
def test_eq(self):
self.assertTrue(self.triple == (self.subject, self.relation, self.object))
self.assertTrue((self.subject, self.relation, self.object) == self.triple)
self.assertFalse(self.triple == ())
self.assertFalse(self.triple == (self.subject,))
self.assertFalse(self.triple == (self.subject, self.relation, "Dogs"))
def test_serialize(self):
self.assertEqual(self.triple.serialize(),
"?subject ?relation \"Cats\" .\n")
self.triple.object = Literal("Dogs", lang='en')
self.assertEqual(self.triple.serialize(),
"?subject ?relation \"Dogs\"@en .\n")
if __name__ == '__main__':
unittest.main()
|
Test for Triple query component
|
Test for Triple query component
|
Python
|
mit
|
ALSchwalm/sparqllib
|
Test for Triple query component
|
import unittest
import sparqllib
from rdflib import BNode, Literal
class TestTriple(unittest.TestCase):
def setUp(self):
self.subject = BNode("subject")
self.relation = BNode("relation")
self.object = Literal("Cats")
self.triple = sparqllib.Triple(self.subject,
self.relation,
self.object)
def test_init(self):
with self.assertRaises(TypeError):
sparqllib.Triple()
def test_eq(self):
self.assertTrue(self.triple == (self.subject, self.relation, self.object))
self.assertTrue((self.subject, self.relation, self.object) == self.triple)
self.assertFalse(self.triple == ())
self.assertFalse(self.triple == (self.subject,))
self.assertFalse(self.triple == (self.subject, self.relation, "Dogs"))
def test_serialize(self):
self.assertEqual(self.triple.serialize(),
"?subject ?relation \"Cats\" .\n")
self.triple.object = Literal("Dogs", lang='en')
self.assertEqual(self.triple.serialize(),
"?subject ?relation \"Dogs\"@en .\n")
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Test for Triple query component<commit_after>
|
import unittest
import sparqllib
from rdflib import BNode, Literal
class TestTriple(unittest.TestCase):
def setUp(self):
self.subject = BNode("subject")
self.relation = BNode("relation")
self.object = Literal("Cats")
self.triple = sparqllib.Triple(self.subject,
self.relation,
self.object)
def test_init(self):
with self.assertRaises(TypeError):
sparqllib.Triple()
def test_eq(self):
self.assertTrue(self.triple == (self.subject, self.relation, self.object))
self.assertTrue((self.subject, self.relation, self.object) == self.triple)
self.assertFalse(self.triple == ())
self.assertFalse(self.triple == (self.subject,))
self.assertFalse(self.triple == (self.subject, self.relation, "Dogs"))
def test_serialize(self):
self.assertEqual(self.triple.serialize(),
"?subject ?relation \"Cats\" .\n")
self.triple.object = Literal("Dogs", lang='en')
self.assertEqual(self.triple.serialize(),
"?subject ?relation \"Dogs\"@en .\n")
if __name__ == '__main__':
unittest.main()
|
Test for Triple query component
import unittest
import sparqllib
from rdflib import BNode, Literal
class TestTriple(unittest.TestCase):
def setUp(self):
self.subject = BNode("subject")
self.relation = BNode("relation")
self.object = Literal("Cats")
self.triple = sparqllib.Triple(self.subject,
self.relation,
self.object)
def test_init(self):
with self.assertRaises(TypeError):
sparqllib.Triple()
def test_eq(self):
self.assertTrue(self.triple == (self.subject, self.relation, self.object))
self.assertTrue((self.subject, self.relation, self.object) == self.triple)
self.assertFalse(self.triple == ())
self.assertFalse(self.triple == (self.subject))
self.assertFalse(self.triple == (self.subject, self.relation, "Dogs"))
def test_serialize(self):
self.assertEqual(self.triple.serialize(),
"?subject ?relation \"Cats\" .\n")
self.triple.object = Literal("Dogs", lang='en')
self.assertEqual(self.triple.serialize(),
"?subject ?relation \"Dogs\"@en .\n")
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Test for Triple query component<commit_after>
import unittest
import sparqllib
from rdflib import BNode, Literal
class TestTriple(unittest.TestCase):
def setUp(self):
self.subject = BNode("subject")
self.relation = BNode("relation")
self.object = Literal("Cats")
self.triple = sparqllib.Triple(self.subject,
self.relation,
self.object)
def test_init(self):
with self.assertRaises(TypeError):
sparqllib.Triple()
def test_eq(self):
self.assertTrue(self.triple == (self.subject, self.relation, self.object))
self.assertTrue((self.subject, self.relation, self.object) == self.triple)
self.assertFalse(self.triple == ())
self.assertFalse(self.triple == (self.subject,))
self.assertFalse(self.triple == (self.subject, self.relation, "Dogs"))
def test_serialize(self):
self.assertEqual(self.triple.serialize(),
"?subject ?relation \"Cats\" .\n")
self.triple.object = Literal("Dogs", lang='en')
self.assertEqual(self.triple.serialize(),
"?subject ?relation \"Dogs\"@en .\n")
if __name__ == '__main__':
unittest.main()
|
|
fbd7da82e8231c025eaaf9dd60f94d104583c02c
|
crmapp/accounts/urls.py
|
crmapp/accounts/urls.py
|
from django.conf.urls import patterns, url
account_urls = patterns('',
url(r'^$',
'crmapp.accounts.views.account_detail', name='account_detail'
),
)
|
from django.conf.urls import patterns, url
account_urls = patterns('',
url(r'^$',
'crmapp.accounts.views.account_detail', name='account_detail'
),
url(r'^edit/$',
'crmapp.accounts.views.account_cru', name='account_update'
),
)
|
Create the Account Detail Page - Part II > Edit Account - Create URL Conf
|
Create the Account Detail Page - Part II > Edit Account - Create URL Conf
|
Python
|
mit
|
tabdon/crmeasyapp,tabdon/crmeasyapp,deenaariff/Django
|
from django.conf.urls import patterns, url
account_urls = patterns('',
url(r'^$',
'crmapp.accounts.views.account_detail', name='account_detail'
),
)
Create the Account Detail Page - Part II > Edit Account - Create URL Conf
|
from django.conf.urls import patterns, url
account_urls = patterns('',
url(r'^$',
'crmapp.accounts.views.account_detail', name='account_detail'
),
url(r'^edit/$',
'crmapp.accounts.views.account_cru', name='account_update'
),
)
|
<commit_before>from django.conf.urls import patterns, url
account_urls = patterns('',
url(r'^$',
'crmapp.accounts.views.account_detail', name='account_detail'
),
)
<commit_msg>Create the Account Detail Page - Part II > Edit Account - Create URL Conf<commit_after>
|
from django.conf.urls import patterns, url
account_urls = patterns('',
url(r'^$',
'crmapp.accounts.views.account_detail', name='account_detail'
),
url(r'^edit/$',
'crmapp.accounts.views.account_cru', name='account_update'
),
)
|
from django.conf.urls import patterns, url
account_urls = patterns('',
url(r'^$',
'crmapp.accounts.views.account_detail', name='account_detail'
),
)
Create the Account Detail Page - Part II > Edit Account - Create URL Conffrom django.conf.urls import patterns, url
account_urls = patterns('',
url(r'^$',
'crmapp.accounts.views.account_detail', name='account_detail'
),
url(r'^edit/$',
'crmapp.accounts.views.account_cru', name='account_update'
),
)
|
<commit_before>from django.conf.urls import patterns, url
account_urls = patterns('',
url(r'^$',
'crmapp.accounts.views.account_detail', name='account_detail'
),
)
<commit_msg>Create the Account Detail Page - Part II > Edit Account - Create URL Conf<commit_after>from django.conf.urls import patterns, url
account_urls = patterns('',
url(r'^$',
'crmapp.accounts.views.account_detail', name='account_detail'
),
url(r'^edit/$',
'crmapp.accounts.views.account_cru', name='account_update'
),
)
|
76afa26f17de1d10dc3f81b08182aa5febca8e8d
|
core/migrations/0010_auto_20210311_1521.py
|
core/migrations/0010_auto_20210311_1521.py
|
# Generated by Django 2.2.14 on 2021-03-11 15:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0009_auto_20200825_0800'),
]
operations = [
migrations.AlterField(
model_name='pin',
name='referer',
field=models.CharField(blank=True, max_length=2048, null=True),
),
migrations.AlterField(
model_name='pin',
name='url',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]
|
Add migrations for url and referer length change
|
Feature: Add migrations for url and referer length change
|
Python
|
bsd-2-clause
|
pinry/pinry,pinry/pinry,pinry/pinry,pinry/pinry
|
Feature: Add migrations for url and referer length change
|
# Generated by Django 2.2.14 on 2021-03-11 15:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0009_auto_20200825_0800'),
]
operations = [
migrations.AlterField(
model_name='pin',
name='referer',
field=models.CharField(blank=True, max_length=2048, null=True),
),
migrations.AlterField(
model_name='pin',
name='url',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]
|
<commit_before><commit_msg>Feature: Add migrations for url and referer length change<commit_after>
|
# Generated by Django 2.2.14 on 2021-03-11 15:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0009_auto_20200825_0800'),
]
operations = [
migrations.AlterField(
model_name='pin',
name='referer',
field=models.CharField(blank=True, max_length=2048, null=True),
),
migrations.AlterField(
model_name='pin',
name='url',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]
|
Feature: Add migrations for url and referer length change# Generated by Django 2.2.14 on 2021-03-11 15:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0009_auto_20200825_0800'),
]
operations = [
migrations.AlterField(
model_name='pin',
name='referer',
field=models.CharField(blank=True, max_length=2048, null=True),
),
migrations.AlterField(
model_name='pin',
name='url',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]
|
<commit_before><commit_msg>Feature: Add migrations for url and referer length change<commit_after># Generated by Django 2.2.14 on 2021-03-11 15:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0009_auto_20200825_0800'),
]
operations = [
migrations.AlterField(
model_name='pin',
name='referer',
field=models.CharField(blank=True, max_length=2048, null=True),
),
migrations.AlterField(
model_name='pin',
name='url',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]
|
|
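For context, an AlterField pair like the one above is what makemigrations emits after the model fields themselves are widened; the model declaration this migration reflects would be, in sketch form (field names and options taken from the operations, the model name inferred from model_name='pin'):

from django.db import models

class Pin(models.Model):
    url = models.CharField(max_length=2048, null=True, blank=True)
    referer = models.CharField(max_length=2048, null=True, blank=True)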
a7908b4f6369f5a29e72fa828aff12285e3f3d25
|
app/applications.py
|
app/applications.py
|
from . import data_structures
# 1. Stack application
def balanced_parentheses_checker(symbol_string):
"""Verify that a set of parentheses is balanced."""
opening_symbols = '{[('
closing_symbols = '}])'
opening_symbols_stack = data_structures.Stack()
symbol_count = len(symbol_string)
counter = 0
while counter < symbol_count:
current_symbol = symbol_string[counter]
if current_symbol in opening_symbols:
opening_symbols_stack.push(current_symbol)
else:
if not opening_symbols_stack.is_empty() and \
opening_symbols.index(opening_symbols_stack.peek()) == \
closing_symbols.index(current_symbol):
opening_symbols_stack.pop()
else:
counter = symbol_count
counter += 1
return opening_symbols_stack.is_empty() and counter == symbol_count
if __name__ == '__main__':
print(balanced_parentheses_checker('[]{[]{([][])}()}'))
|
Apply stack in providing an efficient balanced parentheses-checker
|
Apply stack in providing an efficient balanced parentheses-checker
|
Python
|
mit
|
andela-kerinoso/data_structures_algo
|
Apply stack in providing an efficient balanced parentheses-checker
|
from . import data_structures
# 1. Stack application
def balanced_parentheses_checker(symbol_string):
"""Verify that a set of parentheses is balanced."""
opening_symbols = '{[('
closing_symbols = '}])'
opening_symbols_stack = data_structures.Stack()
symbol_count = len(symbol_string)
counter = 0
while counter < symbol_count:
current_symbol = symbol_string[counter]
if current_symbol in opening_symbols:
opening_symbols_stack.push(current_symbol)
else:
if not opening_symbols_stack.is_empty() and \
opening_symbols.index(opening_symbols_stack.peek()) == \
closing_symbols.index(current_symbol):
opening_symbols_stack.pop()
else:
counter = symbol_count
counter += 1
return opening_symbols_stack.is_empty() and counter == symbol_count
if __name__ == '__main__':
print(balanced_parentheses_checker('[]{[]{([][])}()}'))
|
<commit_before><commit_msg>Apply stack in providing an efficient balanced parentheses-checker<commit_after>
|
from . import data_structures
# 1. Stack application
def balanced_parentheses_checker(symbol_string):
"""Verify that a set of parentheses is balanced."""
opening_symbols = '{[('
closing_symbols = '}])'
opening_symbols_stack = data_structures.Stack()
symbol_count = len(symbol_string)
counter = 0
while counter < symbol_count:
current_symbol = symbol_string[counter]
if current_symbol in opening_symbols:
opening_symbols_stack.push(current_symbol)
else:
if not opening_symbols_stack.is_empty() and \
opening_symbols.index(opening_symbols_stack.peek()) == \
closing_symbols.index(current_symbol):
opening_symbols_stack.pop()
else:
counter = symbol_count
counter += 1
return opening_symbols_stack.is_empty() and counter == symbol_count
if __name__ == '__main__':
print(balanced_parentheses_checker('[]{[]{([][])}()}'))
|
Apply stack in providing an efficient balanced parentheses-checkerfrom . import data_structures
# 1. Stack application
def balanced_parentheses_checker(symbol_string):
"""Verify that a set of parentheses is balanced."""
opening_symbols = '{[('
closing_symbols = '}])'
opening_symbols_stack = data_structures.Stack()
symbol_count = len(symbol_string)
counter = 0
while counter < symbol_count:
current_symbol = symbol_string[counter]
if current_symbol in '{[(':
opening_symbols_stack.push(current_symbol)
else:
if not opening_symbols_stack.is_empty() and \
opening_symbols.index(opening_symbols_stack.peek()) == \
closing_symbols.index(current_symbol):
opening_symbols_stack.pop()
else:
counter = symbol_count
counter += 1
return opening_symbols_stack.is_empty() and counter == symbol_count
if __name__ == '__main__':
print(balanced_parentheses_checker('[]{[]{([][])}()}'))
|
<commit_before><commit_msg>Apply stack in providing an efficient balanced parentheses-checker<commit_after>from . import data_structures
# 1. Stack application
def balanced_parentheses_checker(symbol_string):
"""Verify that a set of parentheses is balanced."""
opening_symbols = '{[('
closing_symbols = '}])'
opening_symbols_stack = data_structures.Stack()
symbol_count = len(symbol_string)
counter = 0
while counter < symbol_count:
current_symbol = symbol_string[counter]
if current_symbol in '{[(':
opening_symbols_stack.push(current_symbol)
else:
if not opening_symbols_stack.is_empty() and \
opening_symbols.index(opening_symbols_stack.peek()) == \
closing_symbols.index(current_symbol):
opening_symbols_stack.pop()
else:
counter = symbol_count
counter += 1
return opening_symbols_stack.is_empty() and counter == symbol_count
if __name__ == '__main__':
print(balanced_parentheses_checker('[]{[]{([][])}()}'))
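The checker assumes a Stack exposing push, pop, peek and is_empty from the repo's data_structures module; a minimal list-backed stand-in (hypothetical, not the repo's implementation) would be:
class Stack(object):
    def __init__(self):
        self._items = []
    def push(self, item):
        self._items.append(item)  # top of the stack is the end of the list
    def pop(self):
        return self._items.pop()
    def peek(self):
        return self._items[-1]
    def is_empty(self):
        return not self._items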
|
|
7e2c9a4f890cdb7446b2ef892d57008c7a0eee12
|
django_ajax/response.py
|
django_ajax/response.py
|
# -*- coding: utf-8 -*-
"""
Responses
"""
from django.conf import settings
from django.http import HttpResponse
from django_ajax.utils import serialize_to_json
class JSONResponse(HttpResponse):
"""
Return a JSON serialized HTTP response
"""
def __init__(self, data):
"""
        This returns an object that we send as JSON content using
        utils.serialize_to_json, which is a wrapper around the json.dumps
        method using a custom class to handle models and querysets. Put your
        options for serialize_to_json in kwargs; other options are used by
        the response.
"""
super(JSONResponse, self).__init__(
content=serialize_to_json(data, sort_keys=settings.DEBUG),
content_type='application/json'
)
|
# -*- coding: utf-8 -*-
"""
Responses
"""
from django.conf import settings
from django.http import HttpResponse
from django_ajax.utils import serialize_to_json
class JSONResponse(HttpResponse):
"""
Return a JSON serialized HTTP response
"""
def __init__(self, data, *args, **kwargs):
"""
        This returns an object that we send as JSON content using
        utils.serialize_to_json, which is a wrapper around the json.dumps
        method using a custom class to handle models and querysets. Put your
        options for serialize_to_json in kwargs; other options are used by
        the response.
"""
        if 'sort_keys' not in kwargs:
kwargs['sort_keys'] = settings.DEBUG
super(JSONResponse, self).__init__(
content=serialize_to_json(data, *args, **kwargs),
content_type='application/json'
)
|
Add parameters support for json serialize
|
Add parameters support for json serialize
|
Python
|
mit
|
furious-luke/django-ajax,furious-luke/django-ajax,yceruto/django-ajax,furious-luke/django-ajax,yceruto/django-ajax
|
# -*- coding: utf-8 -*-
"""
Responses
"""
from django.conf import settings
from django.http import HttpResponse
from django_ajax.utils import serialize_to_json
class JSONResponse(HttpResponse):
"""
Return a JSON serialized HTTP response
"""
def __init__(self, data):
"""
        This returns an object that we send as JSON content using
        utils.serialize_to_json, which is a wrapper around the json.dumps
        method using a custom class to handle models and querysets. Put your
        options for serialize_to_json in kwargs; other options are used by
        the response.
"""
super(JSONResponse, self).__init__(
content=serialize_to_json(data, sort_keys=settings.DEBUG),
content_type='application/json'
)Add parameters support for json serialize
|
# -*- coding: utf-8 -*-
"""
Responses
"""
from django.conf import settings
from django.http import HttpResponse
from django_ajax.utils import serialize_to_json
class JSONResponse(HttpResponse):
"""
Return a JSON serialized HTTP response
"""
def __init__(self, data, *args, **kwargs):
"""
        This returns an object that we send as JSON content using
        utils.serialize_to_json, which is a wrapper around the json.dumps
        method using a custom class to handle models and querysets. Put your
        options for serialize_to_json in kwargs; other options are used by
        the response.
"""
        if 'sort_keys' not in kwargs:
kwargs['sort_keys'] = settings.DEBUG
super(JSONResponse, self).__init__(
content=serialize_to_json(data, *args, **kwargs),
content_type='application/json'
)
|
<commit_before># -*- coding: utf-8 -*-
"""
Responses
"""
from django.conf import settings
from django.http import HttpResponse
from django_ajax.utils import serialize_to_json
class JSONResponse(HttpResponse):
"""
Return a JSON serialized HTTP response
"""
def __init__(self, data):
"""
        This returns an object that we send as JSON content using
        utils.serialize_to_json, which is a wrapper around the json.dumps
        method using a custom class to handle models and querysets. Put your
        options for serialize_to_json in kwargs; other options are used by
        the response.
"""
super(JSONResponse, self).__init__(
content=serialize_to_json(data, sort_keys=settings.DEBUG),
content_type='application/json'
)<commit_msg>Add parameters support for json serialize<commit_after>
|
# -*- coding: utf-8 -*-
"""
Responses
"""
from django.conf import settings
from django.http import HttpResponse
from django_ajax.utils import serialize_to_json
class JSONResponse(HttpResponse):
"""
Return a JSON serialized HTTP response
"""
def __init__(self, data, *args, **kwargs):
"""
        This returns an object that we send as JSON content using
        utils.serialize_to_json, which is a wrapper around the json.dumps
        method using a custom class to handle models and querysets. Put your
        options for serialize_to_json in kwargs; other options are used by
        the response.
"""
        if 'sort_keys' not in kwargs:
kwargs['sort_keys'] = settings.DEBUG
super(JSONResponse, self).__init__(
content=serialize_to_json(data, *args, **kwargs),
content_type='application/json'
)
|
# -*- coding: utf-8 -*-
"""
Responses
"""
from django.conf import settings
from django.http import HttpResponse
from django_ajax.utils import serialize_to_json
class JSONResponse(HttpResponse):
"""
Return a JSON serialized HTTP response
"""
def __init__(self, data):
"""
        This returns an object that we send as JSON content using
        utils.serialize_to_json, which is a wrapper around the json.dumps
        method using a custom class to handle models and querysets. Put your
        options for serialize_to_json in kwargs; other options are used by
        the response.
"""
super(JSONResponse, self).__init__(
content=serialize_to_json(data, sort_keys=settings.DEBUG),
content_type='application/json'
)Add parameters support for json serialize# -*- coding: utf-8 -*-
"""
Responses
"""
from django.conf import settings
from django.http import HttpResponse
from django_ajax.utils import serialize_to_json
class JSONResponse(HttpResponse):
"""
Return a JSON serialized HTTP response
"""
def __init__(self, data, *args, **kwargs):
"""
        This returns an object that we send as JSON content using
        utils.serialize_to_json, which is a wrapper around the json.dumps
        method using a custom class to handle models and querysets. Put your
        options for serialize_to_json in kwargs; other options are used by
        the response.
"""
        if 'sort_keys' not in kwargs:
kwargs['sort_keys'] = settings.DEBUG
super(JSONResponse, self).__init__(
content=serialize_to_json(data, *args, **kwargs),
content_type='application/json'
)
|
<commit_before># -*- coding: utf-8 -*-
"""
Responses
"""
from django.conf import settings
from django.http import HttpResponse
from django_ajax.utils import serialize_to_json
class JSONResponse(HttpResponse):
"""
Return a JSON serialized HTTP response
"""
def __init__(self, data):
"""
        This returns an object that we send as JSON content using
        utils.serialize_to_json, which is a wrapper around the json.dumps
        method using a custom class to handle models and querysets. Put your
        options for serialize_to_json in kwargs; other options are used by
        the response.
"""
super(JSONResponse, self).__init__(
content=serialize_to_json(data, sort_keys=settings.DEBUG),
content_type='application/json'
)<commit_msg>Add parameters support for json serialize<commit_after># -*- coding: utf-8 -*-
"""
Responses
"""
from django.conf import settings
from django.http import HttpResponse
from django_ajax.utils import serialize_to_json
class JSONResponse(HttpResponse):
"""
Return a JSON serialized HTTP response
"""
def __init__(self, data, *args, **kwargs):
"""
        This returns an object that we send as JSON content using
        utils.serialize_to_json, which is a wrapper around the json.dumps
        method using a custom class to handle models and querysets. Put your
        options for serialize_to_json in kwargs; other options are used by
        the response.
"""
        if 'sort_keys' not in kwargs:
kwargs['sort_keys'] = settings.DEBUG
super(JSONResponse, self).__init__(
content=serialize_to_json(data, *args, **kwargs),
content_type='application/json'
)
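A hypothetical call showing the new pass-through, with indent forwarded through serialize_to_json to json.dumps:
response = JSONResponse({'ok': True, 'items': [1, 2]}, indent=2)
print(response['Content-Type'])  # application/json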
|
ab656c2101f8b8e8a9c63892a3a4d266556d2e73
|
pyfire/logger.py
|
pyfire/logger.py
|
# -*- coding: utf-8 -*-
"""
pyfire.logger
~~~~~~~~~~~~~
Use pocoo's logbook or a simple no-op fallback
:copyright: (c) 2011 by the pyfire Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from logbook import Logger
except ImportError:
class Logger(object):
def __init__(self, name, level=0):
self.name = name
self.level = level
debug = info = warn = warning = notice = error = exception = \
critical = log = lambda *a, **kw: None
|
Add logbook wrapper and no-op fallback
|
Add logbook wrapper and no-op fallback
|
Python
|
bsd-3-clause
|
IgnitedAndExploded/pyfire,IgnitedAndExploded/pyfire
|
Add logbook wrapper and no-op fallback
|
# -*- coding: utf-8 -*-
"""
pyfire.logger
~~~~~~~~~~~~~
Use pocoo's logbook or a simple no-op fallback
:copyright: (c) 2011 by the pyfire Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from logbook import Logger
except ImportError:
class Logger(object):
def __init__(self, name, level=0):
self.name = name
self.level = level
debug = info = warn = warning = notice = error = exception = \
critical = log = lambda *a, **kw: None
|
<commit_before><commit_msg>Add logbook wrapper and no-op fallback<commit_after>
|
# -*- coding: utf-8 -*-
"""
pyfire.logger
~~~~~~~~~~~~~
Use pocoo's logbook or a simple no-op fallback
:copyright: (c) 2011 by the pyfire Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from logbook import Logger
except ImportError:
class Logger(object):
def __init__(self, name, level=0):
self.name = name
self.level = level
debug = info = warn = warning = notice = error = exception = \
critical = log = lambda *a, **kw: None
|
Add logbook wrapper and no-op fallback# -*- coding: utf-8 -*-
"""
pyfire.logger
~~~~~~~~~~~~~
Use pocoo's logbook or a simple no-op fallback
:copyright: (c) 2011 by the pyfire Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from logbook import Logger
except ImportError:
class Logger(object):
def __init__(self, name, level=0):
self.name = name
self.level = level
debug = info = warn = warning = notice = error = exception = \
critical = log = lambda *a, **kw: None
|
<commit_before><commit_msg>Add logbook wrapper and no-op fallback<commit_after># -*- coding: utf-8 -*-
"""
pyfire.logger
~~~~~~~~~~~~~
Use pocoo's logbook or a simple no-op fallback
:copyright: (c) 2011 by the pyfire Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from logbook import Logger
except ImportError:
class Logger(object):
def __init__(self, name, level=0):
self.name = name
self.level = level
debug = info = warn = warning = notice = error = exception = \
critical = log = lambda *a, **kw: None
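Call sites look the same whether or not logbook is installed; a sketch:
log = Logger(__name__)
log.info('emitted via logbook when available, silently dropped otherwise')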
|
|
e17e76d9576213cd6261e9781f6398ce53b8e694
|
uoftscrapers/scrapers/layers/__init__.py
|
uoftscrapers/scrapers/layers/__init__.py
|
import requests
import json
from ..scraper import Scraper
class LayersScraper(Scraper):
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
def __init__(self, name, output_location='.'):
super().__init__(name, output_location)
self.host = 'http://map.utoronto.ca/'
self.s = requests.Session()
def get_layers_json(self, campus):
"""Retrieve the JSON structure from host."""
self.logger.info('Scraping map layers %s.' % campus)
headers = {
'Referer': self.host
}
html = self.s.get('%s%s%s' % (self.host, 'data/map/', campus),
headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(self, entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
|
import requests
import json
from ..scraper import Scraper
class LayersScraper(Scraper):
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
def __init__(self, name, output_location='.'):
super().__init__(name, output_location)
self.host = 'http://map.utoronto.ca/'
self.s = requests.Session()
def get_layers_json(self, campus):
"""Retrieve the JSON structure from host."""
self.logger.info('Scraping map layers %s.' % campus)
headers = {
'Referer': self.host
}
html = self.s.get('%s%s%s' % (self.host, 'data/map/', campus),
headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
|
Remove unnecessary self parameter from static method
|
Remove unnecessary self parameter from static method
|
Python
|
mit
|
kshvmdn/uoft-scrapers,g3wanghc/uoft-scrapers,cobalt-uoft/uoft-scrapers,arkon/uoft-scrapers
|
import requests
import json
from ..scraper import Scraper
class LayersScraper(Scraper):
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
def __init__(self, name, output_location='.'):
super().__init__(name, output_location)
self.host = 'http://map.utoronto.ca/'
self.s = requests.Session()
def get_layers_json(self, campus):
"""Retrieve the JSON structure from host."""
self.logger.info('Scraping map layers %s.' % campus)
headers = {
'Referer': self.host
}
html = self.s.get('%s%s%s' % (self.host, 'data/map/', campus),
headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(self, entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
Remove unnecessary self parameter from static method
|
import requests
import json
from ..scraper import Scraper
class LayersScraper(Scraper):
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
def __init__(self, name, output_location='.'):
super().__init__(name, output_location)
self.host = 'http://map.utoronto.ca/'
self.s = requests.Session()
def get_layers_json(self, campus):
"""Retrieve the JSON structure from host."""
self.logger.info('Scraping map layers %s.' % campus)
headers = {
'Referer': self.host
}
html = self.s.get('%s%s%s' % (self.host, 'data/map/', campus),
headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
|
<commit_before>import requests
import json
from ..scraper import Scraper
class LayersScraper(Scraper):
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
def __init__(self, name, output_location='.'):
super().__init__(name, output_location)
self.host = 'http://map.utoronto.ca/'
self.s = requests.Session()
def get_layers_json(self, campus):
"""Retrieve the JSON structure from host."""
self.logger.info('Scraping map layers %s.' % campus)
headers = {
'Referer': self.host
}
html = self.s.get('%s%s%s' % (self.host, 'data/map/', campus),
headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(self, entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
<commit_msg>Remove unnecessary self parameter from static method<commit_after>
|
import requests
import json
from ..scraper import Scraper
class LayersScraper(Scraper):
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
def __init__(self, name, output_location='.'):
super().__init__(name, output_location)
self.host = 'http://map.utoronto.ca/'
self.s = requests.Session()
def get_layers_json(self, campus):
"""Retrieve the JSON structure from host."""
self.logger.info('Scraping map layers %s.' % campus)
headers = {
'Referer': self.host
}
html = self.s.get('%s%s%s' % (self.host, 'data/map/', campus),
headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
|
import requests
import json
from ..scraper import Scraper
class LayersScraper(Scraper):
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
def __init__(self, name, output_location='.'):
super().__init__(name, output_location)
self.host = 'http://map.utoronto.ca/'
self.s = requests.Session()
def get_layers_json(self, campus):
"""Retrieve the JSON structure from host."""
self.logger.info('Scraping map layers %s.' % campus)
headers = {
'Referer': self.host
}
html = self.s.get('%s%s%s' % (self.host, 'data/map/', campus),
headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(self, entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
Remove unnecessary self parameter from static methodimport requests
import json
from ..scraper import Scraper
class LayersScraper(Scraper):
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
def __init__(self, name, output_location='.'):
super().__init__(name, output_location)
self.host = 'http://map.utoronto.ca/'
self.s = requests.Session()
def get_layers_json(self, campus):
"""Retrieve the JSON structure from host."""
self.logger.info('Scraping map layers %s.' % campus)
headers = {
'Referer': self.host
}
html = self.s.get('%s%s%s' % (self.host, 'data/map/', campus),
headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
|
<commit_before>import requests
import json
from ..scraper import Scraper
class LayersScraper(Scraper):
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
def __init__(self, name, output_location='.'):
super().__init__(name, output_location)
self.host = 'http://map.utoronto.ca/'
self.s = requests.Session()
def get_layers_json(self, campus):
"""Retrieve the JSON structure from host."""
self.logger.info('Scraping map layers %s.' % campus)
headers = {
'Referer': self.host
}
html = self.s.get('%s%s%s' % (self.host, 'data/map/', campus),
headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(self, entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
<commit_msg>Remove unnecessary self parameter from static method<commit_after>import requests
import json
from ..scraper import Scraper
class LayersScraper(Scraper):
"""A superclass for scraping Layers of the UofT Map.
Map is located at http://map.utoronto.ca
"""
def __init__(self, name, output_location='.'):
super().__init__(name, output_location)
self.host = 'http://map.utoronto.ca/'
self.s = requests.Session()
def get_layers_json(self, campus):
"""Retrieve the JSON structure from host."""
self.logger.info('Scraping map layers %s.' % campus)
headers = {
'Referer': self.host
}
html = self.s.get('%s%s%s' % (self.host, 'data/map/', campus),
headers=headers).text
data = json.loads(html)
return data['layers']
@staticmethod
def get_value(entry, val, number=False):
"""Retrieve the desired value from the parsed response dictionary."""
if val in entry.keys():
return entry[val]
else:
return 0 if number else ''
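A quick check of the corrected static method (sample dict assumed, not real map data):
entry = {'title': 'Bahen Centre'}
assert LayersScraper.get_value(entry, 'title') == 'Bahen Centre'
assert LayersScraper.get_value(entry, 'lat', number=True) == 0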
|
2d9e61993bdcfb94124f7fbc4a9d76c6d85042e9
|
networkx_benchmark.py
|
networkx_benchmark.py
|
from __future__ import division
import numpy as np
import networkx as nx
import sys
import time
import graph_util
if len(sys.argv) != 2:
print 'usage: ' + sys.argv[0] + ' <num vertices>'
exit(1)
n = int(sys.argv[1])
print 'max-flow on %d-complete graph\n' % (n)
g = graph_util.complete_graph(n).to_undirected()
print 'n:',n
print 'm:', g.number_of_edges()
start_time = time.clock()
flow_val, flow = nx.ford_fulkerson(g, 0, 1, capacity='capacity')
stop_time = time.clock()
#print 'final flow:\n',flow
print 'time:', stop_time - start_time
exit(0)
|
Add benchmark for networkx ford-fulkerson max-flow
|
Add benchmark for networkx ford-fulkerson max-flow
|
Python
|
mit
|
weinstein/FastMaxFlow
|
Add benchmark for networkx ford-fulkerson max-flow
|
from __future__ import division
import numpy as np
import networkx as nx
import sys
import time
import graph_util
if len(sys.argv) != 2:
print 'usage: ' + sys.argv[0] + ' <num vertices>'
exit(1)
n = int(sys.argv[1])
print 'max-flow on %d-complete graph\n' % (n)
g = graph_util.complete_graph(n).to_undirected()
print 'n:',n
print 'm:', g.number_of_edges()
start_time = time.clock()
flow_val, flow = nx.ford_fulkerson(g, 0, 1, capacity='capacity')
stop_time = time.clock()
#print 'final flow:\n',flow
print 'time:', stop_time - start_time
exit(0)
|
<commit_before><commit_msg>Add benchmark for networkx ford-fulkerson max-flow<commit_after>
|
from __future__ import division
import numpy as np
import networkx as nx
import sys
import time
import graph_util
if len(sys.argv) != 2:
print 'usage: ' + sys.argv[0] + ' <num vertices>'
exit(1)
n = int(sys.argv[1])
print 'max-flow on %d-complete graph\n' % (n)
g = graph_util.complete_graph(n).to_undirected()
print 'n:',n
print 'm:', g.number_of_edges()
start_time = time.clock()
flow_val, flow = nx.ford_fulkerson(g, 0, 1, capacity='capacity')
stop_time = time.clock()
#print 'final flow:\n',flow
print 'time:', stop_time - start_time
exit(0)
|
Add benchmark for networkx ford-fulkerson max-flowfrom __future__ import division
import numpy as np
import networkx as nx
import sys
import time
import graph_util
if len(sys.argv) != 2:
print 'usage: ' + sys.argv[0] + ' <num vertices>'
exit(1)
n = int(sys.argv[1])
print 'max-flow on %d-complete graph\n' % (n)
g = graph_util.complete_graph(n).to_undirected()
print 'n:',n
print 'm:', g.number_of_edges()
start_time = time.clock()
flow_val, flow = nx.ford_fulkerson(g, 0, 1, capacity='capacity')
stop_time = time.clock()
#print 'final flow:\n',flow
print 'time:', stop_time - start_time
exit(0)
|
<commit_before><commit_msg>Add benchmark for networkx ford-fulkerson max-flow<commit_after>from __future__ import division
import numpy as np
import networkx as nx
import sys
import time
import graph_util
if len(sys.argv) != 2:
print 'usage: ' + sys.argv[0] + ' <num vertices>'
exit(1)
n = int(sys.argv[1])
print 'max-flow on %d-complete graph\n' % (n)
g = graph_util.complete_graph(n).to_undirected()
print 'n:',n
print 'm:', g.number_of_edges()
start_time = time.clock()
flow_val, flow = nx.ford_fulkerson(g, 0, 1, capacity='capacity')
stop_time = time.clock()
#print 'final flow:\n',flow
print 'time:', stop_time - start_time
exit(0)
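Note that nx.ford_fulkerson was removed in NetworkX 2.x and time.clock() in Python 3.8; a hypothetical modern equivalent of the timed section (unit capacities assumed, since graph_util is not shown):
import time
import networkx as nx
g = nx.complete_graph(50)
nx.set_edge_attributes(g, 1, 'capacity')  # placeholder capacities
start = time.perf_counter()
flow_value, flow_dict = nx.maximum_flow(g, 0, 1, capacity='capacity')
print('time:', time.perf_counter() - start)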
|
|
ec4895d7a36a269c7be0c58899df933f2522d07f
|
zou/migrations/versions/fee7c696166e_.py
|
zou/migrations/versions/fee7c696166e_.py
|
"""empty message
Revision ID: fee7c696166e
Revises: 6bd3b102d61b
Create Date: 2018-06-22 15:14:44.303391
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = 'fee7c696166e'
down_revision = '6bd3b102d61b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('api_event',
sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('name', sa.String(length=80), nullable=False),
sa.Column('user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=True),
sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['person.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_api_event_name'), 'api_event', ['name'], unique=False)
op.create_index(op.f('ix_api_event_user_id'), 'api_event', ['user_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_api_event_user_id'), table_name='api_event')
op.drop_index(op.f('ix_api_event_name'), table_name='api_event')
op.drop_table('api_event')
# ### end Alembic commands ###
|
Add migration file for event table
|
Add migration file for event table
|
Python
|
agpl-3.0
|
cgwire/zou
|
Add migration file for event table
|
"""empty message
Revision ID: fee7c696166e
Revises: 6bd3b102d61b
Create Date: 2018-06-22 15:14:44.303391
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = 'fee7c696166e'
down_revision = '6bd3b102d61b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('api_event',
sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('name', sa.String(length=80), nullable=False),
sa.Column('user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=True),
sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['person.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_api_event_name'), 'api_event', ['name'], unique=False)
op.create_index(op.f('ix_api_event_user_id'), 'api_event', ['user_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_api_event_user_id'), table_name='api_event')
op.drop_index(op.f('ix_api_event_name'), table_name='api_event')
op.drop_table('api_event')
# ### end Alembic commands ###
|
<commit_before><commit_msg>Add migration file for event table<commit_after>
|
"""empty message
Revision ID: fee7c696166e
Revises: 6bd3b102d61b
Create Date: 2018-06-22 15:14:44.303391
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = 'fee7c696166e'
down_revision = '6bd3b102d61b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('api_event',
sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('name', sa.String(length=80), nullable=False),
sa.Column('user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=True),
sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['person.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_api_event_name'), 'api_event', ['name'], unique=False)
op.create_index(op.f('ix_api_event_user_id'), 'api_event', ['user_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_api_event_user_id'), table_name='api_event')
op.drop_index(op.f('ix_api_event_name'), table_name='api_event')
op.drop_table('api_event')
# ### end Alembic commands ###
|
Add migration file for event table"""empty message
Revision ID: fee7c696166e
Revises: 6bd3b102d61b
Create Date: 2018-06-22 15:14:44.303391
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = 'fee7c696166e'
down_revision = '6bd3b102d61b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('api_event',
sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('name', sa.String(length=80), nullable=False),
sa.Column('user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=True),
sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['person.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_api_event_name'), 'api_event', ['name'], unique=False)
op.create_index(op.f('ix_api_event_user_id'), 'api_event', ['user_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_api_event_user_id'), table_name='api_event')
op.drop_index(op.f('ix_api_event_name'), table_name='api_event')
op.drop_table('api_event')
# ### end Alembic commands ###
|
<commit_before><commit_msg>Add migration file for event table<commit_after>"""empty message
Revision ID: fee7c696166e
Revises: 6bd3b102d61b
Create Date: 2018-06-22 15:14:44.303391
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = 'fee7c696166e'
down_revision = '6bd3b102d61b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('api_event',
sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('name', sa.String(length=80), nullable=False),
sa.Column('user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=True),
sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['person.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_api_event_name'), 'api_event', ['name'], unique=False)
op.create_index(op.f('ix_api_event_user_id'), 'api_event', ['user_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_api_event_user_id'), table_name='api_event')
op.drop_index(op.f('ix_api_event_name'), table_name='api_event')
op.drop_table('api_event')
# ### end Alembic commands ###
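A sketch of the SQLAlchemy model this table implies (names inferred from the migration; not the actual zou model definition):
import uuid
from sqlalchemy import Column, DateTime, ForeignKey, String
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy_utils import UUIDType
Base = declarative_base()
class ApiEvent(Base):
    __tablename__ = 'api_event'
    id = Column(UUIDType(binary=False), primary_key=True, default=uuid.uuid4)
    created_at = Column(DateTime)
    updated_at = Column(DateTime)
    name = Column(String(80), nullable=False, index=True)
    user_id = Column(UUIDType(binary=False), ForeignKey('person.id'), index=True)
    data = Column(JSONB)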
|
|
d1b0b663ce77cae1f79c2825e9aefd9b2f61a139
|
_check_player_stats.py
|
_check_player_stats.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from operator import attrgetter
from sqlalchemy import and_, String, cast
from db.common import session_scope
from db.player import Player
from db.player_game import PlayerGame
from db.player_season import PlayerSeason
# TODO: command line arguments, comparison of all applicable stat values
season = 2016
season_type = 'RS'
stat_criterion = 'goals'
if __name__ == '__main__':
with session_scope() as session:
# retrieving player seasons for specified season and season type
pseasons = session.query(PlayerSeason).filter(
and_(
PlayerSeason.season == season,
PlayerSeason.season_type == season_type
)
).all()
print("+ %d individual season statlines retrieved" % len(pseasons))
for pseason in sorted(pseasons)[:]:
plr = Player.find_by_id(pseason.player_id)
# retrieving individual player games for specified player
pgames = session.query(PlayerGame).filter(
and_(
PlayerGame.player_id == pseason.player_id,
cast(PlayerGame.game_id, String).like("%d02%%" % season),
PlayerGame.team_id == pseason.team_id
)
).all()
stats_value = sum(map(attrgetter(stat_criterion), pgames))
# print(plr, stats_value, getattr(pseason, stat_criterion))
try:
assert stats_value == getattr(pseason, stat_criterion)
            except AssertionError:
print(plr)
print("\t Goals in player games: %d" % stats_value)
print("\t Goals in player season stats: %d" % pseason.goals)
|
Add initial version of checking player stats
|
Add initial version of checking player stats
|
Python
|
mit
|
leaffan/pynhldb
|
Add initial version of checking player stats
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from operator import attrgetter
from sqlalchemy import and_, String, cast
from db.common import session_scope
from db.player import Player
from db.player_game import PlayerGame
from db.player_season import PlayerSeason
# TODO: command line arguments, comparison of all applicable stat values
season = 2016
season_type = 'RS'
stat_criterion = 'goals'
if __name__ == '__main__':
with session_scope() as session:
# retrieving player seasons for specified season and season type
pseasons = session.query(PlayerSeason).filter(
and_(
PlayerSeason.season == season,
PlayerSeason.season_type == season_type
)
).all()
print("+ %d individual season statlines retrieved" % len(pseasons))
for pseason in sorted(pseasons)[:]:
plr = Player.find_by_id(pseason.player_id)
# retrieving individual player games for specified player
pgames = session.query(PlayerGame).filter(
and_(
PlayerGame.player_id == pseason.player_id,
cast(PlayerGame.game_id, String).like("%d02%%" % season),
PlayerGame.team_id == pseason.team_id
)
).all()
stats_value = sum(map(attrgetter(stat_criterion), pgames))
# print(plr, stats_value, getattr(pseason, stat_criterion))
try:
assert stats_value == getattr(pseason, stat_criterion)
            except AssertionError:
print(plr)
print("\t Goals in player games: %d" % stats_value)
print("\t Goals in player season stats: %d" % pseason.goals)
|
<commit_before><commit_msg>Add initial version of checking player stats<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from operator import attrgetter
from sqlalchemy import and_, String, cast
from db.common import session_scope
from db.player import Player
from db.player_game import PlayerGame
from db.player_season import PlayerSeason
# TODO: command line arguments, comparison of all applicable stat values
season = 2016
season_type = 'RS'
stat_criterion = 'goals'
if __name__ == '__main__':
with session_scope() as session:
# retrieving player seasons for specified season and season type
pseasons = session.query(PlayerSeason).filter(
and_(
PlayerSeason.season == season,
PlayerSeason.season_type == season_type
)
).all()
print("+ %d individual season statlines retrieved" % len(pseasons))
for pseason in sorted(pseasons)[:]:
plr = Player.find_by_id(pseason.player_id)
# retrieving individual player games for specified player
pgames = session.query(PlayerGame).filter(
and_(
PlayerGame.player_id == pseason.player_id,
cast(PlayerGame.game_id, String).like("%d02%%" % season),
PlayerGame.team_id == pseason.team_id
)
).all()
stats_value = sum(map(attrgetter(stat_criterion), pgames))
# print(plr, stats_value, getattr(pseason, stat_criterion))
try:
assert stats_value == getattr(pseason, stat_criterion)
            except AssertionError:
print(plr)
print("\t Goals in player games: %d" % stats_value)
print("\t Goals in player season stats: %d" % pseason.goals)
|
Add initial version of checking player stats#!/usr/bin/env python
# -*- coding: utf-8 -*-
from operator import attrgetter
from sqlalchemy import and_, String, cast
from db.common import session_scope
from db.player import Player
from db.player_game import PlayerGame
from db.player_season import PlayerSeason
# TODO: command line arguments, comparison of all applicable stat values
season = 2016
season_type = 'RS'
stat_criterion = 'goals'
if __name__ == '__main__':
with session_scope() as session:
# retrieving player seasons for specified season and season type
pseasons = session.query(PlayerSeason).filter(
and_(
PlayerSeason.season == season,
PlayerSeason.season_type == season_type
)
).all()
print("+ %d individual season statlines retrieved" % len(pseasons))
for pseason in sorted(pseasons)[:]:
plr = Player.find_by_id(pseason.player_id)
# retrieving individual player games for specified player
pgames = session.query(PlayerGame).filter(
and_(
PlayerGame.player_id == pseason.player_id,
cast(PlayerGame.game_id, String).like("%d02%%" % season),
PlayerGame.team_id == pseason.team_id
)
).all()
stats_value = sum(map(attrgetter(stat_criterion), pgames))
# print(plr, stats_value, getattr(pseason, stat_criterion))
try:
assert stats_value == getattr(pseason, stat_criterion)
            except AssertionError:
print(plr)
print("\t Goals in player games: %d" % stats_value)
print("\t Goals in player season stats: %d" % pseason.goals)
|
<commit_before><commit_msg>Add initial version of checking player stats<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from operator import attrgetter
from sqlalchemy import and_, String, cast
from db.common import session_scope
from db.player import Player
from db.player_game import PlayerGame
from db.player_season import PlayerSeason
# TODO: command line arguments, comparison of all applicable stat values
season = 2016
season_type = 'RS'
stat_criterion = 'goals'
if __name__ == '__main__':
with session_scope() as session:
# retrieving player seasons for specified season and season type
pseasons = session.query(PlayerSeason).filter(
and_(
PlayerSeason.season == season,
PlayerSeason.season_type == season_type
)
).all()
print("+ %d individual season statlines retrieved" % len(pseasons))
for pseason in sorted(pseasons)[:]:
plr = Player.find_by_id(pseason.player_id)
# retrieving individual player games for specified player
pgames = session.query(PlayerGame).filter(
and_(
PlayerGame.player_id == pseason.player_id,
cast(PlayerGame.game_id, String).like("%d02%%" % season),
PlayerGame.team_id == pseason.team_id
)
).all()
stats_value = sum(map(attrgetter(stat_criterion), pgames))
# print(plr, stats_value, getattr(pseason, stat_criterion))
try:
assert stats_value == getattr(pseason, stat_criterion)
            except AssertionError:
print(plr)
print("\t Goals in player games: %d" % stats_value)
print("\t Goals in player season stats: %d" % pseason.goals)
|
|
4e2e8d486eb58d6efd66535ce101d8f58748c851
|
tests/chainer_tests/functions_tests/array_tests/test_crop.py
|
tests/chainer_tests/functions_tests/array_tests/test_crop.py
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
class TestCrop(unittest.TestCase):
axes = [1, 2]
def setUp(self):
self.x0_data = numpy.random.uniform(-1, 1, (4, 3, 2))
self.x1_data = numpy.random.uniform(-1, 1, (4, 2, 1))
self.gy_data = numpy.random.uniform(-1, 1, (4, 2, 1))
def check_forward(self, x0_data, x1_data):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
y = functions.crop(x0, x1, self.axes)
self.assertEqual(y.data.dtype, numpy.float)
numpy.testing.assert_equal(cuda.to_cpu(x0_data)[:, :2, :1],
cuda.to_cpu(y.data))
def test_forward_cpu(self):
self.check_forward(self.x0_data, self.x1_data)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x0_data),
cuda.to_gpu(self.x1_data))
def check_backward(self, x0_data, x1_data, y_grad):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
y = functions.crop(x0, x1, self.axes)
y.grad = y_grad
y.backward()
xs = (x0.data, x1.data)
def f():
func = y.creator
return func.forward(xs)
gx, _ = gradient_check.numerical_grad(f, xs, (y.grad,))
gradient_check.assert_allclose(cuda.to_cpu(gx), cuda.to_cpu(x0.grad))
def test_backward_cpu(self):
self.check_backward(self.x0_data, self.x1_data, self.gy_data)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x0_data),
cuda.to_gpu(self.x1_data),
cuda.to_gpu(self.gy_data))
testing.run_module(__name__, __file__)
|
Add test for crop function
|
Add test for crop function
|
Python
|
mit
|
okuta/chainer,benob/chainer,anaruse/chainer,ktnyt/chainer,keisuke-umezawa/chainer,okuta/chainer,jnishi/chainer,delta2323/chainer,kashif/chainer,ronekko/chainer,wkentaro/chainer,chainer/chainer,ktnyt/chainer,niboshi/chainer,kikusu/chainer,jnishi/chainer,niboshi/chainer,niboshi/chainer,hvy/chainer,jnishi/chainer,jnishi/chainer,pfnet/chainer,chainer/chainer,hvy/chainer,tkerola/chainer,benob/chainer,wkentaro/chainer,keisuke-umezawa/chainer,cupy/cupy,hvy/chainer,keisuke-umezawa/chainer,ktnyt/chainer,cupy/cupy,kikusu/chainer,ysekky/chainer,niboshi/chainer,cupy/cupy,cupy/cupy,ktnyt/chainer,okuta/chainer,keisuke-umezawa/chainer,kiyukuta/chainer,hvy/chainer,wkentaro/chainer,rezoo/chainer,wkentaro/chainer,aonotas/chainer,okuta/chainer,chainer/chainer,chainer/chainer
|
Add test for crop function
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
class TestCrop(unittest.TestCase):
axes = [1, 2]
def setUp(self):
self.x0_data = numpy.random.uniform(-1, 1, (4, 3, 2))
self.x1_data = numpy.random.uniform(-1, 1, (4, 2, 1))
self.gy_data = numpy.random.uniform(-1, 1, (4, 2, 1))
def check_forward(self, x0_data, x1_data):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
y = functions.crop(x0, x1, self.axes)
self.assertEqual(y.data.dtype, numpy.float)
numpy.testing.assert_equal(cuda.to_cpu(x0_data)[:, :2, :1],
cuda.to_cpu(y.data))
def test_forward_cpu(self):
self.check_forward(self.x0_data, self.x1_data)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x0_data),
cuda.to_gpu(self.x1_data))
def check_backward(self, x0_data, x1_data, y_grad):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
y = functions.crop(x0, x1, self.axes)
y.grad = y_grad
y.backward()
xs = (x0.data, x1.data)
def f():
func = y.creator
return func.forward(xs)
gx, _ = gradient_check.numerical_grad(f, xs, (y.grad,))
gradient_check.assert_allclose(cuda.to_cpu(gx), cuda.to_cpu(x0.grad))
def test_backward_cpu(self):
self.check_backward(self.x0_data, self.x1_data, self.gy_data)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x0_data),
cuda.to_gpu(self.x1_data),
cuda.to_gpu(self.gy_data))
testing.run_module(__name__, __file__)
|
<commit_before><commit_msg>Add test for crop function<commit_after>
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
class TestCrop(unittest.TestCase):
axes = [1, 2]
def setUp(self):
self.x0_data = numpy.random.uniform(-1, 1, (4, 3, 2))
self.x1_data = numpy.random.uniform(-1, 1, (4, 2, 1))
self.gy_data = numpy.random.uniform(-1, 1, (4, 2, 1))
def check_forward(self, x0_data, x1_data):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
y = functions.crop(x0, x1, self.axes)
self.assertEqual(y.data.dtype, numpy.float)
numpy.testing.assert_equal(cuda.to_cpu(x0_data)[:, :2, :1],
cuda.to_cpu(y.data))
def test_forward_cpu(self):
self.check_forward(self.x0_data, self.x1_data)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x0_data),
cuda.to_gpu(self.x1_data))
def check_backward(self, x0_data, x1_data, y_grad):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
y = functions.crop(x0, x1, self.axes)
y.grad = y_grad
y.backward()
xs = (x0.data, x1.data)
def f():
func = y.creator
return func.forward(xs)
gx, _ = gradient_check.numerical_grad(f, xs, (y.grad,))
gradient_check.assert_allclose(cuda.to_cpu(gx), cuda.to_cpu(x0.grad))
def test_backward_cpu(self):
self.check_backward(self.x0_data, self.x1_data, self.gy_data)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x0_data),
cuda.to_gpu(self.x1_data),
cuda.to_gpu(self.gy_data))
testing.run_module(__name__, __file__)
|
Add test for crop functionimport unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
class TestCrop(unittest.TestCase):
axes = [1, 2]
def setUp(self):
self.x0_data = numpy.random.uniform(-1, 1, (4, 3, 2))
self.x1_data = numpy.random.uniform(-1, 1, (4, 2, 1))
self.gy_data = numpy.random.uniform(-1, 1, (4, 2, 1))
def check_forward(self, x0_data, x1_data):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
y = functions.crop(x0, x1, self.axes)
self.assertEqual(y.data.dtype, numpy.float)
numpy.testing.assert_equal(cuda.to_cpu(x0_data)[:, :2, :1],
cuda.to_cpu(y.data))
def test_forward_cpu(self):
self.check_forward(self.x0_data, self.x1_data)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x0_data),
cuda.to_gpu(self.x1_data))
def check_backward(self, x0_data, x1_data, y_grad):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
y = functions.crop(x0, x1, self.axes)
y.grad = y_grad
y.backward()
xs = (x0.data, x1.data)
def f():
func = y.creator
return func.forward(xs)
gx, _ = gradient_check.numerical_grad(f, xs, (y.grad,))
gradient_check.assert_allclose(cuda.to_cpu(gx), cuda.to_cpu(x0.grad))
def test_backward_cpu(self):
self.check_backward(self.x0_data, self.x1_data, self.gy_data)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x0_data),
cuda.to_gpu(self.x1_data),
cuda.to_gpu(self.gy_data))
testing.run_module(__name__, __file__)
|
<commit_before><commit_msg>Add test for crop function<commit_after>import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
class TestCrop(unittest.TestCase):
axes = [1, 2]
def setUp(self):
self.x0_data = numpy.random.uniform(-1, 1, (4, 3, 2))
self.x1_data = numpy.random.uniform(-1, 1, (4, 2, 1))
self.gy_data = numpy.random.uniform(-1, 1, (4, 2, 1))
def check_forward(self, x0_data, x1_data):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
y = functions.crop(x0, x1, self.axes)
self.assertEqual(y.data.dtype, numpy.float)
numpy.testing.assert_equal(cuda.to_cpu(x0_data)[:, :2, :1],
cuda.to_cpu(y.data))
def test_forward_cpu(self):
self.check_forward(self.x0_data, self.x1_data)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x0_data),
cuda.to_gpu(self.x1_data))
def check_backward(self, x0_data, x1_data, y_grad):
x0 = chainer.Variable(x0_data)
x1 = chainer.Variable(x1_data)
y = functions.crop(x0, x1, self.axes)
y.grad = y_grad
y.backward()
xs = (x0.data, x1.data)
def f():
func = y.creator
return func.forward(xs)
gx, _ = gradient_check.numerical_grad(f, xs, (y.grad,))
gradient_check.assert_allclose(cuda.to_cpu(gx), cuda.to_cpu(x0.grad))
def test_backward_cpu(self):
self.check_backward(self.x0_data, self.x1_data, self.gy_data)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x0_data),
cuda.to_gpu(self.x1_data),
cuda.to_gpu(self.gy_data))
testing.run_module(__name__, __file__)
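What the forward check asserts, restated in plain numpy (shapes from setUp): crop slices x0 down to x1's extent along the given axes.
import numpy as np
x0 = np.random.uniform(-1, 1, (4, 3, 2))
x1 = np.random.uniform(-1, 1, (4, 2, 1))
expected = x0[:, :x1.shape[1], :x1.shape[2]]  # axes 1 and 2 cropped
assert expected.shape == x1.shape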
|
|
adcfe3e87559c5822df8d5f19f306ae320495058
|
example_server_login.py
|
example_server_login.py
|
from nintendo.nex import backend
from nintendo.games import Friends
import logging
logging.basicConfig(level=logging.INFO)
backend = backend.BackEndClient(
Friends.ACCESS_KEY, Friends.NEX_VERSION,
backend.Settings("friends.cfg")
)
backend.connect("127.0.0.1", 1223)
backend.login_guest()
backend.close()
|
Add example script that logs in on custom server
|
Add example script that logs in on custom server
|
Python
|
mit
|
Kinnay/NintendoClients
|
Add example script that logs in on custom server
|
from nintendo.nex import backend
from nintendo.games import Friends
import logging
logging.basicConfig(level=logging.INFO)
backend = backend.BackEndClient(
Friends.ACCESS_KEY, Friends.NEX_VERSION,
backend.Settings("friends.cfg")
)
backend.connect("127.0.0.1", 1223)
backend.login_guest()
backend.close()
|
<commit_before><commit_msg>Add example script that logs in on custom server<commit_after>
|
from nintendo.nex import backend
from nintendo.games import Friends
import logging
logging.basicConfig(level=logging.INFO)
backend = backend.BackEndClient(
Friends.ACCESS_KEY, Friends.NEX_VERSION,
backend.Settings("friends.cfg")
)
backend.connect("127.0.0.1", 1223)
backend.login_guest()
backend.close()
|
Add example script that logs in on custom server
from nintendo.nex import backend
from nintendo.games import Friends
import logging
logging.basicConfig(level=logging.INFO)
backend = backend.BackEndClient(
Friends.ACCESS_KEY, Friends.NEX_VERSION,
backend.Settings("friends.cfg")
)
backend.connect("127.0.0.1", 1223)
backend.login_guest()
backend.close()
|
<commit_before><commit_msg>Add example script that logs in on custom server<commit_after>
from nintendo.nex import backend
from nintendo.games import Friends
import logging
logging.basicConfig(level=logging.INFO)
backend = backend.BackEndClient(
Friends.ACCESS_KEY, Friends.NEX_VERSION,
backend.Settings("friends.cfg")
)
backend.connect("127.0.0.1", 1223)
backend.login_guest()
backend.close()
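Note that the script rebinds the imported module name; an equivalent sketch that avoids shadowing backend (same assumed API):
from nintendo.nex import backend as nex_backend
from nintendo.games import Friends
import logging
logging.basicConfig(level=logging.INFO)
client = nex_backend.BackEndClient(
    Friends.ACCESS_KEY, Friends.NEX_VERSION,
    nex_backend.Settings("friends.cfg")
)
client.connect("127.0.0.1", 1223)
client.login_guest()
client.close()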
|
|
72fcefaa55251d7107d4e70679094966c23a36a9
|
babel_util/scripts/arxivmd_to_leveldb.py
|
babel_util/scripts/arxivmd_to_leveldb.py
|
#!/usr/bin/env python3
import leveldb
import msgpack
import csv
from util.misc import Benchmark, open_file
REQUIRED_KEYS = {'title', 'paper_id', 'date'}
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates a LevelDB of TSV metadata in infile")
parser.add_argument('infile')
parser.add_argument('leveldb_path')
parser.add_argument('--benchmark-freq', default=100000, type=int)
parser.add_argument('--delimiter', '-d', default='\t')
args = parser.parse_args()
    db = leveldb.LevelDB(args.leveldb_path,
write_buffer_size=100 << 20, # 100MB
block_cache_size=400 << 20) # 400MB
with open_file(args.infile) as ifs:
b = Benchmark(args.benchmark_freq)
reader = csv.DictReader(ifs, delimiter=args.delimiter)
for row in reader:
if not REQUIRED_KEYS.issubset(row.keys()):
print(row)
raise KeyError("Not all required keys present")
db.Put(row["paper_id"].encode(), msgpack.packb(row))
b.increment()
b.print_freq()
print(db.GetStats())
|
Transform SQL TSV dump of arXiv MD to LevelDB
|
Transform SQL TSV dump of arXiv MD to LevelDB
|
Python
|
agpl-3.0
|
jevinw/rec_utilities,jevinw/rec_utilities
|
Transform SQL TSV dump of arXiv MD to LevelDB
|
#!/usr/bin/env python3
import leveldb
import msgpack
import csv
from util.misc import Benchmark, open_file
REQUIRED_KEYS = {'title', 'paper_id', 'date'}
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates a LevelDB of TSV metadata in infile")
parser.add_argument('infile')
parser.add_argument('leveldb_path')
parser.add_argument('--benchmark-freq', default=100000, type=int)
parser.add_argument('--delimiter', '-d', default='\t')
args = parser.parse_args()
    db = leveldb.LevelDB(args.leveldb_path,
write_buffer_size=100 << 20, # 100MB
block_cache_size=400 << 20) # 400MB
with open_file(args.infile) as ifs:
b = Benchmark(args.benchmark_freq)
reader = csv.DictReader(ifs, delimiter=args.delimiter)
for row in reader:
if not REQUIRED_KEYS.issubset(row.keys()):
print(row)
raise KeyError("Not all required keys present")
db.Put(row["paper_id"].encode(), msgpack.packb(row))
b.increment()
b.print_freq()
print(db.GetStats())
|
<commit_before><commit_msg>Transform SQL TSV dump of arXiv MD to LevelDB<commit_after>
|
#!/usr/bin/env python3
import leveldb
import msgpack
import csv
from util.misc import Benchmark, open_file
REQUIRED_KEYS = {'title', 'paper_id', 'date'}
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates a LevelDB of TSV metadata in infile")
parser.add_argument('infile')
parser.add_argument('leveldb_path')
parser.add_argument('--benchmark-freq', default=100000, type=int)
parser.add_argument('--delimiter', '-d', default='\t')
args = parser.parse_args()
    db = leveldb.LevelDB(args.leveldb_path,
write_buffer_size=100 << 20, # 100MB
block_cache_size=400 << 20) # 400MB
with open_file(args.infile) as ifs:
b = Benchmark(args.benchmark_freq)
reader = csv.DictReader(ifs, delimiter=args.delimiter)
for row in reader:
if not REQUIRED_KEYS.issubset(row.keys()):
print(row)
raise KeyError("Not all required keys present")
db.Put(row["paper_id"].encode(), msgpack.packb(row))
b.increment()
b.print_freq()
print(db.GetStats())
|
Transform SQL TSV dump of arXiv MD to LevelDB#!/usr/bin/env python3
import leveldb
import msgpack
import csv
from util.misc import Benchmark, open_file
REQUIRED_KEYS = {'title', 'paper_id', 'date'}
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates a LevelDB of TSV metadata in infile")
parser.add_argument('infile')
parser.add_argument('leveldb_path')
parser.add_argument('--benchmark-freq', default=100000, type=int)
parser.add_argument('--delimiter', '-d', default='\t')
args = parser.parse_args()
    db = leveldb.LevelDB(args.leveldb_path,
write_buffer_size=100 << 20, # 100MB
block_cache_size=400 << 20) # 400MB
with open_file(args.infile) as ifs:
b = Benchmark(args.benchmark_freq)
reader = csv.DictReader(ifs, delimiter=args.delimiter)
for row in reader:
if not REQUIRED_KEYS.issubset(row.keys()):
print(row)
raise KeyError("Not all required keys present")
db.Put(row["paper_id"].encode(), msgpack.packb(row))
b.increment()
b.print_freq()
print(db.GetStats())
|
<commit_before><commit_msg>Transform SQL TSV dump of arXiv MD to LevelDB<commit_after>#!/usr/bin/env python3
import leveldb
import msgpack
import csv
from util.misc import Benchmark, open_file
REQUIRED_KEYS = {'title', 'paper_id', 'date'}
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates a LevelDB of TSV metadata in infile")
parser.add_argument('infile')
parser.add_argument('leveldb_path')
parser.add_argument('--benchmark-freq', default=100000, type=int)
parser.add_argument('--delimiter', '-d', default='\t')
args = parser.parse_args()
    db = leveldb.LevelDB(args.leveldb_path,
write_buffer_size=100 << 20, # 100MB
block_cache_size=400 << 20) # 400MB
with open_file(args.infile) as ifs:
b = Benchmark(args.benchmark_freq)
reader = csv.DictReader(ifs, delimiter=args.delimiter)
for row in reader:
if not REQUIRED_KEYS.issubset(row.keys()):
print(row)
raise KeyError("Not all required keys present")
db.Put(row["paper_id"].encode(), msgpack.packb(row))
b.increment()
b.print_freq()
print(db.GetStats())
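Reading a record back out of the resulting database might look like this (path and paper_id are placeholders):
import leveldb
import msgpack
db = leveldb.LevelDB('arxiv_md.db')
row = msgpack.unpackb(db.Get(b'hep-th/9901001'), raw=False)
print(row['title'], row['date'])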
|
|
fedef82434f12a006e22705d1d010b31c616520c
|
scripts/test_generator_insert.py
|
scripts/test_generator_insert.py
|
import random
n = 100
count = 0
difference = 0
keys = []
output = ""
for i in xrange(n):
if (random.random() < 0.4):
random.shuffle(keys)
for j in xrange(random.randint(0,len(keys))):
key = keys.pop()
output += "remove %d %d\n" % (key, len(keys))
else:
chosen_key = random.randint(1, n)
while (chosen_key in keys):
chosen_key = random.randint(1, n)
keys.append(chosen_key)
output += "insert %d %d %d\n" % (chosen_key, i, len(keys))
print output
|
Add a test generator python script
|
Add a test generator python script
|
Python
|
mit
|
potay/FreeTables,potay/FreeTables
|
Add a test generator python script
|
import random
n = 100
count = 0
difference = 0
keys = []
output = ""
for i in xrange(n):
if (random.random() < 0.4):
random.shuffle(keys)
for j in xrange(random.randint(0,len(keys))):
key = keys.pop()
output += "remove %d %d\n" % (key, len(keys))
else:
chosen_key = random.randint(1, n)
while (chosen_key in keys):
chosen_key = random.randint(1, n)
keys.append(chosen_key)
output += "insert %d %d %d\n" % (chosen_key, i, len(keys))
print output
|
<commit_before><commit_msg>Add a test generator python script<commit_after>
|
import random
n = 100
count = 0
difference = 0
keys = []
output = ""
for i in xrange(n):
if (random.random() < 0.4):
random.shuffle(keys)
for j in xrange(random.randint(0,len(keys))):
key = keys.pop()
output += "remove %d %d\n" % (key, len(keys))
else:
chosen_key = random.randint(1, n)
while (chosen_key in keys):
chosen_key = random.randint(1, n)
keys.append(chosen_key)
output += "insert %d %d %d\n" % (chosen_key, i, len(keys))
print output
|
Add a test generator python scriptimport random
n = 100
count = 0
difference = 0
keys = []
output = ""
for i in xrange(n):
if (random.random() < 0.4):
random.shuffle(keys)
for j in xrange(random.randint(0,len(keys))):
key = keys.pop()
output += "remove %d %d\n" % (key, len(keys))
else:
chosen_key = random.randint(1, n)
while (chosen_key in keys):
chosen_key = random.randint(1, n)
keys.append(chosen_key)
output += "insert %d %d %d\n" % (chosen_key, i, len(keys))
print output
|
<commit_before><commit_msg>Add a test generator python script<commit_after>import random
n = 100
count = 0
difference = 0
keys = []
output = ""
for i in xrange(n):
if (random.random() < 0.4):
random.shuffle(keys)
for j in xrange(random.randint(0,len(keys))):
key = keys.pop()
output += "remove %d %d\n" % (key, len(keys))
else:
chosen_key = random.randint(1, n)
while (chosen_key in keys):
chosen_key = random.randint(1, n)
keys.append(chosen_key)
output += "insert %d %d %d\n" % (chosen_key, i, len(keys))
print output
|
|
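The generator above is Python 2 (xrange, print statement); a minimal Python 3 rendering of the same logic, for reference only (it keeps the original's behavior, including the unlikely spin if all n keys are in use):
import random
n = 100
keys = []
lines = []
for i in range(n):
    if random.random() < 0.4:
        random.shuffle(keys)
        for _ in range(random.randint(0, len(keys))):
            key = keys.pop()
            lines.append("remove %d %d" % (key, len(keys)))
    else:
        chosen_key = random.randint(1, n)
        while chosen_key in keys:
            chosen_key = random.randint(1, n)
        keys.append(chosen_key)
        lines.append("insert %d %d %d" % (chosen_key, i, len(keys)))
print("\n".join(lines))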
ed0df2948b7d93d9c141889cc466ac0d5794f462
|
smarkets/hal_messenger_client.py
|
smarkets/hal_messenger_client.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import socket
import simplejson as json
log = logging.getLogger(__name__)
def resolve_address_host(address):
assert len(address) == 2, address
return (socket.gethostbyname(address[0]), address[1])
class HalMessengerUDPClient(object):
def __init__(self, address):
"""
:type address: 2-tuple of string host (ip or domain) and integer port number
"""
self._address = resolve_address_host(address)
self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def send(self, message, room):
payload = json.dumps(dict(message=message, room=room)).encode()
bytes_sent = self._socket.sendto(payload, self._address)
if bytes_sent != len(payload):
log.warn('Sent %s bytes instead of %s. Payload: %s', bytes_sent, len(payload), payload)
class DummyHalMessengerClient(object):
def send(self, message, room):
pass
|
Create Hal messenger UDP client
|
Create Hal messenger UDP client
|
Python
|
mit
|
smarkets/smk_python_sdk
|
Create Hal messenger UDP client
|
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import socket
import simplejson as json
log = logging.getLogger(__name__)
def resolve_address_host(address):
assert len(address) == 2, address
return (socket.gethostbyname(address[0]), address[1])
class HalMessengerUDPClient(object):
def __init__(self, address):
"""
:type address: 2-tuple of string host (ip or domain) and integer port number
"""
self._address = resolve_address_host(address)
self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def send(self, message, room):
payload = json.dumps(dict(message=message, room=room)).encode()
bytes_sent = self._socket.sendto(payload, self._address)
if bytes_sent != len(payload):
log.warn('Sent %s bytes instead of %s. Payload: %s', bytes_sent, len(payload), payload)
class DummyHalMessengerClient(object):
def send(self, message, room):
pass
|
<commit_before><commit_msg>Create Hal messenger UDP client<commit_after>
|
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import socket
import simplejson as json
log = logging.getLogger(__name__)
def resolve_address_host(address):
assert len(address) == 2, address
return (socket.gethostbyname(address[0]), address[1])
class HalMessengerUDPClient(object):
def __init__(self, address):
"""
:type address: 2-tuple of string host (ip or domain) and integer port number
"""
self._address = resolve_address_host(address)
self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def send(self, message, room):
payload = json.dumps(dict(message=message, room=room)).encode()
bytes_sent = self._socket.sendto(payload, self._address)
if bytes_sent != len(payload):
log.warn('Sent %s bytes instead of %s. Payload: %s', bytes_sent, len(payload), payload)
class DummyHalMessengerClient(object):
def send(self, message, room):
pass
|
Create Hal messenger UDP clientfrom __future__ import absolute_import, division, print_function, unicode_literals
import logging
import socket
import simplejson as json
log = logging.getLogger(__name__)
def resolve_address_host(address):
assert len(address) == 2, address
return (socket.gethostbyname(address[0]), address[1])
class HalMessengerUDPClient(object):
def __init__(self, address):
"""
:type address: 2-tuple of string host (ip or domain) and integer port number
"""
self._address = resolve_address_host(address)
self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def send(self, message, room):
payload = json.dumps(dict(message=message, room=room)).encode()
bytes_sent = self._socket.sendto(payload, self._address)
if bytes_sent != len(payload):
log.warn('Sent %s bytes instead of %s. Payload: %s', bytes_sent, len(payload), payload)
class DummyHalMessengerClient(object):
def send(self, message, room):
pass
|
<commit_before><commit_msg>Create Hal messenger UDP client<commit_after>from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import socket
import simplejson as json
log = logging.getLogger(__name__)
def resolve_address_host(address):
assert len(address) == 2, address
return (socket.gethostbyname(address[0]), address[1])
class HalMessengerUDPClient(object):
def __init__(self, address):
"""
:type address: 2-tuple of string host (ip or domain) and integer port number
"""
self._address = resolve_address_host(address)
self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def send(self, message, room):
payload = json.dumps(dict(message=message, room=room)).encode()
bytes_sent = self._socket.sendto(payload, self._address)
if bytes_sent != len(payload):
log.warn('Sent %s bytes instead of %s. Payload: %s', bytes_sent, len(payload), payload)
class DummyHalMessengerClient(object):
def send(self, message, room):
pass
|
|
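A hypothetical usage sketch for the client above (the import path matches the file added by the commit; the address and room are assumptions):
from smarkets.hal_messenger_client import HalMessengerUDPClient
client = HalMessengerUDPClient(('hal.example.com', 9999))  # host is resolved once at construction
client.send('build finished', room='#ops')                 # single fire-and-forget UDP datagram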
45984e716354e2824a5199498dabefa723a6769a
|
neutron/tests/unit/ml2/test_type_local.py
|
neutron/tests/unit/ml2/test_type_local.py
|
# Copyright (c) 2014 Thales Services SAS
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import exceptions as exc
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import type_local
from neutron.tests import base
class LocalTypeTest(base.BaseTestCase):
def setUp(self):
super(LocalTypeTest, self).setUp()
self.driver = type_local.LocalTypeDriver()
self.session = None
def test_validate_provider_segment(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
self.driver.validate_provider_segment(segment)
def test_validate_provider_segment_with_unallowed_physical_network(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL,
api.PHYSICAL_NETWORK: 'phys_net'}
self.assertRaises(exc.InvalidInput,
self.driver.validate_provider_segment,
segment)
def test_validate_provider_segment_with_unallowed_segmentation_id(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL,
api.SEGMENTATION_ID: 2}
self.assertRaises(exc.InvalidInput,
self.driver.validate_provider_segment,
segment)
def test_reserve_provider_segment(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
self.driver.reserve_provider_segment(self.session, segment)
self.driver.release_segment(self.session, segment)
def test_allocate_tenant_segment(self):
expected = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
observed = self.driver.allocate_tenant_segment(self.session)
self.assertEqual(expected, observed)
|
Add local type driver unittests
|
Add local type driver unittests
Partial-Bug: #1269127
Change-Id: I5b34dc09128bcb879ea46be64cc5104eeefd4ab4
|
Python
|
apache-2.0
|
blueboxgroup/neutron,yanheven/neutron,JianyuWang/neutron,jerryz1982/neutron,projectcalico/calico-neutron,antonioUnina/neutron,gkotton/neutron,javaos74/neutron,silenci/neutron,leeseulstack/openstack,suneeth51/neutron,CiscoSystems/neutron,JianyuWang/neutron,CiscoSystems/neutron,sasukeh/neutron,vivekanand1101/neutron,yuewko/neutron,mmnelemane/neutron,mahak/neutron,cisco-openstack/neutron,leeseuljeong/leeseulstack_neutron,paninetworks/neutron,gkotton/neutron,openstack/neutron,yuewko/neutron,eayunstack/neutron,swdream/neutron,jacknjzhou/neutron,apporc/neutron,MaximNevrov/neutron,eonpatapon/neutron,SmartInfrastructures/neutron,pnavarro/neutron,waltBB/neutron_read,shahbazn/neutron,sebrandon1/neutron,leeseulstack/openstack,MaximNevrov/neutron,rdo-management/neutron,sasukeh/neutron,igor-toga/local-snat,leeseuljeong/leeseulstack_neutron,wolverineav/neutron,vbannai/neutron,skyddv/neutron,chitr/neutron,mandeepdhami/neutron,jacknjzhou/neutron,huntxu/neutron,bigswitch/neutron,pnavarro/neutron,klmitch/neutron,SamYaple/neutron,cloudbase/neutron,eonpatapon/neutron,igor-toga/local-snat,aristanetworks/neutron,jumpojoy/neutron,virtualopensystems/neutron,adelina-t/neutron,wenhuizhang/neutron,redhat-openstack/neutron,yamahata/tacker,dhanunjaya/neutron,cernops/neutron,leeseulstack/openstack,redhat-openstack/neutron,shahbazn/neutron,silenci/neutron,SamYaple/neutron,watonyweng/neutron,vbannai/neutron,Stavitsky/neutron,vivekanand1101/neutron,Metaswitch/calico-neutron,bgxavier/neutron,asgard-lab/neutron,infobloxopen/neutron,asgard-lab/neutron,mattt416/neutron,cernops/neutron,Metaswitch/calico-neutron,apporc/neutron,javaos74/neutron,openstack/neutron,dims/neutron,alexandrucoman/vbox-neutron-agent,NeCTAR-RC/neutron,huntxu/neutron,vbannai/neutron,blueboxgroup/neutron,bigswitch/neutron,takeshineshiro/neutron,eayunstack/neutron,mahak/neutron,jumpojoy/neutron,rdo-management/neutron,yanheven/neutron,JioCloud/neutron,openstack/neutron,vveerava/Openstack,chitr/neutron,CiscoSystems/neutron,noironetworks/neutron,barnsnake351/neutron,magic0704/neutron,infobloxopen/neutron,neoareslinux/neutron,antonioUnina/neutron,sebrandon1/neutron,noironetworks/neutron,adelina-t/neutron,miyakz1192/neutron,skyddv/neutron,paninetworks/neutron,yamahata/tacker,cisco-openstack/neutron,miyakz1192/neutron,cloudbase/neutron-virtualbox,Stavitsky/neutron,JioCloud/neutron,leeseuljeong/leeseulstack_neutron,watonyweng/neutron,wolverineav/neutron,alexandrucoman/vbox-neutron-agent,neoareslinux/neutron,wenhuizhang/neutron,projectcalico/calico-neutron,blueboxgroup/neutron,mattt416/neutron,gkotton/neutron,aristanetworks/neutron,virtualopensystems/neutron,mandeepdhami/neutron,suneeth51/neutron,mmnelemane/neutron,NeCTAR-RC/neutron,bgxavier/neutron,cloudbase/neutron,yamahata/tacker,SmartInfrastructures/neutron,mahak/neutron,dims/neutron,cloudbase/neutron-virtualbox,barnsnake351/neutron,klmitch/neutron,glove747/liberty-neutron,vveerava/Openstack,waltBB/neutron_read,gopal1cloud/neutron,vveerava/Openstack,dhanunjaya/neutron,virtualopensystems/neutron,magic0704/neutron,glove747/liberty-neutron,swdream/neutron,gopal1cloud/neutron,takeshineshiro/neutron,jerryz1982/neutron
|
Add local type driver unittests
Partial-Bug: #1269127
Change-Id: I5b34dc09128bcb879ea46be64cc5104eeefd4ab4
|
# Copyright (c) 2014 Thales Services SAS
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import exceptions as exc
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import type_local
from neutron.tests import base
class LocalTypeTest(base.BaseTestCase):
def setUp(self):
super(LocalTypeTest, self).setUp()
self.driver = type_local.LocalTypeDriver()
self.session = None
def test_validate_provider_segment(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
self.driver.validate_provider_segment(segment)
def test_validate_provider_segment_with_unallowed_physical_network(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL,
api.PHYSICAL_NETWORK: 'phys_net'}
self.assertRaises(exc.InvalidInput,
self.driver.validate_provider_segment,
segment)
def test_validate_provider_segment_with_unallowed_segmentation_id(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL,
api.SEGMENTATION_ID: 2}
self.assertRaises(exc.InvalidInput,
self.driver.validate_provider_segment,
segment)
def test_reserve_provider_segment(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
self.driver.reserve_provider_segment(self.session, segment)
self.driver.release_segment(self.session, segment)
def test_allocate_tenant_segment(self):
expected = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
observed = self.driver.allocate_tenant_segment(self.session)
self.assertEqual(expected, observed)
|
<commit_before><commit_msg>Add local type driver unittests
Partial-Bug: #1269127
Change-Id: I5b34dc09128bcb879ea46be64cc5104eeefd4ab4<commit_after>
|
# Copyright (c) 2014 Thales Services SAS
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import exceptions as exc
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import type_local
from neutron.tests import base
class LocalTypeTest(base.BaseTestCase):
def setUp(self):
super(LocalTypeTest, self).setUp()
self.driver = type_local.LocalTypeDriver()
self.session = None
def test_validate_provider_segment(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
self.driver.validate_provider_segment(segment)
def test_validate_provider_segment_with_unallowed_physical_network(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL,
api.PHYSICAL_NETWORK: 'phys_net'}
self.assertRaises(exc.InvalidInput,
self.driver.validate_provider_segment,
segment)
def test_validate_provider_segment_with_unallowed_segmentation_id(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL,
api.SEGMENTATION_ID: 2}
self.assertRaises(exc.InvalidInput,
self.driver.validate_provider_segment,
segment)
def test_reserve_provider_segment(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
self.driver.reserve_provider_segment(self.session, segment)
self.driver.release_segment(self.session, segment)
def test_allocate_tenant_segment(self):
expected = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
observed = self.driver.allocate_tenant_segment(self.session)
self.assertEqual(expected, observed)
|
Add local type driver unittests
Partial-Bug: #1269127
Change-Id: I5b34dc09128bcb879ea46be64cc5104eeefd4ab4# Copyright (c) 2014 Thales Services SAS
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import exceptions as exc
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import type_local
from neutron.tests import base
class LocalTypeTest(base.BaseTestCase):
def setUp(self):
super(LocalTypeTest, self).setUp()
self.driver = type_local.LocalTypeDriver()
self.session = None
def test_validate_provider_segment(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
self.driver.validate_provider_segment(segment)
def test_validate_provider_segment_with_unallowed_physical_network(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL,
api.PHYSICAL_NETWORK: 'phys_net'}
self.assertRaises(exc.InvalidInput,
self.driver.validate_provider_segment,
segment)
def test_validate_provider_segment_with_unallowed_segmentation_id(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL,
api.SEGMENTATION_ID: 2}
self.assertRaises(exc.InvalidInput,
self.driver.validate_provider_segment,
segment)
def test_reserve_provider_segment(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
self.driver.reserve_provider_segment(self.session, segment)
self.driver.release_segment(self.session, segment)
def test_allocate_tenant_segment(self):
expected = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
observed = self.driver.allocate_tenant_segment(self.session)
self.assertEqual(expected, observed)
|
<commit_before><commit_msg>Add local type driver unittests
Partial-Bug: #1269127
Change-Id: I5b34dc09128bcb879ea46be64cc5104eeefd4ab4<commit_after># Copyright (c) 2014 Thales Services SAS
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import exceptions as exc
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import type_local
from neutron.tests import base
class LocalTypeTest(base.BaseTestCase):
def setUp(self):
super(LocalTypeTest, self).setUp()
self.driver = type_local.LocalTypeDriver()
self.session = None
def test_validate_provider_segment(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
self.driver.validate_provider_segment(segment)
def test_validate_provider_segment_with_unallowed_physical_network(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL,
api.PHYSICAL_NETWORK: 'phys_net'}
self.assertRaises(exc.InvalidInput,
self.driver.validate_provider_segment,
segment)
def test_validate_provider_segment_with_unallowed_segmentation_id(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL,
api.SEGMENTATION_ID: 2}
self.assertRaises(exc.InvalidInput,
self.driver.validate_provider_segment,
segment)
def test_reserve_provider_segment(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
self.driver.reserve_provider_segment(self.session, segment)
self.driver.release_segment(self.session, segment)
def test_allocate_tenant_segment(self):
expected = {api.NETWORK_TYPE: p_const.TYPE_LOCAL}
observed = self.driver.allocate_tenant_segment(self.session)
self.assertEqual(expected, observed)
|
|
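One way to run just this test module with the standard library, sketched under the assumption that the working directory is a neutron checkout with the layout implied by the file path above:
import unittest
suite = unittest.defaultTestLoader.discover(
    'neutron/tests/unit/ml2', pattern='test_type_local.py')
unittest.TextTestRunner(verbosity=2).run(suite)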
65cd9f228432a22e54b07f456d5c619f24a487be
|
erpnext/patches/v8_11/set_schedule_date_for_purchase_order.py
|
erpnext/patches/v8_11/set_schedule_date_for_purchase_order.py
|
# Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doctype("Purchase Order")
frappe.reload_doctype("Purchase Order Item")
if not frappe.db.has_column("Purchase Order", "schedule_date"):
return
#Update only submitted PO
for po in frappe.get_all("Purchase Order", filters= [["docstatus", "=", 1]], fields=["name"]):
purchase_order = frappe.get_doc("Purchase Order", po)
if purchase_order.items:
if not purchase_order.schedule_date:
purchase_order.schedule_date = purchase_order.items[0].schedule_date
purchase_order.save()
|
Add patch to update schedule date of submitted Purchase Orders
|
Add patch to update schedule date of submitted Purchase Orders
|
Python
|
agpl-3.0
|
gsnbng/erpnext,geekroot/erpnext,gsnbng/erpnext,indictranstech/erpnext,geekroot/erpnext,geekroot/erpnext,indictranstech/erpnext,gsnbng/erpnext,gsnbng/erpnext,indictranstech/erpnext,geekroot/erpnext,indictranstech/erpnext
|
Add patch to update schedule date of submitted Purchase Orders
|
# Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doctype("Purchase Order")
frappe.reload_doctype("Purchase Order Item")
if not frappe.db.has_column("Purchase Order", "schedule_date"):
return
#Update only submitted PO
for po in frappe.get_all("Purchase Order", filters= [["docstatus", "=", 1]], fields=["name"]):
purchase_order = frappe.get_doc("Purchase Order", po)
if purchase_order.items:
if not purchase_order.schedule_date:
purchase_order.schedule_date = purchase_order.items[0].schedule_date
purchase_order.save()
|
<commit_before><commit_msg>Add patch to update schedule date of submitted Purchase Orders<commit_after>
|
# Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doctype("Purchase Order")
frappe.reload_doctype("Purchase Order Item")
if not frappe.db.has_column("Purchase Order", "schedule_date"):
return
#Update only submitted PO
for po in frappe.get_all("Purchase Order", filters= [["docstatus", "=", 1]], fields=["name"]):
purchase_order = frappe.get_doc("Purchase Order", po)
if purchase_order.items:
if not purchase_order.schedule_date:
purchase_order.schedule_date = purchase_order.items[0].schedule_date
purchase_order.save()
|
Add patch to update schedule date of submitted Purchase Orders# Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doctype("Purchase Order")
frappe.reload_doctype("Purchase Order Item")
if not frappe.db.has_column("Purchase Order", "schedule_date"):
return
#Update only submitted PO
for po in frappe.get_all("Purchase Order", filters= [["docstatus", "=", 1]], fields=["name"]):
purchase_order = frappe.get_doc("Purchase Order", po)
if purchase_order.items:
if not purchase_order.schedule_date:
purchase_order.schedule_date = purchase_order.items[0].schedule_date
purchase_order.save()
|
<commit_before><commit_msg>Add patch to update schedule date of submitted Purchase Orders<commit_after># Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doctype("Purchase Order")
frappe.reload_doctype("Purchase Order Item")
if not frappe.db.has_column("Purchase Order", "schedule_date"):
return
#Update only submitted PO
for po in frappe.get_all("Purchase Order", filters= [["docstatus", "=", 1]], fields=["name"]):
purchase_order = frappe.get_doc("Purchase Order", po)
if purchase_order.items:
if not purchase_order.schedule_date:
purchase_order.schedule_date = purchase_order.items[0].schedule_date
purchase_order.save()
|
|
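The fallback the patch applies, isolated as a plain-Python sketch with no frappe dependency (field names mirror the patch; the sample data is invented):
def fill_schedule_date(po):
    # Copy the first item's schedule_date up to the parent when the parent has none.
    if po['items'] and not po.get('schedule_date'):
        po['schedule_date'] = po['items'][0]['schedule_date']
    return po
print(fill_schedule_date({'items': [{'schedule_date': '2017-10-01'}]}))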
77d155cdea473873b8e179eefa180901f12c1623
|
beetsplug/kodiupdate.py
|
beetsplug/kodiupdate.py
|
# -*- coding: utf-8 -*-
"""Updates a Kodi library whenever the beets library is changed. This is based on the Plex Update plugin.
Put something like the following in your config.yaml to configure:
kodi:
host: localhost
port: 8080
user: user
pwd: secret
"""
from __future__ import division, absolute_import, print_function
import requests
import json
from requests.auth import HTTPBasicAuth
from beets import config
from beets.plugins import BeetsPlugin
def update_kodi(host, port, user, password):
"""Sends request to the Kodi api to start a library refresh.
"""
url = "http://{0}:{1}/jsonrpc/".format(host, port)
# The kodi jsonrpc documentation states that Content-Type: application/json is mandatory
headers = {'Content-Type': 'application/json'}
# Create the payload. Id seems to be mandatory.
payload = {'jsonrpc': '2.0', 'method':'AudioLibrary.Scan', 'id':1}
r = requests.post(url, auth=HTTPBasicAuth(user, password), json=payload, headers=headers)
return r
class KodiUpdate(BeetsPlugin):
def __init__(self):
super(KodiUpdate, self).__init__()
# Adding defaults.
config['kodi'].add({
u'host': u'localhost',
u'port': 8080,
u'user': u'kodi',
u'pwd': u'kodi'})
self.register_listener('database_change', self.listen_for_db_change)
def listen_for_db_change(self, lib, model):
"""Listens for beets db change and register the update for the end"""
self.register_listener('cli_exit', self.update)
def update(self, lib):
"""When the client exists try to send refresh request to Kodi server.
"""
self._log.info(u'Updating Kodi library...')
# Try to send update request.
try:
update_kodi(
config['kodi']['host'].get(),
config['kodi']['port'].get(),
config['kodi']['user'].get(),
config['kodi']['pwd'].get())
self._log.info(u'... started.')
except requests.exceptions.RequestException:
self._log.warning(u'Update failed.')
|
Add a plugin to update a Kodi music library
|
Add a plugin to update a Kodi music library
I created one for an older version before, but it didn't work since the change to Python 3. So I created a new one that works.
|
Python
|
mit
|
beetbox/beets,beetbox/beets,ibmibmibm/beets,ibmibmibm/beets,Kraymer/beets,jackwilsdon/beets,beetbox/beets,sampsyo/beets,pkess/beets,beetbox/beets,jackwilsdon/beets,sampsyo/beets,jackwilsdon/beets,jackwilsdon/beets,SusannaMaria/beets,pkess/beets,shamangeorge/beets,SusannaMaria/beets,sampsyo/beets,shamangeorge/beets,shamangeorge/beets,Kraymer/beets,SusannaMaria/beets,ibmibmibm/beets,ibmibmibm/beets,Kraymer/beets,SusannaMaria/beets,pkess/beets,sampsyo/beets,shamangeorge/beets,Kraymer/beets,pkess/beets
|
Add a plugin to update a Kodi music library
I created one for an older version before, but it didn't work since the change to Python 3. So I created a new one that works.
|
# -*- coding: utf-8 -*-
"""Updates a Kodi library whenever the beets library is changed. This is based on the Plex Update plugin.
Put something like the following in your config.yaml to configure:
kodi:
host: localhost
port: 8080
user: user
pwd: secret
"""
from __future__ import division, absolute_import, print_function
import requests
import json
from requests.auth import HTTPBasicAuth
from beets import config
from beets.plugins import BeetsPlugin
def update_kodi(host, port, user, password):
"""Sends request to the Kodi api to start a library refresh.
"""
url = "http://{0}:{1}/jsonrpc/".format(host, port)
# The kodi jsonrpc documentation states that Content-Type: application/json is mandatory
headers = {'Content-Type': 'application/json'}
# Create the payload. Id seems to be mandatory.
payload = {'jsonrpc': '2.0', 'method':'AudioLibrary.Scan', 'id':1}
r = requests.post(url, auth=HTTPBasicAuth(user, password), json=payload, headers=headers)
return r
class KodiUpdate(BeetsPlugin):
def __init__(self):
super(KodiUpdate, self).__init__()
# Adding defaults.
config['kodi'].add({
u'host': u'localhost',
u'port': 8080,
u'user': u'kodi',
u'pwd': u'kodi'})
self.register_listener('database_change', self.listen_for_db_change)
def listen_for_db_change(self, lib, model):
"""Listens for beets db change and register the update for the end"""
self.register_listener('cli_exit', self.update)
def update(self, lib):
"""When the client exists try to send refresh request to Kodi server.
"""
self._log.info(u'Updating Kodi library...')
# Try to send update request.
try:
update_kodi(
config['kodi']['host'].get(),
config['kodi']['port'].get(),
config['kodi']['user'].get(),
config['kodi']['pwd'].get())
self._log.info(u'... started.')
except requests.exceptions.RequestException:
self._log.warning(u'Update failed.')
|
<commit_before><commit_msg>Add a plugin to update a Kodi music library
I created one for an older version before, but it didn't work since the change to Python 3. So I created a new one that works.<commit_after>
|
# -*- coding: utf-8 -*-
"""Updates a Kodi library whenever the beets library is changed. This is based on the Plex Update plugin.
Put something like the following in your config.yaml to configure:
kodi:
host: localhost
port: 8080
user: user
pwd: secret
"""
from __future__ import division, absolute_import, print_function
import requests
import json
from requests.auth import HTTPBasicAuth
from beets import config
from beets.plugins import BeetsPlugin
def update_kodi(host, port, user, password):
"""Sends request to the Kodi api to start a library refresh.
"""
url = "http://{0}:{1}/jsonrpc/".format(host, port)
# The kodi jsonrpc documentation states that Content-Type: application/json is mandatory
headers = {'Content-Type': 'application/json'}
# Create the payload. Id seems to be mandatory.
payload = {'jsonrpc': '2.0', 'method':'AudioLibrary.Scan', 'id':1}
r = requests.post(url, auth=HTTPBasicAuth(user, password), json=payload, headers=headers)
return r
class KodiUpdate(BeetsPlugin):
def __init__(self):
super(KodiUpdate, self).__init__()
# Adding defaults.
config['kodi'].add({
u'host': u'localhost',
u'port': 8080,
u'user': u'kodi',
u'pwd': u'kodi'})
self.register_listener('database_change', self.listen_for_db_change)
def listen_for_db_change(self, lib, model):
"""Listens for beets db change and register the update for the end"""
self.register_listener('cli_exit', self.update)
def update(self, lib):
"""When the client exists try to send refresh request to Kodi server.
"""
self._log.info(u'Updating Kodi library...')
# Try to send update request.
try:
update_kodi(
config['kodi']['host'].get(),
config['kodi']['port'].get(),
config['kodi']['user'].get(),
config['kodi']['pwd'].get())
self._log.info(u'... started.')
except requests.exceptions.RequestException:
self._log.warning(u'Update failed.')
|
Add a plugin to update a Kodi music library
I created one for an older version before, but it didn't work since the change to Python 3. So I created a new one that works.# -*- coding: utf-8 -*-
"""Updates a Kodi library whenever the beets library is changed. This is based on the Plex Update plugin.
Put something like the following in your config.yaml to configure:
kodi:
host: localhost
port: 8080
user: user
pwd: secret
"""
from __future__ import division, absolute_import, print_function
import requests
import json
from requests.auth import HTTPBasicAuth
from beets import config
from beets.plugins import BeetsPlugin
def update_kodi(host, port, user, password):
"""Sends request to the Kodi api to start a library refresh.
"""
url = "http://{0}:{1}/jsonrpc/".format(host, port)
# The kodi jsonrpc documentation states that Content-Type: application/json is mandatory
headers = {'Content-Type': 'application/json'}
# Create the payload. Id seems to be mandatory.
payload = {'jsonrpc': '2.0', 'method':'AudioLibrary.Scan', 'id':1}
r = requests.post(url, auth=HTTPBasicAuth(user, password), json=payload, headers=headers)
return r
class KodiUpdate(BeetsPlugin):
def __init__(self):
super(KodiUpdate, self).__init__()
# Adding defaults.
config['kodi'].add({
u'host': u'localhost',
u'port': 8080,
u'user': u'kodi',
u'pwd': u'kodi'})
self.register_listener('database_change', self.listen_for_db_change)
def listen_for_db_change(self, lib, model):
"""Listens for beets db change and register the update for the end"""
self.register_listener('cli_exit', self.update)
def update(self, lib):
"""When the client exists try to send refresh request to Kodi server.
"""
self._log.info(u'Updating Kodi library...')
# Try to send update request.
try:
update_kodi(
config['kodi']['host'].get(),
config['kodi']['port'].get(),
config['kodi']['user'].get(),
config['kodi']['pwd'].get())
self._log.info(u'... started.')
except requests.exceptions.RequestException:
self._log.warning(u'Update failed.')
|
<commit_before><commit_msg>Add a plugin to update a Kodi music library
I created one for an older version before, but it didn't work since the change to Python 3. So I created a new one that works.<commit_after># -*- coding: utf-8 -*-
"""Updates a Kodi library whenever the beets library is changed. This is based on the Plex Update plugin.
Put something like the following in your config.yaml to configure:
kodi:
host: localhost
port: 8080
user: user
pwd: secret
"""
from __future__ import division, absolute_import, print_function
import requests
import json
from requests.auth import HTTPBasicAuth
from beets import config
from beets.plugins import BeetsPlugin
def update_kodi(host, port, user, password):
"""Sends request to the Kodi api to start a library refresh.
"""
url = "http://{0}:{1}/jsonrpc/".format(host, port)
# The kodi jsonrpc documentation states that Content-Type: application/json is mandatory
headers = {'Content-Type': 'application/json'}
# Create the payload. Id seems to be mandatory.
payload = {'jsonrpc': '2.0', 'method':'AudioLibrary.Scan', 'id':1}
r = requests.post(url, auth=HTTPBasicAuth(user, password), json=payload, headers=headers)
return r
class KodiUpdate(BeetsPlugin):
def __init__(self):
super(KodiUpdate, self).__init__()
# Adding defaults.
config['kodi'].add({
u'host': u'localhost',
u'port': 8080,
u'user': u'kodi',
u'pwd': u'kodi'})
self.register_listener('database_change', self.listen_for_db_change)
def listen_for_db_change(self, lib, model):
"""Listens for beets db change and register the update for the end"""
self.register_listener('cli_exit', self.update)
def update(self, lib):
"""When the client exists try to send refresh request to Kodi server.
"""
self._log.info(u'Updating Kodi library...')
# Try to send update request.
try:
update_kodi(
config['kodi']['host'].get(),
config['kodi']['port'].get(),
config['kodi']['user'].get(),
config['kodi']['pwd'].get())
self._log.info(u'... started.')
except requests.exceptions.RequestException:
self._log.warning(u'Update failed.')
|
|
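A hypothetical standalone call mirroring the request the plugin above sends on cli_exit, using the plugin's default host, port and credentials:
import requests
from requests.auth import HTTPBasicAuth
payload = {'jsonrpc': '2.0', 'method': 'AudioLibrary.Scan', 'id': 1}
r = requests.post('http://localhost:8080/jsonrpc/',
                  auth=HTTPBasicAuth('kodi', 'kodi'),  # plugin defaults
                  json=payload,
                  headers={'Content-Type': 'application/json'})
print(r.status_code)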
b715b0def9eefa0cde9ac57564fee6ea54d7d92c
|
py/add-one-row-to-tree.py
|
py/add-one-row-to-tree.py
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def addOneRow(self, root, v, d):
"""
:type root: TreeNode
:type v: int
:type d: int
:rtype: TreeNode
"""
if d == 1:
new_root = TreeNode(v)
new_root.left = root
return new_root
q = [(root, 2)]
for node, nxt_depth in q:
if nxt_depth == d:
left = TreeNode(v)
right = TreeNode(v)
left.left = node.left
right.right = node.right
node.left, node.right = left, right
else:
if node.left:
q.append((node.left, nxt_depth + 1))
if node.right:
q.append((node.right, nxt_depth + 1))
return root
|
Add py solution for 623. Add One Row to Tree
|
Add py solution for 623. Add One Row to Tree
623. Add One Row to Tree: https://leetcode.com/problems/add-one-row-to-tree/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 623. Add One Row to Tree
623. Add One Row to Tree: https://leetcode.com/problems/add-one-row-to-tree/
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def addOneRow(self, root, v, d):
"""
:type root: TreeNode
:type v: int
:type d: int
:rtype: TreeNode
"""
if d == 1:
new_root = TreeNode(v)
new_root.left = root
return new_root
q = [(root, 2)]
for node, nxt_depth in q:
if nxt_depth == d:
left = TreeNode(v)
right = TreeNode(v)
left.left = node.left
right.right = node.right
node.left, node.right = left, right
else:
if node.left:
q.append((node.left, nxt_depth + 1))
if node.right:
q.append((node.right, nxt_depth + 1))
return root
|
<commit_before><commit_msg>Add py solution for 623. Add One Row to Tree
623. Add One Row to Tree: https://leetcode.com/problems/add-one-row-to-tree/<commit_after>
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def addOneRow(self, root, v, d):
"""
:type root: TreeNode
:type v: int
:type d: int
:rtype: TreeNode
"""
if d == 1:
new_root = TreeNode(v)
new_root.left = root
return new_root
q = [(root, 2)]
for node, nxt_depth in q:
if nxt_depth == d:
left = TreeNode(v)
right = TreeNode(v)
left.left = node.left
right.right = node.right
node.left, node.right = left, right
else:
if node.left:
q.append((node.left, nxt_depth + 1))
if node.right:
q.append((node.right, nxt_depth + 1))
return root
|
Add py solution for 623. Add One Row to Tree
623. Add One Row to Tree: https://leetcode.com/problems/add-one-row-to-tree/# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def addOneRow(self, root, v, d):
"""
:type root: TreeNode
:type v: int
:type d: int
:rtype: TreeNode
"""
if d == 1:
new_root = TreeNode(v)
new_root.left = root
return new_root
q = [(root, 2)]
for node, nxt_depth in q:
if nxt_depth == d:
left = TreeNode(v)
right = TreeNode(v)
left.left = node.left
right.right = node.right
node.left, node.right = left, right
else:
if node.left:
q.append((node.left, nxt_depth + 1))
if node.right:
q.append((node.right, nxt_depth + 1))
return root
|
<commit_before><commit_msg>Add py solution for 623. Add One Row to Tree
623. Add One Row to Tree: https://leetcode.com/problems/add-one-row-to-tree/<commit_after># Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def addOneRow(self, root, v, d):
"""
:type root: TreeNode
:type v: int
:type d: int
:rtype: TreeNode
"""
if d == 1:
new_root = TreeNode(v)
new_root.left = root
return new_root
q = [(root, 2)]
for node, nxt_depth in q:
if nxt_depth == d:
left = TreeNode(v)
right = TreeNode(v)
left.left = node.left
right.right = node.right
node.left, node.right = left, right
else:
if node.left:
q.append((node.left, nxt_depth + 1))
if node.right:
q.append((node.right, nxt_depth + 1))
return root
|
|
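A hypothetical driver for the solution above, assuming the Solution class is in scope; TreeNode is only commented out in the LeetCode stub, so it is defined here:
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
root = TreeNode(4)
root.left, root.right = TreeNode(2), TreeNode(6)
new_root = Solution().addOneRow(root, v=1, d=2)
print(new_root.left.val, new_root.right.val)  # 1 1 -- new nodes splice in at depth 2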
a6237c18f0a2f152b8d138dcc802bd824734b70d
|
catalog/predictions.py
|
catalog/predictions.py
|
from users.models import User
from catalog.models import Course
import collections
from django.contrib.contenttypes.models import ContentType
from actstream.models import Follow
def distance(v1, v2):
absolute_difference = [abs(c1 - c2) for c1, c2 in zip(v1, v2)]
distance = sum(absolute_difference)
return distance
def get_users_following_dict():
course_type = ContentType.objects.get(app_label="catalog", model="course")
follows = Follow.objects\
.filter(content_type=course_type)\
.select_related('user')\
.prefetch_related('follow_object')
following_dict = collections.defaultdict(set)
for follow in follows:
following_dict[follow.user.netid].add(follow.follow_object)
return following_dict
def suggest(target_user, K=15):
courses = Course.objects.all()
users = {user.netid: user for user in User.objects.all()}
users_following = get_users_following_dict()
vectors = {}
for netid, user in users.items():
following = users_following[netid]
vectors[netid] = [course in following for course in courses]
target_vector = vectors[target_user.netid]
distances = {netid: distance(target_vector, vector) for netid, vector in vectors.items()}
non_null_distances = {netid: distance for netid, distance in distances.items() if distance > 0}
get_score = lambda x: x[1]
neighbors = sorted(non_null_distances.items(), key=get_score)[:K]
best_matches = collections.Counter()
target_set = users_following[target_user.netid]
for netid, score in neighbors:
differences = users_following[netid] - target_set
best_matches.update(differences)
return best_matches
|
Add a course following predictor
|
Add a course following predictor
|
Python
|
agpl-3.0
|
UrLab/beta402,UrLab/DocHub,UrLab/beta402,UrLab/beta402,UrLab/DocHub,UrLab/DocHub,UrLab/DocHub
|
Add a course following predictor
|
from users.models import User
from catalog.models import Course
import collections
from django.contrib.contenttypes.models import ContentType
from actstream.models import Follow
def distance(v1, v2):
absolute_difference = [abs(c1 - c2) for c1, c2 in zip(v1, v2)]
distance = sum(absolute_difference)
return distance
def get_users_following_dict():
course_type = ContentType.objects.get(app_label="catalog", model="course")
follows = Follow.objects\
.filter(content_type=course_type)\
.select_related('user')\
.prefetch_related('follow_object')
following_dict = collections.defaultdict(set)
for follow in follows:
following_dict[follow.user.netid].add(follow.follow_object)
return following_dict
def suggest(target_user, K=15):
courses = Course.objects.all()
users = {user.netid: user for user in User.objects.all()}
users_following = get_users_following_dict()
vectors = {}
for netid, user in users.items():
following = users_following[netid]
vectors[netid] = [course in following for course in courses]
target_vector = vectors[target_user.netid]
distances = {netid: distance(target_vector, vector) for netid, vector in vectors.items()}
non_null_distances = {netid: distance for netid, distance in distances.items() if distance > 0}
get_score = lambda x: x[1]
neighbors = sorted(non_null_distances.items(), key=get_score)[:K]
best_matches = collections.Counter()
target_set = users_following[target_user.netid]
for netid, score in neighbors:
differences = users_following[netid] - target_set
best_matches.update(differences)
return best_matches
|
<commit_before><commit_msg>Add a course following predictor<commit_after>
|
from users.models import User
from catalog.models import Course
import collections
from django.contrib.contenttypes.models import ContentType
from actstream.models import Follow
def distance(v1, v2):
absolute_difference = [abs(c1 - c2) for c1, c2 in zip(v1, v2)]
distance = sum(absolute_difference)
return distance
def get_users_following_dict():
course_type = ContentType.objects.get(app_label="catalog", model="course")
follows = Follow.objects\
.filter(content_type=course_type)\
.select_related('user')\
.prefetch_related('follow_object')
following_dict = collections.defaultdict(set)
for follow in follows:
following_dict[follow.user.netid].add(follow.follow_object)
return following_dict
def suggest(target_user, K=15):
courses = Course.objects.all()
users = {user.netid: user for user in User.objects.all()}
users_following = get_users_following_dict()
vectors = {}
for netid, user in users.items():
following = users_following[netid]
vectors[netid] = [course in following for course in courses]
target_vector = vectors[target_user.netid]
distances = {netid: distance(target_vector, vector) for netid, vector in vectors.items()}
non_null_distances = {netid: distance for netid, distance in distances.items() if distance > 0}
get_score = lambda x: x[1]
neighbors = sorted(non_null_distances.items(), key=get_score)[:K]
best_matches = collections.Counter()
target_set = users_following[target_user.netid]
for netid, score in neighbors:
differences = users_following[netid] - target_set
best_matches.update(differences)
return best_matches
|
Add a course following predictorfrom users.models import User
from catalog.models import Course
import collections
from django.contrib.contenttypes.models import ContentType
from actstream.models import Follow
def distance(v1, v2):
absolute_difference = [abs(c1 - c2) for c1, c2 in zip(v1, v2)]
distance = sum(absolute_difference)
return distance
def get_users_following_dict():
course_type = ContentType.objects.get(app_label="catalog", model="course")
follows = Follow.objects\
.filter(content_type=course_type)\
.select_related('user')\
.prefetch_related('follow_object')
following_dict = collections.defaultdict(set)
for follow in follows:
following_dict[follow.user.netid].add(follow.follow_object)
return following_dict
def suggest(target_user, K=15):
courses = Course.objects.all()
users = {user.netid: user for user in User.objects.all()}
users_following = get_users_following_dict()
vectors = {}
for netid, user in users.items():
following = users_following[netid]
vectors[netid] = [course in following for course in courses]
target_vector = vectors[target_user.netid]
distances = {netid: distance(target_vector, vector) for netid, vector in vectors.items()}
non_null_distances = {netid: distance for netid, distance in distances.items() if distance > 0}
get_score = lambda x: x[1]
neighbors = sorted(non_null_distances.items(), key=get_score)[:K]
best_matches = collections.Counter()
target_set = users_following[target_user.netid]
for netid, score in neighbors:
differences = users_following[netid] - target_set
best_matches.update(differences)
return best_matches
|
<commit_before><commit_msg>Add a course following predictor<commit_after>from users.models import User
from catalog.models import Course
import collections
from django.contrib.contenttypes.models import ContentType
from actstream.models import Follow
def distance(v1, v2):
absolute_difference = [abs(c1 - c2) for c1, c2 in zip(v1, v2)]
distance = sum(absolute_difference)
return distance
def get_users_following_dict():
course_type = ContentType.objects.get(app_label="catalog", model="course")
follows = Follow.objects\
.filter(content_type=course_type)\
.select_related('user')\
.prefetch_related('follow_object')
following_dict = collections.defaultdict(set)
for follow in follows:
following_dict[follow.user.netid].add(follow.follow_object)
return following_dict
def suggest(target_user, K=15):
courses = Course.objects.all()
users = {user.netid: user for user in User.objects.all()}
users_following = get_users_following_dict()
vectors = {}
for netid, user in users.items():
following = users_following[netid]
vectors[netid] = [course in following for course in courses]
target_vector = vectors[target_user.netid]
distances = {netid: distance(target_vector, vector) for netid, vector in vectors.items()}
non_null_distances = {netid: distance for netid, distance in distances.items() if distance > 0}
get_score = lambda x: x[1]
neighbors = sorted(non_null_distances.items(), key=get_score)[:K]
best_matches = collections.Counter()
target_set = users_following[target_user.netid]
for netid, score in neighbors:
differences = users_following[netid] - target_set
best_matches.update(differences)
return best_matches
|
|
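The distance above is a Hamming-style count over boolean follow vectors; a toy illustration with invented vectors:
v_alice = [1, 0, 1]  # follows courses 0 and 2
v_bob   = [1, 1, 0]  # follows courses 0 and 1
print(sum(abs(a - b) for a, b in zip(v_alice, v_bob)))  # 2 -- they differ on two courses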
543eda041b4cde015d7150d0416c6e18dde8af9a
|
test/build/test_builder.py
|
test/build/test_builder.py
|
import unittest
import sys
import re
import saliweb.build
class BuilderTest(unittest.TestCase):
"""Check builder functions"""
def test_builder_python_tests(self):
"""Test builder_python_tests function"""
class DummyEnv(object):
def __init__(self, exit_val):
self.exit_val = exit_val
self.env = {'ENV': {}}
def __getitem__(self, key):
return self.env[key]
def Clone(self):
return self
def Execute(self, exe):
self.exec_str = exe
return self.exit_val
e = DummyEnv(0)
t = saliweb.build.builder_python_tests('dummytgt',
['foo.py', 'bar.py'], e)
self.assertEqual(t, None)
e = DummyEnv(1)
t = saliweb.build.builder_python_tests('dummytgt',
['foo.py', 'bar.py'], e)
self.assertEqual(e.env['ENV'], {'PYTHONPATH': 'python'})
regex = 'python .*/run\-tests\.py foo\.py bar\.py$'
m = re.match(regex, e.exec_str)
self.assertNotEqual(m, None, 'String %s does not match regex %s' \
% (e.exec_str, regex))
self.assertEqual(t, 1)
if __name__ == '__main__':
unittest.main()
|
Test build system builder functions.
|
Test build system builder functions.
|
Python
|
lgpl-2.1
|
salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb
|
Test build system builder functions.
|
import unittest
import sys
import re
import saliweb.build
class BuilderTest(unittest.TestCase):
"""Check builder functions"""
def test_builder_python_tests(self):
"""Test builder_python_tests function"""
class DummyEnv(object):
def __init__(self, exit_val):
self.exit_val = exit_val
self.env = {'ENV': {}}
def __getitem__(self, key):
return self.env[key]
def Clone(self):
return self
def Execute(self, exe):
self.exec_str = exe
return self.exit_val
e = DummyEnv(0)
t = saliweb.build.builder_python_tests('dummytgt',
['foo.py', 'bar.py'], e)
self.assertEqual(t, None)
e = DummyEnv(1)
t = saliweb.build.builder_python_tests('dummytgt',
['foo.py', 'bar.py'], e)
self.assertEqual(e.env['ENV'], {'PYTHONPATH': 'python'})
regex = 'python .*/run\-tests\.py foo\.py bar\.py$'
m = re.match(regex, e.exec_str)
self.assertNotEqual(m, None, 'String %s does not match regex %s' \
% (e.exec_str, regex))
self.assertEqual(t, 1)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Test build system builder functions.<commit_after>
|
import unittest
import sys
import re
import saliweb.build
class BuilderTest(unittest.TestCase):
"""Check builder functions"""
def test_builder_python_tests(self):
"""Test builder_python_tests function"""
class DummyEnv(object):
def __init__(self, exit_val):
self.exit_val = exit_val
self.env = {'ENV': {}}
def __getitem__(self, key):
return self.env[key]
def Clone(self):
return self
def Execute(self, exe):
self.exec_str = exe
return self.exit_val
e = DummyEnv(0)
t = saliweb.build.builder_python_tests('dummytgt',
['foo.py', 'bar.py'], e)
self.assertEqual(t, None)
e = DummyEnv(1)
t = saliweb.build.builder_python_tests('dummytgt',
['foo.py', 'bar.py'], e)
self.assertEqual(e.env['ENV'], {'PYTHONPATH': 'python'})
regex = 'python .*/run\-tests\.py foo\.py bar\.py$'
m = re.match(regex, e.exec_str)
self.assertNotEqual(m, None, 'String %s does not match regex %s' \
% (e.exec_str, regex))
self.assertEqual(t, 1)
if __name__ == '__main__':
unittest.main()
|
Test build system builder functions.import unittest
import sys
import re
import saliweb.build
class BuilderTest(unittest.TestCase):
"""Check builder functions"""
def test_builder_python_tests(self):
"""Test builder_python_tests function"""
class DummyEnv(object):
def __init__(self, exit_val):
self.exit_val = exit_val
self.env = {'ENV': {}}
def __getitem__(self, key):
return self.env[key]
def Clone(self):
return self
def Execute(self, exe):
self.exec_str = exe
return self.exit_val
e = DummyEnv(0)
t = saliweb.build.builder_python_tests('dummytgt',
['foo.py', 'bar.py'], e)
self.assertEqual(t, None)
e = DummyEnv(1)
t = saliweb.build.builder_python_tests('dummytgt',
['foo.py', 'bar.py'], e)
self.assertEqual(e.env['ENV'], {'PYTHONPATH': 'python'})
regex = 'python .*/run\-tests\.py foo\.py bar\.py$'
m = re.match(regex, e.exec_str)
self.assertNotEqual(m, None, 'String %s does not match regex %s' \
% (e.exec_str, regex))
self.assertEqual(t, 1)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Test build system builder functions.<commit_after>import unittest
import sys
import re
import saliweb.build
class BuilderTest(unittest.TestCase):
"""Check builder functions"""
def test_builder_python_tests(self):
"""Test builder_python_tests function"""
class DummyEnv(object):
def __init__(self, exit_val):
self.exit_val = exit_val
self.env = {'ENV': {}}
def __getitem__(self, key):
return self.env[key]
def Clone(self):
return self
def Execute(self, exe):
self.exec_str = exe
return self.exit_val
e = DummyEnv(0)
t = saliweb.build.builder_python_tests('dummytgt',
['foo.py', 'bar.py'], e)
self.assertEqual(t, None)
e = DummyEnv(1)
t = saliweb.build.builder_python_tests('dummytgt',
['foo.py', 'bar.py'], e)
self.assertEqual(e.env['ENV'], {'PYTHONPATH': 'python'})
regex = 'python .*/run\-tests\.py foo\.py bar\.py$'
m = re.match(regex, e.exec_str)
self.assertNotEqual(m, None, 'String %s does not match regex %s' \
% (e.exec_str, regex))
self.assertEqual(t, 1)
if __name__ == '__main__':
unittest.main()
|
|
cea3e26e4c8a1c078251bc184d93c59530ab7044
|
tests/unit/route53/test_zone.py
|
tests/unit/route53/test_zone.py
|
#!/usr/bin/env python
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.route53.zone import Zone
from tests.compat import mock, unittest
class TestZone(unittest.TestCase):
def test_find_records(self):
mock_connection = mock.Mock()
zone = Zone(mock_connection, {})
zone.id = None
rr_names = ['amazon.com', 'amazon.com', 'aws.amazon.com',
'aws.amazon.com']
mock_rrs = []
# Create some mock resource records.
for rr_name in rr_names:
mock_rr = mock.Mock()
mock_rr.name = rr_name
mock_rr.type = 'A'
mock_rr.weight = None
mock_rr.region = None
mock_rrs.append(mock_rr)
# Set the last resource record to ``None``. The ``find_records`` loop
# should never hit this.
mock_rrs[3] = None
mock_connection.get_all_rrsets.return_value = mock_rrs
mock_connection._make_qualified.return_value = 'amazon.com'
# Ensure that the ``None`` type object was not iterated over.
try:
result_rrs = zone.find_records('amazon.com', 'A', all=True)
except AttributeError as e:
self.fail("find_records() iterated too far into resource"
" record list.")
# Determine that the resulting records are correct.
self.assertEqual(result_rrs, [mock_rrs[0], mock_rrs[1]])
if __name__ == "__main__":
unittest.main()
|
Add test for iteration break of Route53 records
|
Add test for iteration break of Route53 records
|
Python
|
mit
|
alfredodeza/boto,TiVoMaker/boto,vishnugonela/boto,shaunbrady/boto,garnaat/boto,janslow/boto,felix-d/boto,nishigori/boto,yangchaogit/boto,disruptek/boto,disruptek/boto,ocadotechnology/boto,ramitsurana/boto,nikhilraog/boto,drbild/boto,zzzirk/boto,rosmo/boto,khagler/boto,j-carl/boto,serviceagility/boto,revmischa/boto,alex/boto,acourtney2015/boto,kouk/boto,darjus-amzn/boto,ekalosak/boto,ryansb/boto,tpodowd/boto,campenberger/boto,tpodowd/boto,kouk/boto,awatts/boto,lra/boto,vijaylbais/boto,appneta/boto,trademob/boto,israelbenatar/boto,weebygames/boto,dimdung/boto,varunarya10/boto,rayluo/boto,jotes/boto,SaranyaKarthikeyan/boto,Pretio/boto,abridgett/boto,pfhayes/boto,nexusz99/boto,podhmo/boto,jindongh/boto,bleib1dj/boto,zachmullen/boto,stevenbrichards/boto,s0enke/boto,appneta/boto,elainexmas/boto,clouddocx/boto,Asana/boto,drbild/boto,alex/boto,bryx-inc/boto,ddzialak/boto,weka-io/boto
|
Add test for iteration break of Route53 records
|
#!/usr/bin/env python
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.route53.zone import Zone
from tests.compat import mock, unittest
class TestZone(unittest.TestCase):
def test_find_records(self):
mock_connection = mock.Mock()
zone = Zone(mock_connection, {})
zone.id = None
rr_names = ['amazon.com', 'amazon.com', 'aws.amazon.com',
'aws.amazon.com']
mock_rrs = []
# Create some mock resource records.
for rr_name in rr_names:
mock_rr = mock.Mock()
mock_rr.name = rr_name
mock_rr.type = 'A'
mock_rr.weight = None
mock_rr.region = None
mock_rrs.append(mock_rr)
# Set the last resource record to ``None``. The ``find_records`` loop
# should never hit this.
mock_rrs[3] = None
mock_connection.get_all_rrsets.return_value = mock_rrs
mock_connection._make_qualified.return_value = 'amazon.com'
# Ensure that the ``None`` type object was not iterated over.
try:
result_rrs = zone.find_records('amazon.com', 'A', all=True)
except AttributeError as e:
self.fail("find_records() iterated too far into resource"
" record list.")
# Determine that the resulting records are correct.
self.assertEqual(result_rrs, [mock_rrs[0], mock_rrs[1]])
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add test for iteration break of Route53 records<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.route53.zone import Zone
from tests.compat import mock, unittest
class TestZone(unittest.TestCase):
def test_find_records(self):
mock_connection = mock.Mock()
zone = Zone(mock_connection, {})
zone.id = None
rr_names = ['amazon.com', 'amazon.com', 'aws.amazon.com',
'aws.amazon.com']
mock_rrs = []
# Create some mock resource records.
for rr_name in rr_names:
mock_rr = mock.Mock()
mock_rr.name = rr_name
mock_rr.type = 'A'
mock_rr.weight = None
mock_rr.region = None
mock_rrs.append(mock_rr)
# Set the last resource record to ``None``. The ``find_records`` loop
# should never hit this.
mock_rrs[3] = None
mock_connection.get_all_rrsets.return_value = mock_rrs
mock_connection._make_qualified.return_value = 'amazon.com'
# Ensure that the ``None`` type object was not iterated over.
try:
result_rrs = zone.find_records('amazon.com', 'A', all=True)
except AttributeError as e:
self.fail("find_records() iterated too far into resource"
" record list.")
# Determine that the resulting records are correct.
self.assertEqual(result_rrs, [mock_rrs[0], mock_rrs[1]])
if __name__ == "__main__":
unittest.main()
|
Add test for iteration break of Route53 records#!/usr/bin/env python
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.route53.zone import Zone
from tests.compat import mock, unittest
class TestZone(unittest.TestCase):
def test_find_records(self):
mock_connection = mock.Mock()
zone = Zone(mock_connection, {})
zone.id = None
rr_names = ['amazon.com', 'amazon.com', 'aws.amazon.com',
'aws.amazon.com']
mock_rrs = []
# Create some mock resource records.
for rr_name in rr_names:
mock_rr = mock.Mock()
mock_rr.name = rr_name
mock_rr.type = 'A'
mock_rr.weight = None
mock_rr.region = None
mock_rrs.append(mock_rr)
# Set the last resource record to ``None``. The ``find_records`` loop
# should never hit this.
mock_rrs[3] = None
mock_connection.get_all_rrsets.return_value = mock_rrs
mock_connection._make_qualified.return_value = 'amazon.com'
# Ensure that the ``None`` type object was not iterated over.
try:
result_rrs = zone.find_records('amazon.com', 'A', all=True)
except AttributeError as e:
self.fail("find_records() iterated too far into resource"
" record list.")
# Determine that the resulting records are correct.
self.assertEqual(result_rrs, [mock_rrs[0], mock_rrs[1]])
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add test for iteration break of Route53 records<commit_after>#!/usr/bin/env python
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.route53.zone import Zone
from tests.compat import mock, unittest
class TestZone(unittest.TestCase):
def test_find_records(self):
mock_connection = mock.Mock()
zone = Zone(mock_connection, {})
zone.id = None
rr_names = ['amazon.com', 'amazon.com', 'aws.amazon.com',
'aws.amazon.com']
mock_rrs = []
# Create some mock resource records.
for rr_name in rr_names:
mock_rr = mock.Mock()
mock_rr.name = rr_name
mock_rr.type = 'A'
mock_rr.weight = None
mock_rr.region = None
mock_rrs.append(mock_rr)
# Set the last resource record to ``None``. The ``find_records`` loop
# should never hit this.
mock_rrs[3] = None
mock_connection.get_all_rrsets.return_value = mock_rrs
mock_connection._make_qualified.return_value = 'amazon.com'
# Ensure that the ``None`` type object was not iterated over.
try:
result_rrs = zone.find_records('amazon.com', 'A', all=True)
except AttributeError as e:
self.fail("find_records() iterated too far into resource"
" record list.")
# Determine that the resulting records are correct.
self.assertEqual(result_rrs, [mock_rrs[0], mock_rrs[1]])
if __name__ == "__main__":
unittest.main()
|
|
2a9bcc7360faabbaf0988687edad945fea454b3d
|
IPython/testing/plugin/test_ipdoctest.py
|
IPython/testing/plugin/test_ipdoctest.py
|
"""Tests for the ipdoctest machinery itself.
Note: in a file named test_X, functions whose only test is their docstring (as
a doctest) and which have no test functionality of their own, should be called
'doctest_foo' instead of 'test_foo', otherwise they get double-counted (the
empty function call is counted as a test, which just inflates tests numbers
artificially).
"""
def doctest_simple():
"""ipdoctest must handle simple inputs
In [1]: 1
Out[1]: 1
In [2]: print 1
1
"""
def doctest_multiline1():
"""The ipdoctest machinery must handle multiline examples gracefully.
In [2]: for i in range(10):
...: print i,
...:
0 1 2 3 4 5 6 7 8 9
"""
def doctest_multiline2():
"""Multiline examples that define functions and print output.
In [7]: def f(x):
...: return x+1
...:
In [8]: f(1)
Out[8]: 2
In [9]: def g(x):
...: print 'x is:',x
...:
In [10]: g(1)
x is: 1
In [11]: g('hello')
x is: hello
"""
def doctest_multiline3():
"""Multiline examples with blank lines.
In [12]: def h(x):
....: if x>1:
....: return x**2
....: # To leave a blank line in the input, you must mark it
....: # with a comment character:
....: #
....: # otherwise the doctest parser gets confused.
....: else:
....: return -1
....:
In [13]: h(5)
Out[13]: 25
In [14]: h(1)
Out[14]: -1
In [15]: h(0)
Out[15]: -1
"""
|
Add a few tests for ipdoctest itself. Not very complete yet, but a start.
|
Add a few tests for ipdoctest itself. Not very complete yet, but a start.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
Add a few tests for ipdoctest itself. Not very complete yet, but a start.
|
"""Tests for the ipdoctest machinery itself.
Note: in a file named test_X, functions whose only test is their docstring (as
a doctest) and which have no test functionality of their own, should be called
'doctest_foo' instead of 'test_foo', otherwise they get double-counted (the
empty function call is counted as a test, which just inflates tests numbers
artificially).
"""
def doctest_simple():
"""ipdoctest must handle simple inputs
In [1]: 1
Out[1]: 1
In [2]: print 1
1
"""
def doctest_multiline1():
"""The ipdoctest machinery must handle multiline examples gracefully.
In [2]: for i in range(10):
...: print i,
...:
0 1 2 3 4 5 6 7 8 9
"""
def doctest_multiline2():
"""Multiline examples that define functions and print output.
In [7]: def f(x):
...: return x+1
...:
In [8]: f(1)
Out[8]: 2
In [9]: def g(x):
...: print 'x is:',x
...:
In [10]: g(1)
x is: 1
In [11]: g('hello')
x is: hello
"""
def doctest_multiline3():
"""Multiline examples with blank lines.
In [12]: def h(x):
....: if x>1:
....: return x**2
....: # To leave a blank line in the input, you must mark it
....: # with a comment character:
....: #
....: # otherwise the doctest parser gets confused.
....: else:
....: return -1
....:
In [13]: h(5)
Out[13]: 25
In [14]: h(1)
Out[14]: -1
In [15]: h(0)
Out[15]: -1
"""
|
<commit_before><commit_msg>Add a few tests for ipdoctest itself. Not very complete yet, but a start.<commit_after>
|
"""Tests for the ipdoctest machinery itself.
Note: in a file named test_X, functions whose only test is their docstring (as
a doctest) and which have no test functionality of their own, should be called
'doctest_foo' instead of 'test_foo', otherwise they get double-counted (the
empty function call is counted as a test, which just inflates tests numbers
artificially).
"""
def doctest_simple():
"""ipdoctest must handle simple inputs
In [1]: 1
Out[1]: 1
In [2]: print 1
1
"""
def doctest_multiline1():
"""The ipdoctest machinery must handle multiline examples gracefully.
In [2]: for i in range(10):
...: print i,
...:
0 1 2 3 4 5 6 7 8 9
"""
def doctest_multiline2():
"""Multiline examples that define functions and print output.
In [7]: def f(x):
...: return x+1
...:
In [8]: f(1)
Out[8]: 2
In [9]: def g(x):
...: print 'x is:',x
...:
In [10]: g(1)
x is: 1
In [11]: g('hello')
x is: hello
"""
def doctest_multiline3():
"""Multiline examples with blank lines.
In [12]: def h(x):
....: if x>1:
....: return x**2
....: # To leave a blank line in the input, you must mark it
....: # with a comment character:
....: #
....: # otherwise the doctest parser gets confused.
....: else:
....: return -1
....:
In [13]: h(5)
Out[13]: 25
In [14]: h(1)
Out[14]: -1
In [15]: h(0)
Out[15]: -1
"""
|
Add a few tests for ipdoctest itself. Not very complete yet, but a start."""Tests for the ipdoctest machinery itself.
Note: in a file named test_X, functions whose only test is their docstring (as
a doctest) and which have no test functionality of their own, should be called
'doctest_foo' instead of 'test_foo', otherwise they get double-counted (the
empty function call is counted as a test, which just inflates tests numbers
artificially).
"""
def doctest_simple():
"""ipdoctest must handle simple inputs
In [1]: 1
Out[1]: 1
In [2]: print 1
1
"""
def doctest_multiline1():
"""The ipdoctest machinery must handle multiline examples gracefully.
In [2]: for i in range(10):
...: print i,
...:
0 1 2 3 4 5 6 7 8 9
"""
def doctest_multiline2():
"""Multiline examples that define functions and print output.
In [7]: def f(x):
...: return x+1
...:
In [8]: f(1)
Out[8]: 2
In [9]: def g(x):
...: print 'x is:',x
...:
In [10]: g(1)
x is: 1
In [11]: g('hello')
x is: hello
"""
def doctest_multiline3():
"""Multiline examples with blank lines.
In [12]: def h(x):
....: if x>1:
....: return x**2
....: # To leave a blank line in the input, you must mark it
....: # with a comment character:
....: #
....: # otherwise the doctest parser gets confused.
....: else:
....: return -1
....:
In [13]: h(5)
Out[13]: 25
In [14]: h(1)
Out[14]: -1
In [15]: h(0)
Out[15]: -1
"""
|
<commit_before><commit_msg>Add a few tests for ipdoctest itself. Not very complete yet, but a start.<commit_after>"""Tests for the ipdoctest machinery itself.
Note: in a file named test_X, functions whose only test is their docstring (as
a doctest) and which have no test functionality of their own, should be called
'doctest_foo' instead of 'test_foo', otherwise they get double-counted (the
empty function call is counted as a test, which just inflates tests numbers
artificially).
"""
def doctest_simple():
"""ipdoctest must handle simple inputs
In [1]: 1
Out[1]: 1
In [2]: print 1
1
"""
def doctest_multiline1():
"""The ipdoctest machinery must handle multiline examples gracefully.
In [2]: for i in range(10):
...: print i,
...:
0 1 2 3 4 5 6 7 8 9
"""
def doctest_multiline2():
"""Multiline examples that define functions and print output.
In [7]: def f(x):
...: return x+1
...:
In [8]: f(1)
Out[8]: 2
In [9]: def g(x):
...: print 'x is:',x
...:
In [10]: g(1)
x is: 1
In [11]: g('hello')
x is: hello
"""
def doctest_multiline3():
"""Multiline examples with blank lines.
In [12]: def h(x):
....: if x>1:
....: return x**2
....: # To leave a blank line in the input, you must mark it
....: # with a comment character:
....: #
....: # otherwise the doctest parser gets confused.
....: else:
....: return -1
....:
In [13]: h(5)
Out[13]: 25
In [14]: h(1)
Out[14]: -1
In [15]: h(0)
Out[15]: -1
"""
|
|
0eb648a58dc59a36b83537a6de9bc4635035b0a4
|
raiden/tests/unit/storage/test_storage.py
|
raiden/tests/unit/storage/test_storage.py
|
from unittest.mock import patch
import pytest
from raiden.storage.sqlite import RAIDEN_DB_VERSION, SQLiteStorage
def test_transaction_commit(tmp_path):
filename = f'v{RAIDEN_DB_VERSION}_db.log'
storage = SQLiteStorage(f'{tmp_path}/{filename}')
with storage.transaction():
with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=1000):
storage.update_version()
assert storage.get_version() == 1000
def test_transaction_rollback(tmp_path):
filename = f'v{RAIDEN_DB_VERSION}_db.log'
storage = SQLiteStorage(f'{tmp_path}/{filename}')
storage.update_version()
assert storage.get_version() == RAIDEN_DB_VERSION
with pytest.raises(KeyboardInterrupt):
with storage.transaction():
with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=1000):
storage.update_version()
raise KeyboardInterrupt()
assert storage.get_version() == RAIDEN_DB_VERSION
|
Add small storage transaction tests
|
Add small storage transaction tests
|
Python
|
mit
|
hackaugusto/raiden,hackaugusto/raiden
|
Add small storage transaction tests
|
from unittest.mock import patch
import pytest
from raiden.storage.sqlite import RAIDEN_DB_VERSION, SQLiteStorage
def test_transaction_commit(tmp_path):
filename = f'v{RAIDEN_DB_VERSION}_db.log'
storage = SQLiteStorage(f'{tmp_path}/{filename}')
with storage.transaction():
with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=1000):
storage.update_version()
assert storage.get_version() == 1000
def test_transaction_rollback(tmp_path):
filename = f'v{RAIDEN_DB_VERSION}_db.log'
storage = SQLiteStorage(f'{tmp_path}/{filename}')
storage.update_version()
assert storage.get_version() == RAIDEN_DB_VERSION
with pytest.raises(KeyboardInterrupt):
with storage.transaction():
with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=1000):
storage.update_version()
raise KeyboardInterrupt()
assert storage.get_version() == RAIDEN_DB_VERSION
|
<commit_before><commit_msg>Add small storage transaction tests<commit_after>
|
from unittest.mock import patch
import pytest
from raiden.storage.sqlite import RAIDEN_DB_VERSION, SQLiteStorage
def test_transaction_commit(tmp_path):
filename = f'v{RAIDEN_DB_VERSION}_db.log'
storage = SQLiteStorage(f'{tmp_path}/{filename}')
with storage.transaction():
with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=1000):
storage.update_version()
assert storage.get_version() == 1000
def test_transaction_rollback(tmp_path):
filename = f'v{RAIDEN_DB_VERSION}_db.log'
storage = SQLiteStorage(f'{tmp_path}/{filename}')
storage.update_version()
assert storage.get_version() == RAIDEN_DB_VERSION
with pytest.raises(KeyboardInterrupt):
with storage.transaction():
with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=1000):
storage.update_version()
raise KeyboardInterrupt()
assert storage.get_version() == RAIDEN_DB_VERSION
|
Add small storage transaction testsfrom unittest.mock import patch
import pytest
from raiden.storage.sqlite import RAIDEN_DB_VERSION, SQLiteStorage
def test_transaction_commit(tmp_path):
filename = f'v{RAIDEN_DB_VERSION}_db.log'
storage = SQLiteStorage(f'{tmp_path}/{filename}')
with storage.transaction():
with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=1000):
storage.update_version()
assert storage.get_version() == 1000
def test_transaction_rollback(tmp_path):
filename = f'v{RAIDEN_DB_VERSION}_db.log'
storage = SQLiteStorage(f'{tmp_path}/{filename}')
storage.update_version()
assert storage.get_version() == RAIDEN_DB_VERSION
with pytest.raises(KeyboardInterrupt):
with storage.transaction():
with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=1000):
storage.update_version()
raise KeyboardInterrupt()
assert storage.get_version() == RAIDEN_DB_VERSION
|
<commit_before><commit_msg>Add small storage transaction tests<commit_after>from unittest.mock import patch
import pytest
from raiden.storage.sqlite import RAIDEN_DB_VERSION, SQLiteStorage
def test_transaction_commit(tmp_path):
filename = f'v{RAIDEN_DB_VERSION}_db.log'
storage = SQLiteStorage(f'{tmp_path}/{filename}')
with storage.transaction():
with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=1000):
storage.update_version()
assert storage.get_version() == 1000
def test_transaction_rollback(tmp_path):
filename = f'v{RAIDEN_DB_VERSION}_db.log'
storage = SQLiteStorage(f'{tmp_path}/{filename}')
storage.update_version()
assert storage.get_version() == RAIDEN_DB_VERSION
with pytest.raises(KeyboardInterrupt):
with storage.transaction():
with patch('raiden.storage.sqlite.RAIDEN_DB_VERSION', new=1000):
storage.update_version()
raise KeyboardInterrupt()
assert storage.get_version() == RAIDEN_DB_VERSION
|
|
d7f5da9cce78cec57720bff37e0c21604d0b93d1
|
pft/tests/test_users.py
|
pft/tests/test_users.py
|
"""User Model Tests."""
import unittest
from .. import create_app, db
from ..database import User
class UserModelTestCase(unittest.TestCase):
"""User model tests."""
def setUp(self):
"""Set up tests."""
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
"""Clean up after tests."""
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
"""Test password is hased and set."""
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
"""Test can not get password."""
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
"""Test password verification."""
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
"""Test password salts are random."""
u = User(password='cat')
u2 = User(password='cat')
self.assertTrue(u.password_hash != u2.password_hash)
|
Add unit tests for User
|
Add unit tests for User
|
Python
|
unknown
|
gregcowell/PFT,gregcowell/BAM,gregcowell/PFT,gregcowell/BAM
|
Add unit tests for User
|
"""User Model Tests."""
import unittest
from .. import create_app, db
from ..database import User
class UserModelTestCase(unittest.TestCase):
"""User model tests."""
def setUp(self):
"""Set up tests."""
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
"""Clean up after tests."""
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
"""Test password is hased and set."""
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
"""Test can not get password."""
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
"""Test password verification."""
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
"""Test password salts are random."""
u = User(password='cat')
u2 = User(password='cat')
self.assertTrue(u.password_hash != u2.password_hash)
|
<commit_before><commit_msg>Add unit tests for User<commit_after>
|
"""User Model Tests."""
import unittest
from .. import create_app, db
from ..database import User
class UserModelTestCase(unittest.TestCase):
"""User model tests."""
def setUp(self):
"""Set up tests."""
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
"""Clean up after tests."""
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
"""Test password is hased and set."""
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
"""Test can not get password."""
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
"""Test password verification."""
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
"""Test password salts are random."""
u = User(password='cat')
u2 = User(password='cat')
self.assertTrue(u.password_hash != u2.password_hash)
|
Add unit tests for User"""User Model Tests."""
import unittest
from .. import create_app, db
from ..database import User
class UserModelTestCase(unittest.TestCase):
"""User model tests."""
def setUp(self):
"""Set up tests."""
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
"""Clean up after tests."""
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
"""Test password is hased and set."""
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
"""Test can not get password."""
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
"""Test password verification."""
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
"""Test password salts are random."""
u = User(password='cat')
u2 = User(password='cat')
self.assertTrue(u.password_hash != u2.password_hash)
|
<commit_before><commit_msg>Add unit tests for User<commit_after>"""User Model Tests."""
import unittest
from .. import create_app, db
from ..database import User
class UserModelTestCase(unittest.TestCase):
"""User model tests."""
def setUp(self):
"""Set up tests."""
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
"""Clean up after tests."""
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
"""Test password is hased and set."""
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
"""Test can not get password."""
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
"""Test password verification."""
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
"""Test password salts are random."""
u = User(password='cat')
u2 = User(password='cat')
self.assertTrue(u.password_hash != u2.password_hash)
|
|
6bb921082a168cf31b7499bc62f2a26c223e9686
|
tests/test_autoreconnect.py
|
tests/test_autoreconnect.py
|
#!/usr/bin/env python
from graphitesend import graphitesend
import unittest
import socket
class TestAutoreconnect(unittest.TestCase):
def setUp(self):
""" reset graphitesend """
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.server.bind(('localhost', 2003))
self.server.listen(5)
def tearDown(self):
""" reset graphitesend """
        # Drop any connections or modules that have been set up from other tests
graphitesend.reset()
try:
self.server.shutdown(socket.SHUT_RD)
self.server.close()
except Exception:
pass
self.server = None
def test_set_autoreconnect_default(self):
g = graphitesend.init(dryrun=True)
self.assertEqual(g._autoreconnect, False)
def test_set_autoreconnect_true(self):
g = graphitesend.init(dryrun=True, autoreconnect=True)
self.assertEqual(g._autoreconnect, True)
def test_set_autoreconnect_false(self):
g = graphitesend.init(dryrun=True, autoreconnect=False)
self.assertEqual(g._autoreconnect, False)
def test_autoreconnect(self):
g = graphitesend.GraphiteClient(autoreconnect=True)
g.send("metric", 42)
self.tearDown()
with self.assertRaises(graphitesend.GraphiteSendException):
g.send("metric", 2)
self.setUp()
g.send("metric", 3)
|
Add tests for autoreconnect feature
|
Add tests for autoreconnect feature
|
Python
|
apache-2.0
|
numberly/graphitesend,daniellawrence/graphitesend,PabloLefort/graphitesend
|
Add tests for autoreconnect feature
|
#!/usr/bin/env python
from graphitesend import graphitesend
import unittest
import socket
class TestAutoreconnect(unittest.TestCase):
def setUp(self):
""" reset graphitesend """
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.server.bind(('localhost', 2003))
self.server.listen(5)
def tearDown(self):
""" reset graphitesend """
        # Drop any connections or modules that have been set up from other tests
graphitesend.reset()
try:
self.server.shutdown(socket.SHUT_RD)
self.server.close()
except Exception:
pass
self.server = None
def test_set_autoreconnect_default(self):
g = graphitesend.init(dryrun=True)
self.assertEqual(g._autoreconnect, False)
def test_set_autoreconnect_true(self):
g = graphitesend.init(dryrun=True, autoreconnect=True)
self.assertEqual(g._autoreconnect, True)
def test_set_autoreconnect_false(self):
g = graphitesend.init(dryrun=True, autoreconnect=False)
self.assertEqual(g._autoreconnect, False)
def test_autoreconnect(self):
g = graphitesend.GraphiteClient(autoreconnect=True)
g.send("metric", 42)
self.tearDown()
with self.assertRaises(graphitesend.GraphiteSendException):
g.send("metric", 2)
self.setUp()
g.send("metric", 3)
|
<commit_before><commit_msg>Add tests for autoreconnect feature<commit_after>
|
#!/usr/bin/env python
from graphitesend import graphitesend
import unittest
import socket
class TestAutoreconnect(unittest.TestCase):
def setUp(self):
""" reset graphitesend """
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.server.bind(('localhost', 2003))
self.server.listen(5)
def tearDown(self):
""" reset graphitesend """
        # Drop any connections or modules that have been set up from other tests
graphitesend.reset()
try:
self.server.shutdown(socket.SHUT_RD)
self.server.close()
except Exception:
pass
self.server = None
def test_set_autoreconnect_default(self):
g = graphitesend.init(dryrun=True)
self.assertEqual(g._autoreconnect, False)
def test_set_autoreconnect_true(self):
g = graphitesend.init(dryrun=True, autoreconnect=True)
self.assertEqual(g._autoreconnect, True)
def test_set_autoreconnect_false(self):
g = graphitesend.init(dryrun=True, autoreconnect=False)
self.assertEqual(g._autoreconnect, False)
def test_autoreconnect(self):
g = graphitesend.GraphiteClient(autoreconnect=True)
g.send("metric", 42)
self.tearDown()
with self.assertRaises(graphitesend.GraphiteSendException):
g.send("metric", 2)
self.setUp()
g.send("metric", 3)
|
Add tests for autoreconnect feature#!/usr/bin/env python
from graphitesend import graphitesend
import unittest
import socket
class TestAutoreconnect(unittest.TestCase):
def setUp(self):
""" reset graphitesend """
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.server.bind(('localhost', 2003))
self.server.listen(5)
def tearDown(self):
""" reset graphitesend """
        # Drop any connections or modules that have been set up from other tests
graphitesend.reset()
try:
self.server.shutdown(socket.SHUT_RD)
self.server.close()
except Exception:
pass
self.server = None
def test_set_autoreconnect_default(self):
g = graphitesend.init(dryrun=True)
self.assertEqual(g._autoreconnect, False)
def test_set_autoreconnect_true(self):
g = graphitesend.init(dryrun=True, autoreconnect=True)
self.assertEqual(g._autoreconnect, True)
def test_set_autoreconnect_false(self):
g = graphitesend.init(dryrun=True, autoreconnect=False)
self.assertEqual(g._autoreconnect, False)
def test_autoreconnect(self):
g = graphitesend.GraphiteClient(autoreconnect=True)
g.send("metric", 42)
self.tearDown()
with self.assertRaises(graphitesend.GraphiteSendException):
g.send("metric", 2)
self.setUp()
g.send("metric", 3)
|
<commit_before><commit_msg>Add tests for autoreconnect feature<commit_after>#!/usr/bin/env python
from graphitesend import graphitesend
import unittest
import socket
class TestAutoreconnect(unittest.TestCase):
def setUp(self):
""" reset graphitesend """
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.server.bind(('localhost', 2003))
self.server.listen(5)
def tearDown(self):
""" reset graphitesend """
        # Drop any connections or modules that have been set up from other tests
graphitesend.reset()
try:
self.server.shutdown(socket.SHUT_RD)
self.server.close()
except Exception:
pass
self.server = None
def test_set_autoreconnect_default(self):
g = graphitesend.init(dryrun=True)
self.assertEqual(g._autoreconnect, False)
def test_set_autoreconnect_true(self):
g = graphitesend.init(dryrun=True, autoreconnect=True)
self.assertEqual(g._autoreconnect, True)
def test_set_autoreconnect_false(self):
g = graphitesend.init(dryrun=True, autoreconnect=False)
self.assertEqual(g._autoreconnect, False)
def test_autoreconnect(self):
g = graphitesend.GraphiteClient(autoreconnect=True)
g.send("metric", 42)
self.tearDown()
with self.assertRaises(graphitesend.GraphiteSendException):
g.send("metric", 2)
self.setUp()
g.send("metric", 3)
|
|
f9724b7a3eea113b897e2109bd5fc9942314ffe1
|
helios/migrations/0004_auto_20160509_1804.py
|
helios/migrations/0004_auto_20160509_1804.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('helios', '0003_castvote_cast_from'),
]
operations = [
migrations.RenameField(
model_name='castvote',
old_name='cast_from',
new_name='cast_ip',
),
]
|
Update migrations for cast_ip implemented in official Helios
|
Update migrations for cast_ip implemented in official Helios
|
Python
|
agpl-3.0
|
RunasSudo/helios-server-mixnet,RunasSudo/helios-server-mixnet,RunasSudo/helios-server-mixnet,RunasSudo/helios-server-mixnet,RunasSudo/helios-server-mixnet
|
Update migrations for cast_ip implemented in official Helios
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('helios', '0003_castvote_cast_from'),
]
operations = [
migrations.RenameField(
model_name='castvote',
old_name='cast_from',
new_name='cast_ip',
),
]
|
<commit_before><commit_msg>Update migrations for cast_ip implemented in official Helios<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('helios', '0003_castvote_cast_from'),
]
operations = [
migrations.RenameField(
model_name='castvote',
old_name='cast_from',
new_name='cast_ip',
),
]
|
Update migrations for cast_ip implemented in official Helios# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('helios', '0003_castvote_cast_from'),
]
operations = [
migrations.RenameField(
model_name='castvote',
old_name='cast_from',
new_name='cast_ip',
),
]
|
<commit_before><commit_msg>Update migrations for cast_ip implemented in official Helios<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('helios', '0003_castvote_cast_from'),
]
operations = [
migrations.RenameField(
model_name='castvote',
old_name='cast_from',
new_name='cast_ip',
),
]
|
|
b0cded39e3d318103f7a6bbd36a9a78a5b7c2e5a
|
cli-create-dataset-table.py
|
cli-create-dataset-table.py
|
#!/usr/bin/env python
''' Create dataset table (w/ sample schema) '''
import httplib2
from apiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.errors import HttpError
from config import Credential as gc
dataset_id = 'DATASET-ID-HERE'
new_table_id = 'NEW-TABLE-NAME-HERE'
f = file(gc.private_key, 'rb')
key = f.read()
f.close()
credentials = SignedJwtAssertionCredentials(
gc.service_account_email,
key,
scope='https://www.googleapis.com/auth/bigquery')
http = httplib2.Http()
http = credentials.authorize(http)
service = build('bigquery', 'v2', http=http)
try:
tables = service.tables()
# Construct the request body object w/ sample table schema
request_body = {
"schema": {
"fields": [
{
"mode": "REQUIRED",
"type": "STRING",
"name": "id",
},
{
"mode": "REQUIRED",
"type": "STRING",
"name": "full_name",
},
{
"mode": "REQUIRED",
"type": "STRING",
"name": "email_address",
}
],
},
"tableReference": {
"projectId": gc.project_id,
"tableId": new_table_id,
"datasetId": dataset_id
}
}
response = tables.insert(projectId=gc.project_id,
datasetId=dataset_id,
body=request_body).execute()
# print out the response
    print(response)
except HttpError as err:
print 'Error:', err.content
|
Add create (dataset) table sample script
|
Add create (dataset) table sample script
|
Python
|
mit
|
rawswift/google-bigquery-python-cli
|
Add create (dataset) table sample script
|
#!/usr/bin/env python
''' Create dataset table (w/ sample schema) '''
import httplib2
from apiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.errors import HttpError
from config import Credential as gc
dataset_id = 'DATASET-ID-HERE'
new_table_id = 'NEW-TABLE-NAME-HERE'
f = file(gc.private_key, 'rb')
key = f.read()
f.close()
credentials = SignedJwtAssertionCredentials(
gc.service_account_email,
key,
scope='https://www.googleapis.com/auth/bigquery')
http = httplib2.Http()
http = credentials.authorize(http)
service = build('bigquery', 'v2', http=http)
try:
tables = service.tables()
# Construct the request body object w/ sample table schema
request_body = {
"schema": {
"fields": [
{
"mode": "REQUIRED",
"type": "STRING",
"name": "id",
},
{
"mode": "REQUIRED",
"type": "STRING",
"name": "full_name",
},
{
"mode": "REQUIRED",
"type": "STRING",
"name": "email_address",
}
],
},
"tableReference": {
"projectId": gc.project_id,
"tableId": new_table_id,
"datasetId": dataset_id
}
}
response = tables.insert(projectId=gc.project_id,
datasetId=dataset_id,
body=request_body).execute()
# print out the response
    print(response)
except HttpError as err:
print 'Error:', err.content
|
<commit_before><commit_msg>Add create (dataset) table sample script<commit_after>
|
#!/usr/bin/env python
''' Create dataset table (w/ sample schema) '''
import httplib2
from apiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.errors import HttpError
from config import Credential as gc
dataset_id = 'DATASET-ID-HERE'
new_table_id = 'NEW-TABLE-NAME-HERE'
f = file(gc.private_key, 'rb')
key = f.read()
f.close()
credentials = SignedJwtAssertionCredentials(
gc.service_account_email,
key,
scope='https://www.googleapis.com/auth/bigquery')
http = httplib2.Http()
http = credentials.authorize(http)
service = build('bigquery', 'v2', http=http)
try:
tables = service.tables()
# Construct the request body object w/ sample table schema
request_body = {
"schema": {
"fields": [
{
"mode": "REQUIRED",
"type": "STRING",
"name": "id",
},
{
"mode": "REQUIRED",
"type": "STRING",
"name": "full_name",
},
{
"mode": "REQUIRED",
"type": "STRING",
"name": "email_address",
}
],
},
"tableReference": {
"projectId": gc.project_id,
"tableId": new_table_id,
"datasetId": dataset_id
}
}
response = tables.insert(projectId=gc.project_id,
datasetId=dataset_id,
body=request_body).execute()
# print out the response
    print(response)
except HttpError as err:
print 'Error:', err.content
|
Add create (dataset) table sample script#!/usr/bin/env python
''' Create dataset table (w/ sample schema) '''
import httplib2
from apiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.errors import HttpError
from config import Credential as gc
dataset_id = 'DATASET-ID-HERE'
new_table_id = 'NEW-TABLE-NAME-HERE'
f = file(gc.private_key, 'rb')
key = f.read()
f.close()
credentials = SignedJwtAssertionCredentials(
gc.service_account_email,
key,
scope='https://www.googleapis.com/auth/bigquery')
http = httplib2.Http()
http = credentials.authorize(http)
service = build('bigquery', 'v2', http=http)
try:
tables = service.tables()
# Construct the request body object w/ sample table schema
request_body = {
"schema": {
"fields": [
{
"mode": "REQUIRED",
"type": "STRING",
"name": "id",
},
{
"mode": "REQUIRED",
"type": "STRING",
"name": "full_name",
},
{
"mode": "REQUIRED",
"type": "STRING",
"name": "email_address",
}
],
},
"tableReference": {
"projectId": gc.project_id,
"tableId": new_table_id,
"datasetId": dataset_id
}
}
response = tables.insert(projectId=gc.project_id,
datasetId=dataset_id,
body=request_body).execute()
# print out the response
    print(response)
except HttpError as err:
print 'Error:', err.content
|
<commit_before><commit_msg>Add create (dataset) table sample script<commit_after>#!/usr/bin/env python
''' Create dataset table (w/ sample schema) '''
import httplib2
from apiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.errors import HttpError
from config import Credential as gc
dataset_id = 'DATASET-ID-HERE'
new_table_id = 'NEW-TABLE-NAME-HERE'
f = file(gc.private_key, 'rb')
key = f.read()
f.close()
credentials = SignedJwtAssertionCredentials(
gc.service_account_email,
key,
scope='https://www.googleapis.com/auth/bigquery')
http = httplib2.Http()
http = credentials.authorize(http)
service = build('bigquery', 'v2', http=http)
try:
tables = service.tables()
# Construct the request body object w/ sample table schema
request_body = {
"schema": {
"fields": [
{
"mode": "REQUIRED",
"type": "STRING",
"name": "id",
},
{
"mode": "REQUIRED",
"type": "STRING",
"name": "full_name",
},
{
"mode": "REQUIRED",
"type": "STRING",
"name": "email_address",
}
],
},
"tableReference": {
"projectId": gc.project_id,
"tableId": new_table_id,
"datasetId": dataset_id
}
}
response = tables.insert(projectId=gc.project_id,
datasetId=dataset_id,
body=request_body).execute()
# print out the response
    print(response)
except HttpError as err:
print 'Error:', err.content
|
|
fea181bf41c478042df00cee5dc33864751a4ed2
|
oidc_provider/migrations/0002_userconsent.py
|
oidc_provider/migrations/0002_userconsent.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('oidc_provider', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='UserConsent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('expires_at', models.DateTimeField()),
('_scope', models.TextField(default=b'')),
('client', models.ForeignKey(to='oidc_provider.Client')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
|
Add migrations for user consent.
|
Add migrations for user consent.
|
Python
|
mit
|
torreco/django-oidc-provider,ByteInternet/django-oidc-provider,Sjord/django-oidc-provider,wayward710/django-oidc-provider,bunnyinc/django-oidc-provider,juanifioren/django-oidc-provider,Sjord/django-oidc-provider,bunnyinc/django-oidc-provider,wojtek-fliposports/django-oidc-provider,wojtek-fliposports/django-oidc-provider,juanifioren/django-oidc-provider,wayward710/django-oidc-provider,torreco/django-oidc-provider,nmohoric/django-oidc-provider,nmohoric/django-oidc-provider,ByteInternet/django-oidc-provider
|
Add migrations for user consent.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('oidc_provider', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='UserConsent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('expires_at', models.DateTimeField()),
('_scope', models.TextField(default=b'')),
('client', models.ForeignKey(to='oidc_provider.Client')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
|
<commit_before><commit_msg>Add migrations for user consent.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('oidc_provider', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='UserConsent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('expires_at', models.DateTimeField()),
('_scope', models.TextField(default=b'')),
('client', models.ForeignKey(to='oidc_provider.Client')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
|
Add migrations for user consent.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('oidc_provider', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='UserConsent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('expires_at', models.DateTimeField()),
('_scope', models.TextField(default=b'')),
('client', models.ForeignKey(to='oidc_provider.Client')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
|
<commit_before><commit_msg>Add migrations for user consent.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('oidc_provider', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='UserConsent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('expires_at', models.DateTimeField()),
('_scope', models.TextField(default=b'')),
('client', models.ForeignKey(to='oidc_provider.Client')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
|
|
9b5a6e0da2afbfc021c7aefd1b31aed9a0cd9c85
|
osf/migrations/0121_auto_20180723_0816.py
|
osf/migrations/0121_auto_20180723_0816.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-23 13:16
from __future__ import unicode_literals
from waffle.models import Flag
from django.db import migrations
EMBER_WAFFLE_PAGES = [
'dashboard',
'home',
]
def format_ember_waffle_flag_name(page):
return '{}{}{}'.format('ember_', page, '_page')
def add_ember_waffle_flags(state, schema):
for page in EMBER_WAFFLE_PAGES:
Flag.objects.get_or_create(name=format_ember_waffle_flag_name(page), everyone=False)
return
def remove_waffle_flags(state, schema):
pages = [format_ember_waffle_flag_name(page) for page in EMBER_WAFFLE_PAGES]
Flag.objects.filter(name__in=pages).delete()
return
class Migration(migrations.Migration):
dependencies = [
('osf', '0120_merge_20180716_1457'),
]
operations = [
migrations.RunPython(remove_waffle_flags, add_ember_waffle_flags)
]
|
Add data migration to remove dashboard and home waffle flags from the db.
|
Add data migration to remove dashboard and home waffle flags from the db.
|
Python
|
apache-2.0
|
pattisdr/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,felliott/osf.io,adlius/osf.io,mattclark/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,caseyrollins/osf.io,erinspace/osf.io,erinspace/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,aaxelb/osf.io,aaxelb/osf.io,adlius/osf.io,felliott/osf.io,adlius/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,aaxelb/osf.io,felliott/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,baylee-d/osf.io,mfraezz/osf.io,cslzchen/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,mattclark/osf.io,saradbowman/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,erinspace/osf.io,felliott/osf.io
|
Add data migration to remove dashboard and home waffle flags from the db.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-23 13:16
from __future__ import unicode_literals
from waffle.models import Flag
from django.db import migrations
EMBER_WAFFLE_PAGES = [
'dashboard',
'home',
]
def format_ember_waffle_flag_name(page):
return '{}{}{}'.format('ember_', page, '_page')
def add_ember_waffle_flags(state, schema):
for page in EMBER_WAFFLE_PAGES:
Flag.objects.get_or_create(name=format_ember_waffle_flag_name(page), everyone=False)
return
def remove_waffle_flags(state, schema):
pages = [format_ember_waffle_flag_name(page) for page in EMBER_WAFFLE_PAGES]
Flag.objects.filter(name__in=pages).delete()
return
class Migration(migrations.Migration):
dependencies = [
('osf', '0120_merge_20180716_1457'),
]
operations = [
migrations.RunPython(remove_waffle_flags, add_ember_waffle_flags)
]
|
<commit_before><commit_msg>Add data migration to remove dashboard and home waffle flags from the db.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-23 13:16
from __future__ import unicode_literals
from waffle.models import Flag
from django.db import migrations
EMBER_WAFFLE_PAGES = [
'dashboard',
'home',
]
def format_ember_waffle_flag_name(page):
return '{}{}{}'.format('ember_', page, '_page')
def add_ember_waffle_flags(state, schema):
for page in EMBER_WAFFLE_PAGES:
Flag.objects.get_or_create(name=format_ember_waffle_flag_name(page), everyone=False)
return
def remove_waffle_flags(state, schema):
pages = [format_ember_waffle_flag_name(page) for page in EMBER_WAFFLE_PAGES]
Flag.objects.filter(name__in=pages).delete()
return
class Migration(migrations.Migration):
dependencies = [
('osf', '0120_merge_20180716_1457'),
]
operations = [
migrations.RunPython(remove_waffle_flags, add_ember_waffle_flags)
]
|
Add data migration to remove dashboard and home waffle flags from the db.# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-23 13:16
from __future__ import unicode_literals
from waffle.models import Flag
from django.db import migrations
EMBER_WAFFLE_PAGES = [
'dashboard',
'home',
]
def format_ember_waffle_flag_name(page):
return '{}{}{}'.format('ember_', page, '_page')
def add_ember_waffle_flags(state, schema):
for page in EMBER_WAFFLE_PAGES:
Flag.objects.get_or_create(name=format_ember_waffle_flag_name(page), everyone=False)
return
def remove_waffle_flags(state, schema):
pages = [format_ember_waffle_flag_name(page) for page in EMBER_WAFFLE_PAGES]
Flag.objects.filter(name__in=pages).delete()
return
class Migration(migrations.Migration):
dependencies = [
('osf', '0120_merge_20180716_1457'),
]
operations = [
migrations.RunPython(remove_waffle_flags, add_ember_waffle_flags)
]
|
<commit_before><commit_msg>Add data migration to remove dashboard and home waffle flags from the db.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-23 13:16
from __future__ import unicode_literals
from waffle.models import Flag
from django.db import migrations
EMBER_WAFFLE_PAGES = [
'dashboard',
'home',
]
def format_ember_waffle_flag_name(page):
return '{}{}{}'.format('ember_', page, '_page')
def add_ember_waffle_flags(state, schema):
for page in EMBER_WAFFLE_PAGES:
Flag.objects.get_or_create(name=format_ember_waffle_flag_name(page), everyone=False)
return
def remove_waffle_flags(state, schema):
pages = [format_ember_waffle_flag_name(page) for page in EMBER_WAFFLE_PAGES]
Flag.objects.filter(name__in=pages).delete()
return
class Migration(migrations.Migration):
dependencies = [
('osf', '0120_merge_20180716_1457'),
]
operations = [
migrations.RunPython(remove_waffle_flags, add_ember_waffle_flags)
]
|
|
06062f40987c1b95213825fc6851676daa78de9f
|
faker/providers/internet/pl_PL/__init__.py
|
faker/providers/internet/pl_PL/__init__.py
|
# coding=utf-8
from __future__ import unicode_literals
from .. import Provider as InternetProvider
class Provider(InternetProvider):
free_email_domains = (
'onet.pl',
'interia.pl',
'gmail.com',
'o2.pl',
'yahoo.com',
'hotmail.com',
)
tlds = ('com', 'com', 'com', 'net', 'org', 'pl', 'pl', 'pl')
replacements = (
('ą', 'a'),
('ć', 'c'),
('ę', 'e'),
('ł', 'l'),
('ń', 'n'),
('ó', 'o'),
('ś', 's'),
('ź', 'z'),
('ż', 'z'),
)
|
Add pl_PL internet data provider
|
Add pl_PL internet data provider
|
Python
|
mit
|
joke2k/faker,danhuss/faker,joke2k/faker
|
Add pl_PL internet data provider
|
# coding=utf-8
from __future__ import unicode_literals
from .. import Provider as InternetProvider
class Provider(InternetProvider):
free_email_domains = (
'onet.pl',
'interia.pl',
'gmail.com',
'o2.pl',
'yahoo.com',
'hotmail.com',
)
tlds = ('com', 'com', 'com', 'net', 'org', 'pl', 'pl', 'pl')
replacements = (
('ą', 'a'),
('ć', 'c'),
('ę', 'e'),
('ł', 'l'),
('ń', 'n'),
('ó', 'o'),
('ś', 's'),
('ź', 'z'),
('ż', 'z'),
)
|
<commit_before><commit_msg>Add pl_PL internet data provider<commit_after>
|
# coding=utf-8
from __future__ import unicode_literals
from .. import Provider as InternetProvider
class Provider(InternetProvider):
free_email_domains = (
'onet.pl',
'interia.pl',
'gmail.com',
'o2.pl',
'yahoo.com',
'hotmail.com',
)
tlds = ('com', 'com', 'com', 'net', 'org', 'pl', 'pl', 'pl')
replacements = (
('ą', 'a'),
('ć', 'c'),
('ę', 'e'),
('ł', 'l'),
('ń', 'n'),
('ó', 'o'),
('ś', 's'),
('ź', 'z'),
('ż', 'z'),
)
|
Add pl_PL internet data provider# coding=utf-8
from __future__ import unicode_literals
from .. import Provider as InternetProvider
class Provider(InternetProvider):
free_email_domains = (
'onet.pl',
'interia.pl',
'gmail.com',
'o2.pl',
'yahoo.com',
'hotmail.com',
)
tlds = ('com', 'com', 'com', 'net', 'org', 'pl', 'pl', 'pl')
replacements = (
('ą', 'a'),
('ć', 'c'),
('ę', 'e'),
('ł', 'l'),
('ń', 'n'),
('ó', 'o'),
('ś', 's'),
('ź', 'z'),
('ż', 'z'),
)
|
<commit_before><commit_msg>Add pl_PL internet data provider<commit_after># coding=utf-8
from __future__ import unicode_literals
from .. import Provider as InternetProvider
class Provider(InternetProvider):
free_email_domains = (
'onet.pl',
'interia.pl',
'gmail.com',
'o2.pl',
'yahoo.com',
'hotmail.com',
)
tlds = ('com', 'com', 'com', 'net', 'org', 'pl', 'pl', 'pl')
replacements = (
('ą', 'a'),
('ć', 'c'),
('ę', 'e'),
('ł', 'l'),
('ń', 'n'),
('ó', 'o'),
('ś', 's'),
('ź', 'z'),
('ż', 'z'),
)
|
|
c3e88d59492469f1eea89d9a1d63565439ac1611
|
daguerre/migrations/0004_hash_upload_to_dir.py
|
daguerre/migrations/0004_hash_upload_to_dir.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import daguerre.models
class Migration(migrations.Migration):
dependencies = [
('daguerre', '0003_auto_20160301_2342'),
]
operations = [
migrations.AlterField(
model_name='adjustedimage',
name='adjusted',
field=models.ImageField(max_length=45, upload_to=daguerre.models.upload_to),
),
]
|
Apply the upload_to migration on the AdjustedImage model
|
Apply the upload_to migration on the AdjustedImage model
|
Python
|
bsd-3-clause
|
littleweaver/django-daguerre,littleweaver/django-daguerre
|
Apply the upload_to migration on the AdjustedImage model
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import daguerre.models
class Migration(migrations.Migration):
dependencies = [
('daguerre', '0003_auto_20160301_2342'),
]
operations = [
migrations.AlterField(
model_name='adjustedimage',
name='adjusted',
field=models.ImageField(max_length=45, upload_to=daguerre.models.upload_to),
),
]
|
<commit_before><commit_msg>Apply the upload_to migration on the AdjustedImage model<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import daguerre.models
class Migration(migrations.Migration):
dependencies = [
('daguerre', '0003_auto_20160301_2342'),
]
operations = [
migrations.AlterField(
model_name='adjustedimage',
name='adjusted',
field=models.ImageField(max_length=45, upload_to=daguerre.models.upload_to),
),
]
|
Apply the upload_to migration on the AdjustedImage model# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import daguerre.models
class Migration(migrations.Migration):
dependencies = [
('daguerre', '0003_auto_20160301_2342'),
]
operations = [
migrations.AlterField(
model_name='adjustedimage',
name='adjusted',
field=models.ImageField(max_length=45, upload_to=daguerre.models.upload_to),
),
]
|
<commit_before><commit_msg>Apply the upload_to migration on the AdjustedImage model<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import daguerre.models
class Migration(migrations.Migration):
dependencies = [
('daguerre', '0003_auto_20160301_2342'),
]
operations = [
migrations.AlterField(
model_name='adjustedimage',
name='adjusted',
field=models.ImageField(max_length=45, upload_to=daguerre.models.upload_to),
),
]
|
|
458031c375a3003237cfa216841aa44d5909a6a2
|
migrations/versions/d1a1a428aae0_remade_articles_to_be_better.py
|
migrations/versions/d1a1a428aae0_remade_articles_to_be_better.py
|
"""Remade article to be better
Revision ID: d1a1a428aae0
Revises: 4e7b4c69111b
Create Date: 2018-10-23 22:18:11.271098
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd1a1a428aae0'
down_revision = '4e7b4c69111b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('articles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('type', sa.Enum('blog', 'workshop', name='type'), server_default='blog', nullable=False),
sa.Column('text', sa.String(), nullable=True),
sa.Column('creation_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.Column('update_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('title'),
sa.UniqueConstraint('url')
)
op.create_table('tags',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('tag', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id', 'tag')
)
op.create_table('tags_joiner',
sa.Column('tag', sa.Integer(), nullable=False),
sa.Column('article', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['article'], ['articles.id'], ),
sa.ForeignKeyConstraint(['tag'], ['tags.id'], ),
sa.PrimaryKeyConstraint('tag', 'article')
)
op.drop_table('article')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('article',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name='article_pkey'),
sa.UniqueConstraint('title', name='article_title_key')
)
op.drop_table('tags_joiner')
op.drop_table('tags')
op.drop_table('articles')
# ### end Alembic commands ###
|
Add in the migration for the previous commit
|
Add in the migration for the previous commit
|
Python
|
mit
|
dougmiller/theMetaCity,dougmiller/theMetaCity,dougmiller/theMetaCity,dougmiller/theMetaCity
|
Add in the migration for the previous commit
|
"""Remade article to be better
Revision ID: d1a1a428aae0
Revises: 4e7b4c69111b
Create Date: 2018-10-23 22:18:11.271098
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd1a1a428aae0'
down_revision = '4e7b4c69111b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('articles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('type', sa.Enum('blog', 'workshop', name='type'), server_default='blog', nullable=False),
sa.Column('text', sa.String(), nullable=True),
sa.Column('creation_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.Column('update_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('title'),
sa.UniqueConstraint('url')
)
op.create_table('tags',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('tag', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id', 'tag')
)
op.create_table('tags_joiner',
sa.Column('tag', sa.Integer(), nullable=False),
sa.Column('article', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['article'], ['articles.id'], ),
sa.ForeignKeyConstraint(['tag'], ['tags.id'], ),
sa.PrimaryKeyConstraint('tag', 'article')
)
op.drop_table('article')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('article',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name='article_pkey'),
sa.UniqueConstraint('title', name='article_title_key')
)
op.drop_table('tags_joiner')
op.drop_table('tags')
op.drop_table('articles')
# ### end Alembic commands ###
|
<commit_before><commit_msg>Add in the migration for the previous commit<commit_after>
|
"""Remade article to be better
Revision ID: d1a1a428aae0
Revises: 4e7b4c69111b
Create Date: 2018-10-23 22:18:11.271098
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd1a1a428aae0'
down_revision = '4e7b4c69111b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('articles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('type', sa.Enum('blog', 'workshop', name='type'), server_default='blog', nullable=False),
sa.Column('text', sa.String(), nullable=True),
sa.Column('creation_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.Column('update_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('title'),
sa.UniqueConstraint('url')
)
op.create_table('tags',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('tag', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id', 'tag')
)
op.create_table('tags_joiner',
sa.Column('tag', sa.Integer(), nullable=False),
sa.Column('article', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['article'], ['articles.id'], ),
sa.ForeignKeyConstraint(['tag'], ['tags.id'], ),
sa.PrimaryKeyConstraint('tag', 'article')
)
op.drop_table('article')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('article',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name='article_pkey'),
sa.UniqueConstraint('title', name='article_title_key')
)
op.drop_table('tags_joiner')
op.drop_table('tags')
op.drop_table('articles')
# ### end Alembic commands ###
|
Add in the migration for the previous commit"""Remade article to be better
Revision ID: d1a1a428aae0
Revises: 4e7b4c69111b
Create Date: 2018-10-23 22:18:11.271098
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd1a1a428aae0'
down_revision = '4e7b4c69111b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('articles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('type', sa.Enum('blog', 'workshop', name='type'), server_default='blog', nullable=False),
sa.Column('text', sa.String(), nullable=True),
sa.Column('creation_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.Column('update_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('title'),
sa.UniqueConstraint('url')
)
op.create_table('tags',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('tag', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id', 'tag')
)
op.create_table('tags_joiner',
sa.Column('tag', sa.Integer(), nullable=False),
sa.Column('article', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['article'], ['articles.id'], ),
sa.ForeignKeyConstraint(['tag'], ['tags.id'], ),
sa.PrimaryKeyConstraint('tag', 'article')
)
op.drop_table('article')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('article',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name='article_pkey'),
sa.UniqueConstraint('title', name='article_title_key')
)
op.drop_table('tags_joiner')
op.drop_table('tags')
op.drop_table('articles')
# ### end Alembic commands ###
|
<commit_before><commit_msg>Add in the migration for the previous commit<commit_after>"""Remade article to be better
Revision ID: d1a1a428aae0
Revises: 4e7b4c69111b
Create Date: 2018-10-23 22:18:11.271098
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd1a1a428aae0'
down_revision = '4e7b4c69111b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('articles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('type', sa.Enum('blog', 'workshop', name='type'), server_default='blog', nullable=False),
sa.Column('text', sa.String(), nullable=True),
sa.Column('creation_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.Column('update_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('title'),
sa.UniqueConstraint('url')
)
op.create_table('tags',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('tag', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id', 'tag')
)
op.create_table('tags_joiner',
sa.Column('tag', sa.Integer(), nullable=False),
sa.Column('article', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['article'], ['articles.id'], ),
sa.ForeignKeyConstraint(['tag'], ['tags.id'], ),
sa.PrimaryKeyConstraint('tag', 'article')
)
op.drop_table('article')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('article',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name='article_pkey'),
sa.UniqueConstraint('title', name='article_title_key')
)
op.drop_table('tags_joiner')
op.drop_table('tags')
op.drop_table('articles')
# ### end Alembic commands ###
|
|
e7f58ea998bb345796da94bfab6f1c9db50822a6
|
testing/test_urlslash.py
|
testing/test_urlslash.py
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
import os
def test_check_vers_update():
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
launch2 = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing')
assert launch.url == launch2.url
|
Test url slash add behavior
|
Test url slash add behavior
|
Python
|
lgpl-2.1
|
rlee287/pyautoupdate,rlee287/pyautoupdate
|
Test url slash add behavior
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
import os
def test_check_vers_update():
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
launch2 = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing')
assert launch.url == launch2.url
|
<commit_before><commit_msg>Test url slash add behavior<commit_after>
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
import os
def test_check_vers_update():
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
launch2 = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing')
assert launch.url == launch2.url
|
Test url slash add behaviorfrom __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
import os
def test_check_vers_update():
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
launch2 = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing')
assert launch.url == launch2.url
|
<commit_before><commit_msg>Test url slash add behavior<commit_after>from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
import os
def test_check_vers_update():
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
launch2 = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing')
assert launch.url == launch2.url
|
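The test above only asserts that a trailing slash on the constructor argument leaves Launcher.url unchanged; the normalization itself lives inside pyautoupdate and is not shown in this record. Below is a minimal sketch of the rule the test implies, assuming a simple append-if-missing policy; normalize_url is a hypothetical helper for illustration, not pyautoupdate's API.

def normalize_url(url):
    # Append the trailing slash if it is missing, so both input forms
    # yield the same value for downstream path joins.
    return url if url.endswith('/') else url + '/'

assert (normalize_url('http://rlee287.github.io/pyautoupdate/testing')
        == normalize_url('http://rlee287.github.io/pyautoupdate/testing/'))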
|
2d88cb51291091fd65c218a1d7373828bac874ec
|
preparation/selection/match_amendments.py
|
preparation/selection/match_amendments.py
|
import os
__author__ = 'moskupols'
from hb_res.storage import list_storages, get_storage, FileExplanationStorage
from pprint import pprint
from diff_match_patch import diff_match_patch
CUR_DIR = os.path.dirname(os.path.abspath(__file__))
SEL_PATH = os.path.join(CUR_DIR, 'SelectedAfterMissedModifiers.asset')
all_expls = []
by_key = {}
by_text = {}
by_title = {}
for trunk in list_storages():
with get_storage(trunk) as stor:
for expl in stor.entries():
expl.trunk = trunk
all_expls.append(expl)
by_key[expl.key] = expl
by_text[expl.text] = expl
by_title.setdefault(expl.title, []).append(expl)
abused = set()
matched_by_key = 0
matched_by_text = 0
dmp = diff_match_patch()
fuzzies = []
with FileExplanationStorage(SEL_PATH) as inp:
for sel in inp.entries():
if sel.key in by_key:
abused.add(sel.key)
matched_by_key += 1
elif sel.text in by_text:
abused.add(by_text[sel.text].key)
matched_by_text += 1
else:
best = min(
(dmp.diff_levenshtein(dmp.diff_main(e.text, sel.text)), e)
for e in by_title[sel.title]
if e.key not in abused
)
fuzzies.append((best, sel))
print('matched by key', matched_by_key)
print('matched by text', matched_by_text)
fuzzies.sort()
pprint([(dist, b.title, b.text, sel.text) for (dist, b), sel in fuzzies])
|
Add script matching most of the manual explanations
|
Add script matching most of the manual explanations
with those in autogenerated assets
|
Python
|
mit
|
hatbot-team/hatbot_resources
|
Add script matching most of the manual explanations
with those in autogenerated assets
|
import os
__author__ = 'moskupols'
from hb_res.storage import list_storages, get_storage, FileExplanationStorage
from pprint import pprint
from diff_match_patch import diff_match_patch
CUR_DIR = os.path.dirname(os.path.abspath(__file__))
SEL_PATH = os.path.join(CUR_DIR, 'SelectedAfterMissedModifiers.asset')
all_expls = []
by_key = {}
by_text = {}
by_title = {}
for trunk in list_storages():
with get_storage(trunk) as stor:
for expl in stor.entries():
expl.trunk = trunk
all_expls.append(expl)
by_key[expl.key] = expl
by_text[expl.text] = expl
by_title.setdefault(expl.title, []).append(expl)
abused = set()
matched_by_key = 0
matched_by_text = 0
dmp = diff_match_patch()
fuzzies = []
with FileExplanationStorage(SEL_PATH) as inp:
for sel in inp.entries():
if sel.key in by_key:
abused.add(sel.key)
matched_by_key += 1
elif sel.text in by_text:
abused.add(by_text[sel.text].key)
matched_by_text += 1
else:
best = min(
(dmp.diff_levenshtein(dmp.diff_main(e.text, sel.text)), e)
for e in by_title[sel.title]
if e.key not in abused
)
fuzzies.append((best, sel))
print('matched by key', matched_by_key)
print('matched by text', matched_by_text)
fuzzies.sort()
pprint([(dist, b.title, b.text, sel.text) for (dist, b), sel in fuzzies])
|
<commit_before><commit_msg>Add script matching most of the manual explanations
with those in autogenerated assets<commit_after>
|
import os
__author__ = 'moskupols'
from hb_res.storage import list_storages, get_storage, FileExplanationStorage
from pprint import pprint
from diff_match_patch import diff_match_patch
CUR_DIR = os.path.dirname(os.path.abspath(__file__))
SEL_PATH = os.path.join(CUR_DIR, 'SelectedAfterMissedModifiers.asset')
all_expls = []
by_key = {}
by_text = {}
by_title = {}
for trunk in list_storages():
with get_storage(trunk) as stor:
for expl in stor.entries():
expl.trunk = trunk
all_expls.append(expl)
by_key[expl.key] = expl
by_text[expl.text] = expl
by_title.setdefault(expl.title, []).append(expl)
abused = set()
matched_by_key = 0
matched_by_text = 0
dmp = diff_match_patch()
fuzzies = []
with FileExplanationStorage(SEL_PATH) as inp:
for sel in inp.entries():
if sel.key in by_key:
abused.add(sel.key)
matched_by_key += 1
elif sel.text in by_text:
abused.add(by_text[sel.text].key)
matched_by_text += 1
else:
best = min(
(dmp.diff_levenshtein(dmp.diff_main(e.text, sel.text)), e)
for e in by_title[sel.title]
if e.key not in abused
)
fuzzies.append((best, sel))
print('matched by key', matched_by_key)
print('matched by text', matched_by_text)
fuzzies.sort()
pprint([(dist, b.title, b.text, sel.text) for (dist, b), sel in fuzzies])
|
Add script matching most of the manual explanations
with those in autogenerated assetsimport os
__author__ = 'moskupols'
from hb_res.storage import list_storages, get_storage, FileExplanationStorage
from pprint import pprint
from diff_match_patch import diff_match_patch
CUR_DIR = os.path.dirname(os.path.abspath(__file__))
SEL_PATH = os.path.join(CUR_DIR, 'SelectedAfterMissedModifiers.asset')
all_expls = []
by_key = {}
by_text = {}
by_title = {}
for trunk in list_storages():
with get_storage(trunk) as stor:
for expl in stor.entries():
expl.trunk = trunk
all_expls.append(expl)
by_key[expl.key] = expl
by_text[expl.text] = expl
by_title.setdefault(expl.title, []).append(expl)
abused = set()
matched_by_key = 0
matched_by_text = 0
dmp = diff_match_patch()
fuzzies = []
with FileExplanationStorage(SEL_PATH) as inp:
for sel in inp.entries():
if sel.key in by_key:
abused.add(sel.key)
matched_by_key += 1
elif sel.text in by_text:
abused.add(by_text[sel.text].key)
matched_by_text += 1
else:
best = min(
(dmp.diff_levenshtein(dmp.diff_main(e.text, sel.text)), e)
for e in by_title[sel.title]
if e.key not in abused
)
fuzzies.append((best, sel))
print('matched by key', matched_by_key)
print('matched by text', matched_by_text)
fuzzies.sort()
pprint([(dist, b.title, b.text, sel.text) for (dist, b), sel in fuzzies])
|
<commit_before><commit_msg>Add script matching most of the manual explanations
with those in autogenerated assets<commit_after>import os
__author__ = 'moskupols'
from hb_res.storage import list_storages, get_storage, FileExplanationStorage
from pprint import pprint
from diff_match_patch import diff_match_patch
CUR_DIR = os.path.dirname(os.path.abspath(__file__))
SEL_PATH = os.path.join(CUR_DIR, 'SelectedAfterMissedModifiers.asset')
all_expls = []
by_key = {}
by_text = {}
by_title = {}
for trunk in list_storages():
with get_storage(trunk) as stor:
for expl in stor.entries():
expl.trunk = trunk
all_expls.append(expl)
by_key[expl.key] = expl
by_text[expl.text] = expl
by_title.setdefault(expl.title, []).append(expl)
abused = set()
matched_by_key = 0
matched_by_text = 0
dmp = diff_match_patch()
fuzzies = []
with FileExplanationStorage(SEL_PATH) as inp:
for sel in inp.entries():
if sel.key in by_key:
abused.add(sel.key)
matched_by_key += 1
elif sel.text in by_text:
abused.add(by_text[sel.text].key)
matched_by_text += 1
else:
best = min(
(dmp.diff_levenshtein(dmp.diff_main(e.text, sel.text)), e)
for e in by_title[sel.title]
if e.key not in abused
)
fuzzies.append((best, sel))
print('matched by key', matched_by_key)
print('matched by text', matched_by_text)
fuzzies.sort()
pprint([(dist, b.title, b.text, sel.text) for (dist, b), sel in fuzzies])
|
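The script above matches explanations in three passes: exact key, exact text, and finally the nearest text by Levenshtein distance over a diff_match_patch diff. Here is a minimal sketch of that fuzzy fallback in isolation, using the same diff_match_patch calls as the script; the candidate strings are invented for illustration.

from diff_match_patch import diff_match_patch

dmp = diff_match_patch()

def closest(text, candidates):
    # Pick the candidate whose diff against `text` has the smallest
    # Levenshtein cost, mirroring the `best = min(...)` step above.
    return min((dmp.diff_levenshtein(dmp.diff_main(c, text)), c)
               for c in candidates)

print(closest('colour', ['color', 'flavour', 'colander']))  # 'color' wins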
|
d52920d1efee179271bdf6497b384110c404fa97
|
aleph/tests/test_search_query.py
|
aleph/tests/test_search_query.py
|
from unittest import TestCase
from aleph.search.parser import SearchQueryParser
from aleph.search.query import Query
def query(args):
return Query(SearchQueryParser(args, None))
class QueryTestCase(TestCase):
def setUp(self):
# Allow list elements to be in any order
self.addTypeEqualityFunc(list, self.assertItemsEqual)
def test_no_text(self):
q = query([])
self.assertEqual(q.get_text_query(), {'match_all': {}})
def test_has_text(self):
q = query([('q', 'search text')])
text_q = q.get_text_query()
self.assertEqual(text_q['simple_query_string']['query'], 'search text')
def test_id_filter(self):
q = query([
('filter:id', '5'),
('filter:id', '8'),
('filter:id', '2'),
('filter:_id', '3')
])
filters = q.get_filters()
self.assertEqual(len(filters), 1)
self.assertEqual(filters[0].keys(), ['ids'])
self.assertEqual(filters[0]['ids']['values'], ['8', '5', '2', '3'])
def test_filters(self):
q = query([
('filter:key1', 'foo'),
('filter:key1', 'bar'),
('filter:key2', 'blah'),
('filter:key2', 'blahblah')
])
filters = q.get_filters()
self.assertEqual(len(filters), 2)
self.assertEqual(filters[0].keys(), ['terms'])
self.assertEqual(filters[1].keys(), ['terms'])
# Extract filters without assuming order
filter_key1 = filter(lambda f: ['key1'] == f['terms'].keys(), filters)[0]['terms']['key1']
filter_key2 = filter(lambda f: ['key2'] == f['terms'].keys(), filters)[0]['terms']['key2']
self.assertEquals(filter_key1, ['foo', 'bar'])
self.assertEquals(filter_key2, ['blah', 'blahblah'])
def test_offset(self):
q = query([('offset', 10), ('limit', 100)])
body = q.get_body()
self.assertEqual(body['from'], 10)
self.assertEqual(body['size'], 100)
|
Add tests for Query class
|
Add tests for Query class
|
Python
|
mit
|
alephdata/aleph,alephdata/aleph,pudo/aleph,alephdata/aleph,pudo/aleph,alephdata/aleph,alephdata/aleph,pudo/aleph
|
Add tests for Query class
|
from unittest import TestCase
from aleph.search.parser import SearchQueryParser
from aleph.search.query import Query
def query(args):
return Query(SearchQueryParser(args, None))
class QueryTestCase(TestCase):
def setUp(self):
# Allow list elements to be in any order
self.addTypeEqualityFunc(list, self.assertItemsEqual)
def test_no_text(self):
q = query([])
self.assertEqual(q.get_text_query(), {'match_all': {}})
def test_has_text(self):
q = query([('q', 'search text')])
text_q = q.get_text_query()
self.assertEqual(text_q['simple_query_string']['query'], 'search text')
def test_id_filter(self):
q = query([
('filter:id', '5'),
('filter:id', '8'),
('filter:id', '2'),
('filter:_id', '3')
])
filters = q.get_filters()
self.assertEqual(len(filters), 1)
self.assertEqual(filters[0].keys(), ['ids'])
self.assertEqual(filters[0]['ids']['values'], ['8', '5', '2', '3'])
def test_filters(self):
q = query([
('filter:key1', 'foo'),
('filter:key1', 'bar'),
('filter:key2', 'blah'),
('filter:key2', 'blahblah')
])
filters = q.get_filters()
self.assertEqual(len(filters), 2)
self.assertEqual(filters[0].keys(), ['terms'])
self.assertEqual(filters[1].keys(), ['terms'])
# Extract filters without assuming order
filter_key1 = filter(lambda f: ['key1'] == f['terms'].keys(), filters)[0]['terms']['key1']
filter_key2 = filter(lambda f: ['key2'] == f['terms'].keys(), filters)[0]['terms']['key2']
self.assertEquals(filter_key1, ['foo', 'bar'])
self.assertEquals(filter_key2, ['blah', 'blahblah'])
def test_offset(self):
q = query([('offset', 10), ('limit', 100)])
body = q.get_body()
self.assertEqual(body['from'], 10)
self.assertEqual(body['size'], 100)
|
<commit_before><commit_msg>Add tests for Query class<commit_after>
|
from unittest import TestCase
from aleph.search.parser import SearchQueryParser
from aleph.search.query import Query
def query(args):
return Query(SearchQueryParser(args, None))
class QueryTestCase(TestCase):
def setUp(self):
# Allow list elements to be in any order
self.addTypeEqualityFunc(list, self.assertItemsEqual)
def test_no_text(self):
q = query([])
self.assertEqual(q.get_text_query(), {'match_all': {}})
def test_has_text(self):
q = query([('q', 'search text')])
text_q = q.get_text_query()
self.assertEqual(text_q['simple_query_string']['query'], 'search text')
def test_id_filter(self):
q = query([
('filter:id', '5'),
('filter:id', '8'),
('filter:id', '2'),
('filter:_id', '3')
])
filters = q.get_filters()
self.assertEqual(len(filters), 1)
self.assertEqual(filters[0].keys(), ['ids'])
self.assertEqual(filters[0]['ids']['values'], ['8', '5', '2', '3'])
def test_filters(self):
q = query([
('filter:key1', 'foo'),
('filter:key1', 'bar'),
('filter:key2', 'blah'),
('filter:key2', 'blahblah')
])
filters = q.get_filters()
self.assertEqual(len(filters), 2)
self.assertEqual(filters[0].keys(), ['terms'])
self.assertEqual(filters[1].keys(), ['terms'])
# Extract filters without assuming order
filter_key1 = filter(lambda f: ['key1'] == f['terms'].keys(), filters)[0]['terms']['key1']
filter_key2 = filter(lambda f: ['key2'] == f['terms'].keys(), filters)[0]['terms']['key2']
self.assertEquals(filter_key1, ['foo', 'bar'])
self.assertEquals(filter_key2, ['blah', 'blahblah'])
def test_offset(self):
q = query([('offset', 10), ('limit', 100)])
body = q.get_body()
self.assertEqual(body['from'], 10)
self.assertEqual(body['size'], 100)
|
Add tests for Query classfrom unittest import TestCase
from aleph.search.parser import SearchQueryParser
from aleph.search.query import Query
def query(args):
return Query(SearchQueryParser(args, None))
class QueryTestCase(TestCase):
def setUp(self):
# Allow list elements to be in any order
self.addTypeEqualityFunc(list, self.assertItemsEqual)
def test_no_text(self):
q = query([])
self.assertEqual(q.get_text_query(), {'match_all': {}})
def test_has_text(self):
q = query([('q', 'search text')])
text_q = q.get_text_query()
self.assertEqual(text_q['simple_query_string']['query'], 'search text')
def test_id_filter(self):
q = query([
('filter:id', '5'),
('filter:id', '8'),
('filter:id', '2'),
('filter:_id', '3')
])
filters = q.get_filters()
self.assertEqual(len(filters), 1)
self.assertEqual(filters[0].keys(), ['ids'])
self.assertEqual(filters[0]['ids']['values'], ['8', '5', '2', '3'])
def test_filters(self):
q = query([
('filter:key1', 'foo'),
('filter:key1', 'bar'),
('filter:key2', 'blah'),
('filter:key2', 'blahblah')
])
filters = q.get_filters()
self.assertEqual(len(filters), 2)
self.assertEqual(filters[0].keys(), ['terms'])
self.assertEqual(filters[1].keys(), ['terms'])
# Extract filters without assuming order
filter_key1 = filter(lambda f: ['key1'] == f['terms'].keys(), filters)[0]['terms']['key1']
filter_key2 = filter(lambda f: ['key2'] == f['terms'].keys(), filters)[0]['terms']['key2']
self.assertEquals(filter_key1, ['foo', 'bar'])
self.assertEquals(filter_key2, ['blah', 'blahblah'])
def test_offset(self):
q = query([('offset', 10), ('limit', 100)])
body = q.get_body()
self.assertEqual(body['from'], 10)
self.assertEqual(body['size'], 100)
|
<commit_before><commit_msg>Add tests for Query class<commit_after>from unittest import TestCase
from aleph.search.parser import SearchQueryParser
from aleph.search.query import Query
def query(args):
return Query(SearchQueryParser(args, None))
class QueryTestCase(TestCase):
def setUp(self):
# Allow list elements to be in any order
self.addTypeEqualityFunc(list, self.assertItemsEqual)
def test_no_text(self):
q = query([])
self.assertEqual(q.get_text_query(), {'match_all': {}})
def test_has_text(self):
q = query([('q', 'search text')])
text_q = q.get_text_query()
self.assertEqual(text_q['simple_query_string']['query'], 'search text')
def test_id_filter(self):
q = query([
('filter:id', '5'),
('filter:id', '8'),
('filter:id', '2'),
('filter:_id', '3')
])
filters = q.get_filters()
self.assertEqual(len(filters), 1)
self.assertEqual(filters[0].keys(), ['ids'])
self.assertEqual(filters[0]['ids']['values'], ['8', '5', '2', '3'])
def test_filters(self):
q = query([
('filter:key1', 'foo'),
('filter:key1', 'bar'),
('filter:key2', 'blah'),
('filter:key2', 'blahblah')
])
filters = q.get_filters()
self.assertEqual(len(filters), 2)
self.assertEqual(filters[0].keys(), ['terms'])
self.assertEqual(filters[1].keys(), ['terms'])
# Extract filters without assuming order
filter_key1 = filter(lambda f: ['key1'] == f['terms'].keys(), filters)[0]['terms']['key1']
filter_key2 = filter(lambda f: ['key2'] == f['terms'].keys(), filters)[0]['terms']['key2']
self.assertEquals(filter_key1, ['foo', 'bar'])
self.assertEquals(filter_key2, ['blah', 'blahblah'])
def test_offset(self):
q = query([('offset', 10), ('limit', 100)])
body = q.get_body()
self.assertEqual(body['from'], 10)
self.assertEqual(body['size'], 100)
|
|
4e293daad5af96ee16d93058dda45bae733be9ad
|
nanpy/examples/lcd_autoscroll.py
|
nanpy/examples/lcd_autoscroll.py
|
#!/usr/bin/env python
import time
from nanpy import SerialManager
from nanpy.lcd import Lcd
if __name__ == '__main__':
connection = SerialManager(sleep_after_connect=2)
cols, rows = 16, 2
pins = [7, 8, 9, 10, 11, 12]
lcd = Lcd(pins, [cols, rows], connection=connection)
while True:
lcd.setCursor(0, 0)
for char in range(10):
lcd.printString(char)
time.sleep(0.5)
lcd.setCursor(16, 1)
lcd.autoscroll()
for char in range(10):
lcd.printString(char)
time.sleep(0.5)
lcd.noAutoscroll()
lcd.clear()
|
Add example for lcd autoscroll
|
Add example for lcd autoscroll
|
Python
|
mit
|
joppi/nanpy,nanpy/nanpy
|
Add example for lcd autoscroll
|
#!/usr/bin/env python
import time
from nanpy import SerialManager
from nanpy.lcd import Lcd
if __name__ == '__main__':
connection = SerialManager(sleep_after_connect=2)
cols, rows = 16, 2
pins = [7, 8, 9, 10, 11, 12]
lcd = Lcd(pins, [cols, rows], connection=connection)
while True:
lcd.setCursor(0, 0)
for char in range(10):
lcd.printString(char)
time.sleep(0.5)
lcd.setCursor(16, 1)
lcd.autoscroll()
for char in range(10):
lcd.printString(char)
time.sleep(0.5)
lcd.noAutoscroll()
lcd.clear()
|
<commit_before><commit_msg>Add example for lcd autoscroll<commit_after>
|
#!/usr/bin/env python
import time
from nanpy import SerialManager
from nanpy.lcd import Lcd
if __name__ == '__main__':
connection = SerialManager(sleep_after_connect=2)
cols, rows = 16, 2
pins = [7, 8, 9, 10, 11, 12]
lcd = Lcd(pins, [cols, rows], connection=connection)
while True:
lcd.setCursor(0, 0)
for char in range(10):
lcd.printString(char)
time.sleep(0.5)
lcd.setCursor(16, 1)
lcd.autoscroll()
for char in range(10):
lcd.printString(char)
time.sleep(0.5)
lcd.noAutoscroll()
lcd.clear()
|
Add example for lcd autoscroll#!/usr/bin/env python
import time
from nanpy import SerialManager
from nanpy.lcd import Lcd
if __name__ == '__main__':
connection = SerialManager(sleep_after_connect=2)
cols, rows = 16, 2
pins = [7, 8, 9, 10, 11, 12]
lcd = Lcd(pins, [cols, rows], connection=connection)
while True:
lcd.setCursor(0, 0)
for char in range(10):
lcd.printString(char)
time.sleep(0.5)
lcd.setCursor(16, 1)
lcd.autoscroll()
for char in range(10):
lcd.printString(char)
time.sleep(0.5)
lcd.noAutoscroll()
lcd.clear()
|
<commit_before><commit_msg>Add example for lcd autoscroll<commit_after>#!/usr/bin/env python
import time
from nanpy import SerialManager
from nanpy.lcd import Lcd
if __name__ == '__main__':
connection = SerialManager(sleep_after_connect=2)
cols, rows = 16, 2
pins = [7, 8, 9, 10, 11, 12]
lcd = Lcd(pins, [cols, rows], connection=connection)
while True:
lcd.setCursor(0, 0)
for char in range(10):
lcd.printString(char)
time.sleep(0.5)
lcd.setCursor(16, 1)
lcd.autoscroll()
for char in range(10):
lcd.printString(char)
time.sleep(0.5)
lcd.noAutoscroll()
lcd.clear()
|
|
d6805fc64245e667d83141aeea6b6c2e017e8f3c
|
two-sums/two-sum-2.py
|
two-sums/two-sum-2.py
|
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
length = len(nums)
map = {}
for i in range(length):
map[nums[i]] = i
for i in range(length):
complement = target - nums[i]
# If a list contains two of the same integer and the target happens to
# be the sum of those two integers,
# for example, [2, 5, 5, 9] with target 10:
# map is {2: 0, 5: 2, 9: 3} (the later index wins)
# When i is 1, complement is 10 - 5, which is 5,
# and 5 is definitely in the map
if map.has_key(complement) and map[complement] != i:
return [i, map[complement]]
|
Use maps, which give up space but get less execution time
|
Use maps, which give up space but get less execution time
|
Python
|
mit
|
tonylixu/leetcode
|
Use maps, which give up space but get less execution time
|
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
length = len(nums)
map = {}
for i in range(length):
map[nums[i]] = i
for i in range(length):
complement = target - nums[i]
# If a list contains two of the same integer and the target happens to
# be the sum of those two integers,
# for example, [2, 5, 5, 9] with target 10:
# map is {2: 0, 5: 2, 9: 3} (the later index wins)
# When i is 1, complement is 10 - 5, which is 5,
# and 5 is definitely in the map
if map.has_key(complement) and map[complement] != i:
return [i, map[complement]]
|
<commit_before><commit_msg>Use maps, which give up space but get less execution time<commit_after>
|
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
length = len(nums)
map = {}
for i in range(length):
map[nums[i]] = i
for i in range(length):
complement = target - nums[i]
# If a list contains two of the same integer and the target happens to
# be the sum of those two integers,
# for example, [2, 5, 5, 9] with target 10:
# map is {2: 0, 5: 2, 9: 3} (the later index wins)
# When i is 1, complement is 10 - 5, which is 5,
# and 5 is definitely in the map
if map.has_key(complement) and map[complement] != i:
return [i, map[complement]]
|
Use maps, which give up space but get less execution timeclass Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
length = len(nums)
map = {}
for i in range(length):
map[nums[i]] = i
for i in range(length):
complement = target - nums[i]
# If a list contains two of the same integer and the target happens to
# be the sum of those two integers,
# for example, [2, 5, 5, 9] with target 10:
# map is {2: 0, 5: 2, 9: 3} (the later index wins)
# When i is 1, complement is 10 - 5, which is 5,
# and 5 is definitely in the map
if map.has_key(complement) and map[complement] != i:
return [i, map[complement]]
|
<commit_before><commit_msg>Use maps, which give up space but get less execution time<commit_after>class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
length = len(nums)
map = {}
for i in range(length):
map[nums[i]] = i
for i in range(length):
complement = target - nums[i]
# If a list contains two of the same integer and the target happens to
# be the sum of those two integers,
# for example, [2, 5, 5, 9] with target 10:
# map is {2: 0, 5: 2, 9: 3} (the later index wins)
# When i is 1, complement is 10 - 5, which is 5,
# and 5 is definitely in the map
if map.has_key(complement) and map[complement] != i:
return [i, map[complement]]
|
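The solution above trades O(n) extra space for O(1) lookups by indexing every value's position before scanning for complements. The same idea is more commonly written as a single pass, which also sidesteps the duplicate-value subtlety the comments describe; this is a minimal sketch, not the record's code.

def two_sum(nums, target):
    seen = {}  # value -> index of an earlier occurrence
    for i, n in enumerate(nums):
        complement = target - n
        if complement in seen:
            # An earlier occurrence is necessarily a different index.
            return [seen[complement], i]
        seen[n] = i

print(two_sum([2, 5, 5, 9], 10))  # [1, 2]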
|
af2c75a4a99f93e3cae1a1d2d0485a88cce833b7
|
features/environment.py
|
features/environment.py
|
import os
import os.path as pt
from scripttest import TestFileEnvironment
def before_scenario(context, _):
root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
path = ":" + pt.join(root_dir, 'bin')
tmp = pt.join(root_dir, "tmp")
python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')
os.environ['PATH'] = path + ":" + os.environ['PATH']
os.environ['PYTHONPATH'] = python_path
os.environ['TMPDIR'] = tmp # Required to work with boot2docker
context.env = TestFileEnvironment(base_path = tmp)
|
import os
import os.path as pt
from scripttest import TestFileEnvironment
def before_scenario(context, _):
root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
path = ":" + pt.join(root_dir, 'bin')
tmp = pt.join(root_dir, "tmp", "feature")
python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')
os.environ['PATH'] = path + ":" + os.environ['PATH']
os.environ['PYTHONPATH'] = python_path
context.env = TestFileEnvironment(base_path = tmp)
|
Update location that feature tests are run in
|
Update location that feature tests are run in
|
Python
|
mit
|
michaelbarton/command-line-interface,bioboxes/command-line-interface,bioboxes/command-line-interface,michaelbarton/command-line-interface
|
import os
import os.path as pt
from scripttest import TestFileEnvironment
def before_scenario(context, _):
root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
path = ":" + pt.join(root_dir, 'bin')
tmp = pt.join(root_dir, "tmp")
python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')
os.environ['PATH'] = path + ":" + os.environ['PATH']
os.environ['PYTHONPATH'] = python_path
os.environ['TMPDIR'] = tmp # Required to work with boot2docker
context.env = TestFileEnvironment(base_path = tmp)
Update location that feature tests are run in
|
import os
import os.path as pt
from scripttest import TestFileEnvironment
def before_scenario(context, _):
root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
path = ":" + pt.join(root_dir, 'bin')
tmp = pt.join(root_dir, "tmp", "feature")
python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')
os.environ['PATH'] = path + ":" + os.environ['PATH']
os.environ['PYTHONPATH'] = python_path
context.env = TestFileEnvironment(base_path = tmp)
|
<commit_before>import os
import os.path as pt
from scripttest import TestFileEnvironment
def before_scenario(context, _):
root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
path = ":" + pt.join(root_dir, 'bin')
tmp = pt.join(root_dir, "tmp")
python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')
os.environ['PATH'] = path + ":" + os.environ['PATH']
os.environ['PYTHONPATH'] = python_path
os.environ['TMPDIR'] = tmp # Required to work with boot2docker
context.env = TestFileEnvironment(base_path = tmp)
<commit_msg>Update location that feature tests are run in<commit_after>
|
import os
import os.path as pt
from scripttest import TestFileEnvironment
def before_scenario(context, _):
root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
path = ":" + pt.join(root_dir, 'bin')
tmp = pt.join(root_dir, "tmp", "feature")
python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')
os.environ['PATH'] = path + ":" + os.environ['PATH']
os.environ['PYTHONPATH'] = python_path
context.env = TestFileEnvironment(base_path = tmp)
|
import os
import os.path as pt
from scripttest import TestFileEnvironment
def before_scenario(context, _):
root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
path = ":" + pt.join(root_dir, 'bin')
tmp = pt.join(root_dir, "tmp")
python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')
os.environ['PATH'] = path + ":" + os.environ['PATH']
os.environ['PYTHONPATH'] = python_path
os.environ['TMPDIR'] = tmp # Required to work with boot2docker
context.env = TestFileEnvironment(base_path = tmp)
Update location that feature tests are run inimport os
import os.path as pt
from scripttest import TestFileEnvironment
def before_scenario(context, _):
root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
path = ":" + pt.join(root_dir, 'bin')
tmp = pt.join(root_dir, "tmp", "feature")
python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')
os.environ['PATH'] = path + ":" + os.environ['PATH']
os.environ['PYTHONPATH'] = python_path
context.env = TestFileEnvironment(base_path = tmp)
|
<commit_before>import os
import os.path as pt
from scripttest import TestFileEnvironment
def before_scenario(context, _):
root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
path = ":" + pt.join(root_dir, 'bin')
tmp = pt.join(root_dir, "tmp")
python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')
os.environ['PATH'] = path + ":" + os.environ['PATH']
os.environ['PYTHONPATH'] = python_path
os.environ['TMPDIR'] = tmp # Required to work with boot2docker
context.env = TestFileEnvironment(base_path = tmp)
<commit_msg>Update location that feature tests are run in<commit_after>import os
import os.path as pt
from scripttest import TestFileEnvironment
def before_scenario(context, _):
root_dir = pt.abspath(pt.join(pt.dirname(__file__), '..'))
path = ":" + pt.join(root_dir, 'bin')
tmp = pt.join(root_dir, "tmp", "feature")
python_path = pt.join(root_dir, 'vendor', 'python', 'lib', 'python2.7', 'site-packages')
os.environ['PATH'] = path + ":" + os.environ['PATH']
os.environ['PYTHONPATH'] = python_path
context.env = TestFileEnvironment(base_path = tmp)
|
5c2f862390a269ea7c245de269a4359d574ee5ca
|
nose2/tests/unit/test_functions_loader.py
|
nose2/tests/unit/test_functions_loader.py
|
from nose2.compat import unittest
from nose2 import events, loader, session
from nose2.plugins.loader import functions
from nose2.tests._common import TestCase
class TestFunctionLoader(TestCase):
def setUp(self):
self.session = session.Session()
self.loader = loader.PluggableTestLoader(self.session)
self.plugin = functions.Functions(session=self.session)
def test_can_load_test_functions_from_module(self):
class Mod(object):
pass
def test():
pass
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 1)
assert isinstance(event.extraTests[0], unittest.FunctionTestCase)
def test_ignores_generator_functions(self):
class Mod(object):
pass
def test():
yield
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 0)
|
Add unit tests for function loader plugin
|
Add unit tests for function loader plugin
|
Python
|
bsd-2-clause
|
little-dude/nose2,leth/nose2,ojengwa/nose2,ezigman/nose2,leth/nose2,ojengwa/nose2,ptthiem/nose2,ezigman/nose2,ptthiem/nose2,little-dude/nose2
|
Add unit tests for function loader plugin
|
from nose2.compat import unittest
from nose2 import events, loader, session
from nose2.plugins.loader import functions
from nose2.tests._common import TestCase
class TestFunctionLoader(TestCase):
def setUp(self):
self.session = session.Session()
self.loader = loader.PluggableTestLoader(self.session)
self.plugin = functions.Functions(session=self.session)
def test_can_load_test_functions_from_module(self):
class Mod(object):
pass
def test():
pass
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 1)
assert isinstance(event.extraTests[0], unittest.FunctionTestCase)
def test_ignores_generator_functions(self):
class Mod(object):
pass
def test():
yield
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 0)
|
<commit_before><commit_msg>Add unit tests for function loader plugin<commit_after>
|
from nose2.compat import unittest
from nose2 import events, loader, session
from nose2.plugins.loader import functions
from nose2.tests._common import TestCase
class TestFunctionLoader(TestCase):
def setUp(self):
self.session = session.Session()
self.loader = loader.PluggableTestLoader(self.session)
self.plugin = functions.Functions(session=self.session)
def test_can_load_test_functions_from_module(self):
class Mod(object):
pass
def test():
pass
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 1)
assert isinstance(event.extraTests[0], unittest.FunctionTestCase)
def test_ignores_generator_functions(self):
class Mod(object):
pass
def test():
yield
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 0)
|
Add unit tests for function loader pluginfrom nose2.compat import unittest
from nose2 import events, loader, session
from nose2.plugins.loader import functions
from nose2.tests._common import TestCase
class TestFunctionLoader(TestCase):
def setUp(self):
self.session = session.Session()
self.loader = loader.PluggableTestLoader(self.session)
self.plugin = functions.Functions(session=self.session)
def test_can_load_test_functions_from_module(self):
class Mod(object):
pass
def test():
pass
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 1)
assert isinstance(event.extraTests[0], unittest.FunctionTestCase)
def test_ignores_generator_functions(self):
class Mod(object):
pass
def test():
yield
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 0)
|
<commit_before><commit_msg>Add unit tests for function loader plugin<commit_after>from nose2.compat import unittest
from nose2 import events, loader, session
from nose2.plugins.loader import functions
from nose2.tests._common import TestCase
class TestFunctionLoader(TestCase):
def setUp(self):
self.session = session.Session()
self.loader = loader.PluggableTestLoader(self.session)
self.plugin = functions.Functions(session=self.session)
def test_can_load_test_functions_from_module(self):
class Mod(object):
pass
def test():
pass
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 1)
assert isinstance(event.extraTests[0], unittest.FunctionTestCase)
def test_ignores_generator_functions(self):
class Mod(object):
pass
def test():
yield
m = Mod()
m.test = test
event = events.LoadFromModuleEvent(self.loader, m)
self.session.hooks.loadTestsFromModule(event)
self.assertEqual(len(event.extraTests), 0)
|
|
37650e6d9c792006ce86317e8b5b7945d4b4aa42
|
mv-annotated-to-main.py
|
mv-annotated-to-main.py
|
#!/usr/bin/env python
import argparse
import os
import os.path
import re
REGEX = re.compile(r'^(.*)_\d{4}-\d{1,2}-\d{1,2}_\d{1,2}-\d{1,2}-\d{1,2}.pdf$')
def process(filename):
match = REGEX.match(filename)
if match is not None:
new_filename = match.group(1) + '.pdf'
os.rename(filename, new_filename)
print '{} -> {}'.format(filename, new_filename)
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(
description='Removes the date suffix from the PDF files with '
'merged annotations from the Onyx Boox M92 ebook reader.')
PARSER.add_argument(
'-r', help='Descend recursively into directories.',
action='store_true', default=False)
PARSER.add_argument('filenames', nargs='*', help='Files to process.')
ARGS = PARSER.parse_args()
for name in ARGS.filenames:
if os.path.isdir(name):
for dirpath, dirnames, sub_filenames in os.walk(name):
for sub_filename in sub_filenames:
process(os.path.join(dirpath, sub_filename))
else:
process(name)
|
Add rename script for PDFs merged with annotations
|
Add rename script for PDFs merged with annotations
|
Python
|
mit
|
jgosmann/boox-tools
|
Add rename script for PDFs merged with annotations
|
#!/usr/bin/env python
import argparse
import os
import os.path
import re
REGEX = re.compile(r'^(.*)_\d{4}-\d{1,2}-\d{1,2}_\d{1,2}-\d{1,2}-\d{1,2}.pdf$')
def process(filename):
match = REGEX.match(filename)
if match is not None:
new_filename = match.group(1) + '.pdf'
os.rename(filename, new_filename)
print '{} -> {}'.format(filename, new_filename)
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(
description='Removes the date suffix from the PDF files with '
'merged annotations from the Onyx Boox M92 ebook reader.')
PARSER.add_argument(
'-r', help='Descend recursively into directories.',
action='store_true', default=False)
PARSER.add_argument('filenames', nargs='*', help='Files to process.')
ARGS = PARSER.parse_args()
for name in ARGS.filenames:
if os.path.isdir(name):
for dirpath, dirnames, sub_filenames in os.walk(name):
for sub_filename in sub_filenames:
process(os.path.join(dirpath, sub_filename))
else:
process(name)
|
<commit_before><commit_msg>Add rename script for PDFs merged with annotations<commit_after>
|
#!/usr/bin/env python
import argparse
import os
import os.path
import re
REGEX = re.compile(r'^(.*)_\d{4}-\d{1,2}-\d{1,2}_\d{1,2}-\d{1,2}-\d{1,2}.pdf$')
def process(filename):
match = REGEX.match(filename)
if match is not None:
new_filename = match.group(1) + '.pdf'
os.rename(filename, new_filename)
print '{} -> {}'.format(filename, new_filename)
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(
description='Removes the date suffix from the PDF files with '
'merged annotations from the Onyx Boox M92 ebook reader.')
PARSER.add_argument(
'-r', help='Descend recursively into directories.',
action='store_true', default=False)
PARSER.add_argument('filenames', nargs='*', help='Files to process.')
ARGS = PARSER.parse_args()
for name in ARGS.filenames:
if os.path.isdir(name):
for dirpath, dirnames, sub_filenames in os.walk(name):
for sub_filename in sub_filenames:
process(os.path.join(dirpath, sub_filename))
else:
process(name)
|
Add rename script for PDFs merged with annotations#!/usr/bin/env python
import argparse
import os
import os.path
import re
REGEX = re.compile(r'^(.*)_\d{4}-\d{1,2}-\d{1,2}_\d{1,2}-\d{1,2}-\d{1,2}.pdf$')
def process(filename):
match = REGEX.match(filename)
if match is not None:
new_filename = match.group(1) + '.pdf'
os.rename(filename, new_filename)
print '{} -> {}'.format(filename, new_filename)
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(
description='Removes the date suffix from the PDF files with '
'merged annotations from the Onyx Boox M92 ebook reader.')
PARSER.add_argument(
'-r', help='Descend recursively into directories.',
action='store_true', default=False)
PARSER.add_argument('filenames', nargs='*', help='Files to process.')
ARGS = PARSER.parse_args()
for name in ARGS.filenames:
if os.path.isdir(name):
for dirpath, dirnames, sub_filenames in os.walk(name):
for sub_filename in sub_filenames:
process(os.path.join(dirpath, sub_filename))
else:
process(name)
|
<commit_before><commit_msg>Add rename script for PDFs merged with annotations<commit_after>#!/usr/bin/env python
import argparse
import os
import os.path
import re
REGEX = re.compile(r'^(.*)_\d{4}-\d{1,2}-\d{1,2}_\d{1,2}-\d{1,2}-\d{1,2}.pdf$')
def process(filename):
match = REGEX.match(filename)
if match is not None:
new_filename = match.group(1) + '.pdf'
os.rename(filename, new_filename)
print '{} -> {}'.format(filename, new_filename)
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(
description='Removes the date suffix from the PDF files with '
'merged annotations from the Onyx Boox M92 ebook reader.')
PARSER.add_argument(
'-r', help='Descend recursively into directories.',
action='store_true', default=False)
PARSER.add_argument('filenames', nargs='*', help='Files to process.')
ARGS = PARSER.parse_args()
for name in ARGS.filenames:
if os.path.isdir(name):
for dirpath, dirnames, sub_filenames in os.walk(name):
for sub_filename in sub_filenames:
process(os.path.join(dirpath, sub_filename))
else:
process(name)
|
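The rename script above is driven entirely by its date-suffix regex. A quick standalone check of the pattern follows; the file names are invented for illustration, and note that the dot before pdf is unescaped in the original, so it matches any character in that position.

import re

REGEX = re.compile(r'^(.*)_\d{4}-\d{1,2}-\d{1,2}_\d{1,2}-\d{1,2}-\d{1,2}.pdf$')

# The reader's timestamp suffix is stripped, leaving the base name.
assert REGEX.match('thesis_2013-7-1_12-30-5.pdf').group(1) == 'thesis'
# A name without the suffix does not match and would be left untouched.
assert REGEX.match('thesis.pdf') is None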
|
2d9c2dc78bb2741d943e9f9af771938d17a57ae7
|
Source/bindings/PRESUBMIT.py
|
Source/bindings/PRESUBMIT.py
|
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Blink bindings presubmit script
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
def _RunBindingsTests(input_api, output_api):
if input_api.is_committing:
message_type = output_api.PresubmitError
else:
message_type = output_api.PresubmitPromptWarning
pardir = input_api.os_path.pardir
run_bindings_tests_path = input_api.os_path.join(input_api.PresubmitLocalPath(), pardir, pardir, 'Tools', 'Scripts', 'run-bindings-tests')
cmd_name = 'run-bindings-tests'
if input_api.platform == 'win32':
# Windows needs some help.
cmd = [input_api.python_executable, run_bindings_tests_path]
else:
cmd = [run_bindings_tests_path]
test_cmd = input_api.Command(
name=cmd_name,
cmd=cmd,
kwargs={},
message=message_type)
if input_api.verbose:
print('Running ' + cmd_name)
return input_api.RunTests([test_cmd])
def CheckChangeOnUpload(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
|
Add presubmit check for run-bindings-tests
|
Add presubmit check for run-bindings-tests
run-bindings-tests should be run (and succeed) for any changes to
Source/bindings. This is particularly important to make sure tests are
rebaselined when changes are made to the code generator.
This adds a PRESUBMIT.py script to do just that!
Checked that this worked by changing a test IDL in
Source/bindings/tests/idls
...and running
git cl upload
...which then gives a presubmit warning, as desired!
Script based on GetUnitTests in
tools/depot_tools/presubmit_canned_checks.py
...which seems exemplary. I've stripped it down to the minimum. Ref:
http://src.chromium.org/viewvc/chrome/trunk/tools/depot_tools/presubmit_canned_checks.py?view=markup
Proximately, this is in reply to Erik's request:
https://codereview.chromium.org/19047003/#msg5
Review URL: https://chromiumcodereview.appspot.com/21722003
git-svn-id: bf5cd6ccde378db821296732a091cfbcf5285fbd@155448 bbb929c8-8fbe-4397-9dbb-9b2b20218538
|
Python
|
bsd-3-clause
|
primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs
|
Add presubmit check for run-bindings-tests
run-bindings-tests should be run (and succeed) for any changes to
Source/bindings. This is particularly important to make sure tests are
rebaselined when changes are made to the code generator.
This adds a PRESUBMIT.py script to do just that!
Checked that this worked by changing a test IDL in
Source/bindings/tests/idls
...and running
git cl upload
...which then gives a presubmit warning, as desired!
Script based on GetUnitTests in
tools/depot_tools/presubmit_canned_checks.py
...which seems exemplary. I've stripped it down to the minimum. Ref:
http://src.chromium.org/viewvc/chrome/trunk/tools/depot_tools/presubmit_canned_checks.py?view=markup
Proximately, this is in reply to Erik's request:
https://codereview.chromium.org/19047003/#msg5
Review URL: https://chromiumcodereview.appspot.com/21722003
git-svn-id: bf5cd6ccde378db821296732a091cfbcf5285fbd@155448 bbb929c8-8fbe-4397-9dbb-9b2b20218538
|
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Blink bindings presubmit script
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
def _RunBindingsTests(input_api, output_api):
if input_api.is_committing:
message_type = output_api.PresubmitError
else:
message_type = output_api.PresubmitPromptWarning
pardir = input_api.os_path.pardir
run_bindings_tests_path = input_api.os_path.join(input_api.PresubmitLocalPath(), pardir, pardir, 'Tools', 'Scripts', 'run-bindings-tests')
cmd_name = 'run-bindings-tests'
if input_api.platform == 'win32':
# Windows needs some help.
cmd = [input_api.python_executable, run_bindings_tests_path]
else:
cmd = [run_bindings_tests_path]
test_cmd = input_api.Command(
name=cmd_name,
cmd=cmd,
kwargs={},
message=message_type)
if input_api.verbose:
print('Running ' + cmd_name)
return input_api.RunTests([test_cmd])
def CheckChangeOnUpload(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
|
<commit_before><commit_msg>Add presubmit check for run-bindings-tests
run-bindings-tests should be run (and succeed) for any changes to
Source/bindings. This is particularly important to make sure tests are
rebaselined when changes are made to the code generator.
This adds a PRESUBMIT.py script to do just that!
Checked that this worked by changing a test IDL in
Source/bindings/tests/idls
...and running
git cl upload
...which then gives a presubmit warning, as desired!
Script based on GetUnitTests in
tools/depot_tools/presubmit_canned_checks.py
...which seems exemplary. I've stripped it down to the minimum. Ref:
http://src.chromium.org/viewvc/chrome/trunk/tools/depot_tools/presubmit_canned_checks.py?view=markup
Proximately, this is in reply to Erik's request:
https://codereview.chromium.org/19047003/#msg5
Review URL: https://chromiumcodereview.appspot.com/21722003
git-svn-id: bf5cd6ccde378db821296732a091cfbcf5285fbd@155448 bbb929c8-8fbe-4397-9dbb-9b2b20218538<commit_after>
|
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Blink bindings presubmit script
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
def _RunBindingsTests(input_api, output_api):
if input_api.is_committing:
message_type = output_api.PresubmitError
else:
message_type = output_api.PresubmitPromptWarning
pardir = input_api.os_path.pardir
run_bindings_tests_path = input_api.os_path.join(input_api.PresubmitLocalPath(), pardir, pardir, 'Tools', 'Scripts', 'run-bindings-tests')
cmd_name = 'run-bindings-tests'
if input_api.platform == 'win32':
# Windows needs some help.
cmd = [input_api.python_executable, run_bindings_tests_path]
else:
cmd = [run_bindings_tests_path]
test_cmd = input_api.Command(
name=cmd_name,
cmd=cmd,
kwargs={},
message=message_type)
if input_api.verbose:
print('Running ' + cmd_name)
return input_api.RunTests([test_cmd])
def CheckChangeOnUpload(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
|
Add presubmit check for run-bindings-tests
run-bindings-tests should be run (and succeed) for any changes to
Source/bindings. This is particularly important to make sure tests are
rebaselined when changes are made to the code generator.
This adds a PRESUBMIT.py script to do just that!
Checked that this worked by changing a test IDL in
Source/bindings/tests/idls
...and running
git cl upload
...which then gives a presubmit warning, as desired!
Script based on GetUnitTests in
tools/depot_tools/presubmit_canned_checks.py
...which seems exemplary. I've stripped it down to the minimum. Ref:
http://src.chromium.org/viewvc/chrome/trunk/tools/depot_tools/presubmit_canned_checks.py?view=markup
Proximately, this is in reply to Erik's request:
https://codereview.chromium.org/19047003/#msg5
Review URL: https://chromiumcodereview.appspot.com/21722003
git-svn-id: bf5cd6ccde378db821296732a091cfbcf5285fbd@155448 bbb929c8-8fbe-4397-9dbb-9b2b20218538# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Blink bindings presubmit script
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
def _RunBindingsTests(input_api, output_api):
if input_api.is_committing:
message_type = output_api.PresubmitError
else:
message_type = output_api.PresubmitPromptWarning
pardir = input_api.os_path.pardir
run_bindings_tests_path = input_api.os_path.join(input_api.PresubmitLocalPath(), pardir, pardir, 'Tools', 'Scripts', 'run-bindings-tests')
cmd_name = 'run-bindings-tests'
if input_api.platform == 'win32':
# Windows needs some help.
cmd = [input_api.python_executable, run_bindings_tests_path]
else:
cmd = [run_bindings_tests_path]
test_cmd = input_api.Command(
name=cmd_name,
cmd=cmd,
kwargs={},
message=message_type)
if input_api.verbose:
print('Running ' + cmd_name)
return input_api.RunTests([test_cmd])
def CheckChangeOnUpload(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
|
<commit_before><commit_msg>Add presubmit check for run-bindings-tests
run-bindings-tests should be run (and succeed) for any changes to
Source/bindings. This is particularly important to make sure tests are
rebaselined when changes are made to the code generator.
This adds a PRESUBMIT.py script to do just that!
Checked that this worked by changing a test IDL in
Source/bindings/tests/idls
...and running
git cl upload
...which then gives a presubmit warning, as desired!
Script based on GetUnitTests in
tools/depot_tools/presubmit_canned_checks.py
...which seems exemplary. I've stripped it down to the minimum. Ref:
http://src.chromium.org/viewvc/chrome/trunk/tools/depot_tools/presubmit_canned_checks.py?view=markup
Proximately, this is in reply to Erik's request:
https://codereview.chromium.org/19047003/#msg5
Review URL: https://chromiumcodereview.appspot.com/21722003
git-svn-id: bf5cd6ccde378db821296732a091cfbcf5285fbd@155448 bbb929c8-8fbe-4397-9dbb-9b2b20218538<commit_after># Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Blink bindings presubmit script
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
def _RunBindingsTests(input_api, output_api):
if input_api.is_committing:
message_type = output_api.PresubmitError
else:
message_type = output_api.PresubmitPromptWarning
pardir = input_api.os_path.pardir
run_bindings_tests_path = input_api.os_path.join(input_api.PresubmitLocalPath(), pardir, pardir, 'Tools', 'Scripts', 'run-bindings-tests')
cmd_name = 'run-bindings-tests'
if input_api.platform == 'win32':
# Windows needs some help.
cmd = [input_api.python_executable, run_bindings_tests_path]
else:
cmd = [run_bindings_tests_path]
test_cmd = input_api.Command(
name=cmd_name,
cmd=cmd,
kwargs={},
message=message_type)
if input_api.verbose:
print('Running ' + cmd_name)
return input_api.RunTests([test_cmd])
def CheckChangeOnUpload(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
|
|
d29c3c1ca4085149f1442ca5ec46b77bbaef4028
|
mezzanine/bin/make_filebrowser_safe.py
|
mezzanine/bin/make_filebrowser_safe.py
|
#!/usr/bin/env python
"""
Converts the last backward-compatible filebrowser branch into a newly named
package ``filebrowser_safe``.
"""
import os
branch_url = "http://django-filebrowser.googlecode.com/svn/branches/filebrowser_3"
package_name_from = branch_url.split("/")[-1]
package_name_to = "filebrowser_safe"
if not os.path.exists(package_name_from):
print "Checking out branch..."
os.system("svn co %s" % branch_url)
for (dirpath, dirnames, filenames) in os.walk(package_name_from, False):
for name in dirnames:
path = os.path.join(dirpath, name)
# Delete SVN directories.
if ".svn" in path:
print "Deleting svn %s" % path
os.rmdir(path)
for name in filenames:
path = os.path.join(dirpath, name)
# Delete SVN files.
if ".svn" in path:
print "Deleting svn %s" % path
os.remove(path)
elif path.endswith(".py"):
update = False
with open(path, "r") as f:
data = f.read()
if name == "admin.py":
# Comment out calls to admin.site.register.
update = True
data = data.replace("\nadmin.site.register",
"\n#admin.site.register")
# Replace these instances of the package name with the new name.
for replace_str in ("filebrowser.", "app_label = \"filebrowser\""):
if replace_str in data:
update = True
data = data.replace(replace_str,
replace_str.replace("filebrowser", package_name_to))
if update:
print "Rewriting %s" % path
with open(path, "w") as f:
f.write(data)
# Move the package to the Mezzanine directory using the filebrowser_safe name.
script_path = os.path.dirname(os.path.abspath(__file__))
os.renames(package_name_from, os.path.join(script_path, "..", "..",
package_name_to))
|
Add script for generating fork of filebrowser.
|
Add script for generating fork of filebrowser.
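The string-rewrite rule at the heart of the loop is easy to check in isolation. The helper below (rename_package is a name invented for this sketch, not part of the script) reproduces it on a single source string.
def rename_package(data, name_from="filebrowser", name_to="filebrowser_safe"):
    # Mirrors the replace loop above: dotted imports and app_label strings.
    for replace_str in ("%s." % name_from, 'app_label = "%s"' % name_from):
        if replace_str in data:
            data = data.replace(replace_str,
                                replace_str.replace(name_from, name_to))
    return data
assert (rename_package("from filebrowser.fields import FileBrowseField")
        == "from filebrowser_safe.fields import FileBrowseField")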
|
Python
|
bsd-2-clause
|
douglaskastle/mezzanine,industrydive/mezzanine,wyzex/mezzanine,dustinrb/mezzanine,vladir/mezzanine,industrydive/mezzanine,Kniyl/mezzanine,promil23/mezzanine,adrian-the-git/mezzanine,sjdines/mezzanine,dustinrb/mezzanine,orlenko/sfpirg,viaregio/mezzanine,batpad/mezzanine,wbtuomela/mezzanine,frankier/mezzanine,vladir/mezzanine,molokov/mezzanine,sjuxax/mezzanine,wbtuomela/mezzanine,promil23/mezzanine,theclanks/mezzanine,dovydas/mezzanine,joshcartme/mezzanine,dekomote/mezzanine-modeltranslation-backport,guibernardino/mezzanine,jjz/mezzanine,webounty/mezzanine,tuxinhang1989/mezzanine,wyzex/mezzanine,damnfine/mezzanine,readevalprint/mezzanine,christianwgd/mezzanine,sjdines/mezzanine,frankier/mezzanine,PegasusWang/mezzanine,adrian-the-git/mezzanine,spookylukey/mezzanine,orlenko/sfpirg,dovydas/mezzanine,theclanks/mezzanine,christianwgd/mezzanine,stbarnabas/mezzanine,Cajoline/mezzanine,biomassives/mezzanine,batpad/mezzanine,damnfine/mezzanine,fusionbox/mezzanine,orlenko/plei,ryneeverett/mezzanine,sjdines/mezzanine,saintbird/mezzanine,Cajoline/mezzanine,saintbird/mezzanine,wrwrwr/mezzanine,emile2016/mezzanine,gradel/mezzanine,webounty/mezzanine,SoLoHiC/mezzanine,PegasusWang/mezzanine,Skytorn86/mezzanine,frankchin/mezzanine,jerivas/mezzanine,dsanders11/mezzanine,damnfine/mezzanine,agepoly/mezzanine,nikolas/mezzanine,Kniyl/mezzanine,nikolas/mezzanine,agepoly/mezzanine,geodesign/mezzanine,scarcry/snm-mezzanine,stbarnabas/mezzanine,dovydas/mezzanine,readevalprint/mezzanine,joshcartme/mezzanine,wyzex/mezzanine,sjuxax/mezzanine,douglaskastle/mezzanine,readevalprint/mezzanine,stephenmcd/mezzanine,frankier/mezzanine,jerivas/mezzanine,ZeroXn/mezzanine,wbtuomela/mezzanine,geodesign/mezzanine,Cicero-Zhao/mezzanine,scarcry/snm-mezzanine,molokov/mezzanine,Cajoline/mezzanine,theclanks/mezzanine,fusionbox/mezzanine,gradel/mezzanine,vladir/mezzanine,sjuxax/mezzanine,ZeroXn/mezzanine,orlenko/plei,tuxinhang1989/mezzanine,dustinrb/mezzanine,emile2016/mezzanine,mush42/mezzanine,promil23/mezzanine,wrwrwr/mezzanine,jjz/mezzanine,stephenmcd/mezzanine,webounty/mezzanine,gradel/mezzanine,saintbird/mezzanine,frankchin/mezzanine,industrydive/mezzanine,emile2016/mezzanine,stephenmcd/mezzanine,dekomote/mezzanine-modeltranslation-backport,tuxinhang1989/mezzanine,joshcartme/mezzanine,mush42/mezzanine,gbosh/mezzanine,dsanders11/mezzanine,PegasusWang/mezzanine,jerivas/mezzanine,AlexHill/mezzanine,Skytorn86/mezzanine,mush42/mezzanine,guibernardino/mezzanine,biomassives/mezzanine,eino-makitalo/mezzanine,spookylukey/mezzanine,viaregio/mezzanine,gbosh/mezzanine,spookylukey/mezzanine,SoLoHiC/mezzanine,cccs-web/mezzanine,eino-makitalo/mezzanine,SoLoHiC/mezzanine,Kniyl/mezzanine,cccs-web/mezzanine,ryneeverett/mezzanine,dsanders11/mezzanine,Skytorn86/mezzanine,scarcry/snm-mezzanine,dekomote/mezzanine-modeltranslation-backport,ZeroXn/mezzanine,molokov/mezzanine,adrian-the-git/mezzanine,nikolas/mezzanine,Cicero-Zhao/mezzanine,orlenko/sfpirg,agepoly/mezzanine,orlenko/plei,gbosh/mezzanine,jjz/mezzanine,geodesign/mezzanine,ryneeverett/mezzanine,frankchin/mezzanine,eino-makitalo/mezzanine,biomassives/mezzanine,AlexHill/mezzanine,christianwgd/mezzanine,viaregio/mezzanine,douglaskastle/mezzanine
|
Add script for generating fork of filebrowser.
|
#!/usr/bin/env python
"""
Converts the last backward-compatible filebrowser branch into a newly named
package ``filebrowser_safe``.
"""
import os
branch_url = "http://django-filebrowser.googlecode.com/svn/branches/filebrowser_3"
package_name_from = branch_url.split("/")[-1]
package_name_to = "filebrowser_safe"
if not os.path.exists(package_name_from):
print "Checking out branch..."
os.system("svn co %s" % branch_url)
for (dirpath, dirnames, filenames) in os.walk(package_name_from, False):
for name in dirnames:
path = os.path.join(dirpath, name)
# Delete SVN directories.
if ".svn" in path:
print "Deleting svn %s" % path
os.rmdir(path)
for name in filenames:
path = os.path.join(dirpath, name)
# Delete SVN files.
if ".svn" in path:
print "Deleting svn %s" % path
os.remove(path)
elif path.endswith(".py"):
update = False
with open(path, "r") as f:
data = f.read()
if name == "admin.py":
# Comment out calls to admin.site.register.
update = True
data = data.replace("\nadmin.site.register",
"\n#admin.site.register")
# Replace these instances of the package name with the new name.
for replace_str in ("filebrowser.", "app_label = \"filebrowser\""):
if replace_str in data:
update = True
data = data.replace(replace_str,
replace_str.replace("filebrowser", package_name_to))
if update:
print "Rewriting %s" % path
with open(path, "w") as f:
f.write(data)
# Move the package to the Mezzanine directory using the filebrowser_safe name.
script_path = os.path.dirname(os.path.abspath(__file__))
os.renames(package_name_from, os.path.join(script_path, "..", "..",
package_name_to))
|
<commit_before><commit_msg>Add script for generating fork of filebrowser.<commit_after>
|
#!/usr/bin/env python
"""
Converts the last backward-compatible filebrowser branch into a newly named
package ``filebrowser_safe``.
"""
import os
branch_url = "http://django-filebrowser.googlecode.com/svn/branches/filebrowser_3"
package_name_from = branch_url.split("/")[-1]
package_name_to = "filebrowser_safe"
if not os.path.exists(package_name_from):
print "Checking out branch..."
os.system("svn co %s" % branch_url)
for (dirpath, dirnames, filenames) in os.walk(package_name_from, False):
for name in dirnames:
path = os.path.join(dirpath, name)
# Delete SVN directories.
if ".svn" in path:
print "Deleting svn %s" % path
os.rmdir(path)
for name in filenames:
path = os.path.join(dirpath, name)
# Delete SVN files.
if ".svn" in path:
print "Deleting svn %s" % path
os.remove(path)
elif path.endswith(".py"):
update = False
with open(path, "r") as f:
data = f.read()
if name == "admin.py":
# Comment out calls to admin.site.register.
update = True
data = data.replace("\nadmin.site.register",
"\n#admin.site.register")
# Replace these instances of the package name with the new name.
for replace_str in ("filebrowser.", "app_label = \"filebrowser\""):
if replace_str in data:
update = True
data = data.replace(replace_str,
replace_str.replace("filebrowser", package_name_to))
if update:
print "Rewriting %s" % path
with open(path, "w") as f:
f.write(data)
# Move the package to the Mezzanine directory using the filebrowser_safe name.
script_path = os.path.dirname(os.path.abspath(__file__))
os.renames(package_name_from, os.path.join(script_path, "..", "..",
package_name_to))
|
Add script for generating fork of filebrowser.#!/usr/bin/env python
"""
Converts the last backward-compatible filebrowser branch into a newly named
package ``filebrowser_safe``.
"""
import os
branch_url = "http://django-filebrowser.googlecode.com/svn/branches/filebrowser_3"
package_name_from = branch_url.split("/")[-1]
package_name_to = "filebrowser_safe"
if not os.path.exists(package_name_from):
print "Checking out branch..."
os.system("svn co %s" % branch_url)
for (dirpath, dirnames, filenames) in os.walk(package_name_from, False):
for name in dirnames:
path = os.path.join(dirpath, name)
# Delete SVN directories.
if ".svn" in path:
print "Deleting svn %s" % path
os.rmdir(path)
for name in filenames:
path = os.path.join(dirpath, name)
# Delete SVN files.
if ".svn" in path:
print "Deleting svn %s" % path
os.remove(path)
elif path.endswith(".py"):
update = False
with open(path, "r") as f:
data = f.read()
if name == "admin.py":
# Comment out calls to admin.site.register.
update = True
data = data.replace("\nadmin.site.register",
"\n#admin.site.register")
# Replace these instances of the package name with the new name.
for replace_str in ("filebrowser.", "app_label = \"filebrowser\""):
if replace_str in data:
update = True
data = data.replace(replace_str,
replace_str.replace("filebrowser", package_name_to))
if update:
print "Rewriting %s" % path
with open(path, "w") as f:
f.write(data)
# Move the package to the Mezzanine directory using the filebrowser_safe name.
script_path = os.path.dirname(os.path.abspath(__file__))
os.renames(package_name_from, os.path.join(script_path, "..", "..",
package_name_to))
|
<commit_before><commit_msg>Add script for generating fork of filebrowser.<commit_after>#!/usr/bin/env python
"""
Converts the last backward-compatible filebrowser branch into a newly named
package ``filebrowser_safe``.
"""
import os
branch_url = "http://django-filebrowser.googlecode.com/svn/branches/filebrowser_3"
package_name_from = branch_url.split("/")[-1]
package_name_to = "filebrowser_safe"
if not os.path.exists(package_name_from):
print "Checking out branch..."
os.system("svn co %s" % branch_url)
for (dirpath, dirnames, filenames) in os.walk(package_name_from, False):
for name in dirnames:
path = os.path.join(dirpath, name)
# Delete SVN directories.
if ".svn" in path:
print "Deleting svn %s" % path
os.rmdir(path)
for name in filenames:
path = os.path.join(dirpath, name)
# Delete SVN files.
if ".svn" in path:
print "Deleting svn %s" % path
os.remove(path)
elif path.endswith(".py"):
update = False
with open(path, "r") as f:
data = f.read()
if name == "admin.py":
# Comment out calls to admin.site.register.
update = True
data = data.replace("\nadmin.site.register",
"\n#admin.site.register")
# Replace these instances of the package name with the new name.
for replace_str in ("filebrowser.", "app_label = \"filebrowser\""):
if replace_str in data:
update = True
data = data.replace(replace_str,
replace_str.replace("filebrowser", package_name_to))
if update:
print "Rewriting %s" % path
with open(path, "w") as f:
f.write(data)
# Move the package to the Mezzanine directory using the filebrowser_safe name.
script_path = os.path.dirname(os.path.abspath(__file__))
os.renames(package_name_from, os.path.join(script_path, "..", "..",
package_name_to))
|
|
cf362d57dbc3a08fb2fd8b60577e8383adeae432
|
stdnum/fi/veronumero.py
|
stdnum/fi/veronumero.py
|
# veronumero.py - functions for handling Finnish tax numbers
# coding: utf-8
#
# Copyright (C) 2012-2015 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Veronumero (Finnish tax number).
Module for handling veronumero (Finnish Tax numbers).
See
https://www.vero.fi/en/detailed-guidance/guidance/48791/individual_tax_numbers__instructions_fo/
There is no checksum for this identifier.
>>> validate('123456789123')
'123456789123'
>>> validate('12345678912A')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('123456789')
Traceback (most recent call last):
...
InvalidLength: ...
"""
from stdnum.exceptions import *
def validate(number):
"""Checks to see if the number provided is a valid VAT number. This
checks the length, formatting and check digit."""
if not number.isdigit():
raise InvalidFormat()
if len(number) != 12:
raise InvalidLength()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid VAT number. This
checks the length, formatting and check digit."""
try:
return bool(validate(number))
except ValidationError:
return False
|
Implement Finnish tax number validator
|
Implement Finnish tax number validator
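Assuming the module lands as stdnum.fi.veronumero, usage looks like this sketch; since there is no checksum, validation is purely digits plus length.
from stdnum.fi import veronumero
print(veronumero.validate('123456789123'))   # '123456789123'
print(veronumero.is_valid('12345678912A'))   # False: contains a letter
print(veronumero.is_valid('123456789'))      # False: wrong length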
|
Python
|
lgpl-2.1
|
holvi/python-stdnum,holvi/python-stdnum,holvi/python-stdnum
|
Implement Finnish tax number validator
|
# veronumero.py - functions for handling Finnish tax numbers
# coding: utf-8
#
# Copyright (C) 2012-2015 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Veronumero (Finnish tax number).
Module for handling veronumero (Finnish Tax numbers).
See
https://www.vero.fi/en/detailed-guidance/guidance/48791/individual_tax_numbers__instructions_fo/
There is no checksum for this identifier.
>>> validate('123456789123')
'123456789123'
>>> validate('12345678912A')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('123456789')
Traceback (most recent call last):
...
InvalidLength: ...
"""
from stdnum.exceptions import *
def validate(number):
"""Checks to see if the number provided is a valid VAT number. This
checks the length, formatting and check digit."""
if not number.isdigit():
raise InvalidFormat()
if len(number) != 12:
raise InvalidLength()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid VAT number. This
checks the length, formatting and check digit."""
try:
return bool(validate(number))
except ValidationError:
return False
|
<commit_before><commit_msg>Implement Finnish tax number validator<commit_after>
|
# veronumero.py - functions for handling Finnish tax numbers
# coding: utf-8
#
# Copyright (C) 2012-2015 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Veronumero (Finnish tax number).
Module for handling veronumero (Finnish Tax numbers).
See
https://www.vero.fi/en/detailed-guidance/guidance/48791/individual_tax_numbers__instructions_fo/
There is no checksum for this identifier.
>>> validate('123456789123')
'123456789123'
>>> validate('12345678912A')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('123456789')
Traceback (most recent call last):
...
InvalidLength: ...
"""
from stdnum.exceptions import *
def validate(number):
"""Checks to see if the number provided is a valid VAT number. This
checks the length, formatting and check digit."""
if not number.isdigit():
raise InvalidFormat()
if len(number) != 12:
raise InvalidLength()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid VAT number. This
checks the length, formatting and check digit."""
try:
return bool(validate(number))
except ValidationError:
return False
|
Implement Finnish tax number validator# veronumero.py - functions for handling Finnish tax numbers
# coding: utf-8
#
# Copyright (C) 2012-2015 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Veronumero (Finnish tax number).
Module for handling veronumero (Finnish Tax numbers).
See
https://www.vero.fi/en/detailed-guidance/guidance/48791/individual_tax_numbers__instructions_fo/
There is no checksum for this identifier.
>>> validate('123456789123')
'123456789123'
>>> validate('12345678912A')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('123456789')
Traceback (most recent call last):
...
InvalidLength: ...
"""
from stdnum.exceptions import *
def validate(number):
"""Checks to see if the number provided is a valid VAT number. This
checks the length, formatting and check digit."""
if not number.isdigit():
raise InvalidFormat()
if len(number) != 12:
raise InvalidLength()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid VAT number. This
checks the length, formatting and check digit."""
try:
return bool(validate(number))
except ValidationError:
return False
|
<commit_before><commit_msg>Implement Finnish tax number validator<commit_after># vat.py - functions for handling Finnish VAT numbers
# coding: utf-8
#
# Copyright (C) 2012-2015 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Veronumero (Finnish tax number).
Module for handling veronumero (Finnish Tax numbers).
See
https://www.vero.fi/en/detailed-guidance/guidance/48791/individual_tax_numbers__instructions_fo/
There is no checksum for this identifier.
>>> validate('123456789123')
'123456789123'
>>> validate('12345678912A')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('123456789')
Traceback (most recent call last):
...
InvalidLength: ...
"""
from stdnum.exceptions import *
def validate(number):
"""Checks to see if the number provided is a valid VAT number. This
checks the length, formatting and check digit."""
if not number.isdigit():
raise InvalidFormat()
if len(number) != 12:
raise InvalidLength()
return number
def is_valid(number):
"""Checks to see if the number provided is a valid VAT number. This
checks the length, formatting and check digit."""
try:
return bool(validate(number))
except ValidationError:
return False
|
|
931c6b2809de22b7c74206256ecfd1c5beaee149
|
app/main/views/two_factor.py
|
app/main/views/two_factor.py
|
from flask import render_template, redirect, jsonify, session
from flask_login import login_user
from app.main import main
from app.main.dao import users_dao, verify_codes_dao
from app.main.forms import TwoFactorForm
@main.route("/two-factor", methods=['GET'])
def render_two_factor():
return render_template('views/two-factor.html', form=TwoFactorForm())
@main.route('/two-factor', methods=['POST'])
def process_two_factor():
form = TwoFactorForm()
if form.validate_on_submit():
user = users_dao.get_user_by_id(session['user_id'])
verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms')
login_user(user)
return redirect('/dashboard')
else:
return jsonify(form.errors), 400
|
import traceback
from flask import render_template, redirect, jsonify, session
from flask_login import login_user
from app.main import main
from app.main.dao import users_dao, verify_codes_dao
from app.main.forms import TwoFactorForm
@main.route("/two-factor", methods=['GET'])
def render_two_factor():
return render_template('views/two-factor.html', form=TwoFactorForm())
@main.route('/two-factor', methods=['POST'])
def process_two_factor():
try:
form = TwoFactorForm()
if form.validate_on_submit():
user = users_dao.get_user_by_id(session['user_id'])
verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms')
login_user(user)
return redirect('/dashboard')
else:
return jsonify(form.errors), 400
except:
traceback.print_exc()
|
Add debugging to find issue.
|
Add debugging to find issue.
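As a less invasive alternative (not part of this commit), a decorator can log the traceback and re-raise, so Flask still returns its normal 500 response instead of the bare except falling through to an implicit None; log_exceptions is a name invented for this sketch.
import traceback
from functools import wraps
def log_exceptions(view):
    """Print the traceback but re-raise so the framework still handles it."""
    @wraps(view)
    def wrapper(*args, **kwargs):
        try:
            return view(*args, **kwargs)
        except Exception:
            traceback.print_exc()
            raise
    return wrapper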
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin
|
from flask import render_template, redirect, jsonify, session
from flask_login import login_user
from app.main import main
from app.main.dao import users_dao, verify_codes_dao
from app.main.forms import TwoFactorForm
@main.route("/two-factor", methods=['GET'])
def render_two_factor():
return render_template('views/two-factor.html', form=TwoFactorForm())
@main.route('/two-factor', methods=['POST'])
def process_two_factor():
form = TwoFactorForm()
if form.validate_on_submit():
user = users_dao.get_user_by_id(session['user_id'])
verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms')
login_user(user)
return redirect('/dashboard')
else:
return jsonify(form.errors), 400
Add debugging to find issue.
|
import traceback
from flask import render_template, redirect, jsonify, session
from flask_login import login_user
from app.main import main
from app.main.dao import users_dao, verify_codes_dao
from app.main.forms import TwoFactorForm
@main.route("/two-factor", methods=['GET'])
def render_two_factor():
return render_template('views/two-factor.html', form=TwoFactorForm())
@main.route('/two-factor', methods=['POST'])
def process_two_factor():
try:
form = TwoFactorForm()
if form.validate_on_submit():
user = users_dao.get_user_by_id(session['user_id'])
verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms')
login_user(user)
return redirect('/dashboard')
else:
return jsonify(form.errors), 400
except:
traceback.print_exc()
|
<commit_before>from flask import render_template, redirect, jsonify, session
from flask_login import login_user
from app.main import main
from app.main.dao import users_dao, verify_codes_dao
from app.main.forms import TwoFactorForm
@main.route("/two-factor", methods=['GET'])
def render_two_factor():
return render_template('views/two-factor.html', form=TwoFactorForm())
@main.route('/two-factor', methods=['POST'])
def process_two_factor():
form = TwoFactorForm()
if form.validate_on_submit():
user = users_dao.get_user_by_id(session['user_id'])
verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms')
login_user(user)
return redirect('/dashboard')
else:
return jsonify(form.errors), 400
<commit_msg>Add debugging to find issue.<commit_after>
|
import traceback
from flask import render_template, redirect, jsonify, session
from flask_login import login_user
from app.main import main
from app.main.dao import users_dao, verify_codes_dao
from app.main.forms import TwoFactorForm
@main.route("/two-factor", methods=['GET'])
def render_two_factor():
return render_template('views/two-factor.html', form=TwoFactorForm())
@main.route('/two-factor', methods=['POST'])
def process_two_factor():
try:
form = TwoFactorForm()
if form.validate_on_submit():
user = users_dao.get_user_by_id(session['user_id'])
verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms')
login_user(user)
return redirect('/dashboard')
else:
return jsonify(form.errors), 400
except:
traceback.print_exc()
|
from flask import render_template, redirect, jsonify, session
from flask_login import login_user
from app.main import main
from app.main.dao import users_dao, verify_codes_dao
from app.main.forms import TwoFactorForm
@main.route("/two-factor", methods=['GET'])
def render_two_factor():
return render_template('views/two-factor.html', form=TwoFactorForm())
@main.route('/two-factor', methods=['POST'])
def process_two_factor():
form = TwoFactorForm()
if form.validate_on_submit():
user = users_dao.get_user_by_id(session['user_id'])
verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms')
login_user(user)
return redirect('/dashboard')
else:
return jsonify(form.errors), 400
Add debugging to find issue.import traceback
from flask import render_template, redirect, jsonify, session
from flask_login import login_user
from app.main import main
from app.main.dao import users_dao, verify_codes_dao
from app.main.forms import TwoFactorForm
@main.route("/two-factor", methods=['GET'])
def render_two_factor():
return render_template('views/two-factor.html', form=TwoFactorForm())
@main.route('/two-factor', methods=['POST'])
def process_two_factor():
try:
form = TwoFactorForm()
if form.validate_on_submit():
user = users_dao.get_user_by_id(session['user_id'])
verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms')
login_user(user)
return redirect('/dashboard')
else:
return jsonify(form.errors), 400
except:
traceback.print_exc()
|
<commit_before>from flask import render_template, redirect, jsonify, session
from flask_login import login_user
from app.main import main
from app.main.dao import users_dao, verify_codes_dao
from app.main.forms import TwoFactorForm
@main.route("/two-factor", methods=['GET'])
def render_two_factor():
return render_template('views/two-factor.html', form=TwoFactorForm())
@main.route('/two-factor', methods=['POST'])
def process_two_factor():
form = TwoFactorForm()
if form.validate_on_submit():
user = users_dao.get_user_by_id(session['user_id'])
verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms')
login_user(user)
return redirect('/dashboard')
else:
return jsonify(form.errors), 400
<commit_msg>Add debugging to find issue.<commit_after>import traceback
from flask import render_template, redirect, jsonify, session
from flask_login import login_user
from app.main import main
from app.main.dao import users_dao, verify_codes_dao
from app.main.forms import TwoFactorForm
@main.route("/two-factor", methods=['GET'])
def render_two_factor():
return render_template('views/two-factor.html', form=TwoFactorForm())
@main.route('/two-factor', methods=['POST'])
def process_two_factor():
try:
form = TwoFactorForm()
if form.validate_on_submit():
user = users_dao.get_user_by_id(session['user_id'])
verify_codes_dao.use_code_for_user_and_type(user_id=user.id, code_type='sms')
login_user(user)
return redirect('/dashboard')
else:
return jsonify(form.errors), 400
except:
traceback.print_exc()
|
587b8b82de1bf407101b39fd18ef6626c3556547
|
test/helperfuncstest.py
|
test/helperfuncstest.py
|
#!/usr/bin/python2.4
#
# Copyright (c) 2004-2005 rPath, Inc.
#
import testsuite
testsuite.setup()
from mint_rephelp import MintRepositoryHelper
from mint.userlevels import myProjectCompare
class ProjectTest(MintRepositoryHelper):
def testMyProjectCompare(self):
if not isinstance(myProjectCompare(('not tested', 1), ('ignored', 0)), int):
self.fail("myProjectCompare did not return an int")
if not isinstance(myProjectCompare(('not tested', 1L), ('ignored', 0L)), int):
self.fail("myProjectCompare did not return an int")
if __name__ == "__main__":
testsuite.main()
|
Test case to ensure MySQL doesn't break My Projects speedbox.
|
Test case to ensure MySQL doesn't break My Projects speedbox.
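The 1 versus 1L pair reflects that MySQL bindings can hand back Python longs where SQLite returns plain ints; this rationale is inferred from the commit message, not stated in the code. A Python 2 snippet showing the type drift a naive subtraction-based comparator could leak:
print(type(1 - 0))        # <type 'int'>
print(type(1L - 0L))      # <type 'long'> -- what a subtraction comparator leaks
print(type(cmp(1L, 0L)))  # <type 'int'>  -- cmp() always returns a plain int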
|
Python
|
apache-2.0
|
sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint
|
Test case to ensure MySQL doesn't break My Projects speedbox.
|
#!/usr/bin/python2.4
#
# Copyright (c) 2004-2005 rPath, Inc.
#
import testsuite
testsuite.setup()
from mint_rephelp import MintRepositoryHelper
from mint.userlevels import myProjectCompare
class ProjectTest(MintRepositoryHelper):
def testMyProjectCompare(self):
if not isinstance(myProjectCompare(('not tested', 1), ('ignored', 0)), int):
self.fail("myProjectCompare did not return an int")
if not isinstance(myProjectCompare(('not tested', 1L), ('ignored', 0L)), int):
self.fail("myProjectCompare did not return an int")
if __name__ == "__main__":
testsuite.main()
|
<commit_before><commit_msg>Test case to ensure MySQL doesn't break My Projects speedbox.<commit_after>
|
#!/usr/bin/python2.4
#
# Copyright (c) 2004-2005 rPath, Inc.
#
import testsuite
testsuite.setup()
from mint_rephelp import MintRepositoryHelper
from mint.userlevels import myProjectCompare
class ProjectTest(MintRepositoryHelper):
def testMyProjectCompare(self):
if not isinstance(myProjectCompare(('not tested', 1), ('ignored', 0)), int):
self.fail("myProjectCompare did not return an int")
if not isinstance(myProjectCompare(('not tested', 1L), ('ignored', 0L)), int):
self.fail("myProjectCompare did not return an int")
if __name__ == "__main__":
testsuite.main()
|
Test case to ensure MySQL doesn't break My Projects speedbox.#!/usr/bin/python2.4
#
# Copyright (c) 2004-2005 rPath, Inc.
#
import testsuite
testsuite.setup()
from mint_rephelp import MintRepositoryHelper
from mint.userlevels import myProjectCompare
class ProjectTest(MintRepositoryHelper):
def testMyProjectCompare(self):
if not isinstance(myProjectCompare(('not tested', 1), ('ignored', 0)), int):
self.fail("myProjectCompare did not return an int")
if not isinstance(myProjectCompare(('not tested', 1L), ('ignored', 0L)), int):
self.fail("myProjectCompare did not return an int")
if __name__ == "__main__":
testsuite.main()
|
<commit_before><commit_msg>Test case to ensure MySQL doesn't break My Projects speedbox.<commit_after>#!/usr/bin/python2.4
#
# Copyright (c) 2004-2005 rPath, Inc.
#
import testsuite
testsuite.setup()
from mint_rephelp import MintRepositoryHelper
from mint.userlevels import myProjectCompare
class ProjectTest(MintRepositoryHelper):
def testMyProjectCompare(self):
if not isinstance(myProjectCompare(('not tested', 1), ('ignored', 0)), int):
self.fail("myProjectCompare did not return an int")
if not isinstance(myProjectCompare(('not tested', 1L), ('ignored', 0L)), int):
self.fail("myProjectCompare did not return an int")
if __name__ == "__main__":
testsuite.main()
|
|
ad08acee184a899943e15e8c6e08385c68c158a7
|
tests/_test_m18nkeys.py
|
tests/_test_m18nkeys.py
|
# -*- coding: utf-8 -*-
import re
import glob
import json
import yaml
###############################################################################
# Find used keys in python code #
###############################################################################
# This regex matches « foo » in patterns like « m18n.n( "foo" »
p = re.compile(r'm18n\.n\(\s*[\"\']([a-zA-Z0-9_]+)[\"\']')
python_files = glob.glob("/vagrant/yunohost/src/yunohost/*.py")
python_files.extend(glob.glob("/vagrant/yunohost/src/yunohost/utils/*.py"))
python_files.append("/vagrant/yunohost/bin/yunohost")
python_keys = set()
for python_file in python_files:
with open(python_file) as f:
keys_in_file = p.findall(f.read())
for key in keys_in_file:
python_keys.add(key)
###############################################################################
# Find keys used in actionmap #
###############################################################################
actionmap_keys = set()
actionmap = yaml.load(open("../data/actionsmap/yunohost.yml"))
for _, category in actionmap.items():
if "actions" not in category.keys():
continue
for _, action in category["actions"].items():
if "arguments" not in action.keys():
continue
for _, argument in action["arguments"].items():
if "extra" not in argument.keys():
continue
if "password" in argument["extra"]:
actionmap_keys.add(argument["extra"]["password"])
if "ask" in argument["extra"]:
actionmap_keys.add(argument["extra"]["ask"])
if "pattern" in argument["extra"]:
actionmap_keys.add(argument["extra"]["pattern"][1])
if "help" in argument["extra"]:
print argument["extra"]["help"]
# These keys are used but difficult to parse
actionmap_keys.add("admin_password")
###############################################################################
# Load en locale json keys #
###############################################################################
en_locale_file = "/vagrant/yunohost/locales/en.json"
with open(en_locale_file) as f:
en_locale_json = json.loads(f.read())
en_locale_keys = set(en_locale_json.keys())
###############################################################################
# Compare keys used and keys defined #
###############################################################################
used_keys = python_keys.union(actionmap_keys)
keys_used_but_not_defined = used_keys.difference(en_locale_keys)
keys_defined_but_not_used = en_locale_keys.difference(used_keys)
if len(keys_used_but_not_defined) != 0:
print "> Error ! Those keys are used in some files but not defined :"
for key in sorted(keys_used_but_not_defined):
print " - %s" % key
if len(keys_defined_but_not_used) != 0:
print "> Warning ! Those keys are defined but seems unused :"
for key in sorted(keys_defined_but_not_used):
print " - %s" % key
|
Add a script to test m18n keys usage
|
Add a script to test m18n keys usage
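A quick sanity check of the extraction regex on an invented sample line:
import re
p = re.compile(r'm18n\.n\(\s*[\"\']([a-zA-Z0-9_]+)[\"\']')
sample = "raise MoulinetteError(errno.EINVAL, m18n.n('backup_invalid_archive'))"
print(p.findall(sample))   # ['backup_invalid_archive']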
|
Python
|
agpl-3.0
|
YunoHost/yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost
|
Add a script to test m18n keys usage
|
# -*- coding: utf-8 -*-
import re
import glob
import json
import yaml
###############################################################################
# Find used keys in python code #
###############################################################################
# This regex matches « foo » in patterns like « m18n.n( "foo" »
p = re.compile(r'm18n\.n\(\s*[\"\']([a-zA-Z0-9_]+)[\"\']')
python_files = glob.glob("/vagrant/yunohost/src/yunohost/*.py")
python_files.extend(glob.glob("/vagrant/yunohost/src/yunohost/utils/*.py"))
python_files.append("/vagrant/yunohost/bin/yunohost")
python_keys = set()
for python_file in python_files:
with open(python_file) as f:
keys_in_file = p.findall(f.read())
for key in keys_in_file:
python_keys.add(key)
###############################################################################
# Find keys used in actionmap #
###############################################################################
actionmap_keys = set()
actionmap = yaml.load(open("../data/actionsmap/yunohost.yml"))
for _, category in actionmap.items():
if "actions" not in category.keys():
continue
for _, action in category["actions"].items():
if "arguments" not in action.keys():
continue
for _, argument in action["arguments"].items():
if "extra" not in argument.keys():
continue
if "password" in argument["extra"]:
actionmap_keys.add(argument["extra"]["password"])
if "ask" in argument["extra"]:
actionmap_keys.add(argument["extra"]["ask"])
if "pattern" in argument["extra"]:
actionmap_keys.add(argument["extra"]["pattern"][1])
if "help" in argument["extra"]:
print argument["extra"]["help"]
# These keys are used but difficult to parse
actionmap_keys.add("admin_password")
###############################################################################
# Load en locale json keys #
###############################################################################
en_locale_file = "/vagrant/yunohost/locales/en.json"
with open(en_locale_file) as f:
en_locale_json = json.loads(f.read())
en_locale_keys = set(en_locale_json.keys())
###############################################################################
# Compare keys used and keys defined #
###############################################################################
used_keys = python_keys.union(actionmap_keys)
keys_used_but_not_defined = used_keys.difference(en_locale_keys)
keys_defined_but_not_used = en_locale_keys.difference(used_keys)
if len(keys_used_but_not_defined) != 0:
print "> Error ! Those keys are used in some files but not defined :"
for key in sorted(keys_used_but_not_defined):
print " - %s" % key
if len(keys_defined_but_not_used) != 0:
print "> Warning ! Those keys are defined but seems unused :"
for key in sorted(keys_defined_but_not_used):
print " - %s" % key
|
<commit_before><commit_msg>Add a script to test m18n keys usage<commit_after>
|
# -*- coding: utf-8 -*-
import re
import glob
import json
import yaml
###############################################################################
# Find used keys in python code #
###############################################################################
# This regex matches « foo » in patterns like « m18n.n( "foo" »
p = re.compile(r'm18n\.n\(\s*[\"\']([a-zA-Z0-9_]+)[\"\']')
python_files = glob.glob("/vagrant/yunohost/src/yunohost/*.py")
python_files.extend(glob.glob("/vagrant/yunohost/src/yunohost/utils/*.py"))
python_files.append("/vagrant/yunohost/bin/yunohost")
python_keys = set()
for python_file in python_files:
with open(python_file) as f:
keys_in_file = p.findall(f.read())
for key in keys_in_file:
python_keys.add(key)
###############################################################################
# Find keys used in actionmap #
###############################################################################
actionmap_keys = set()
actionmap = yaml.load(open("../data/actionsmap/yunohost.yml"))
for _, category in actionmap.items():
if "actions" not in category.keys():
continue
for _, action in category["actions"].items():
if "arguments" not in action.keys():
continue
for _, argument in action["arguments"].items():
if "extra" not in argument.keys():
continue
if "password" in argument["extra"]:
actionmap_keys.add(argument["extra"]["password"])
if "ask" in argument["extra"]:
actionmap_keys.add(argument["extra"]["ask"])
if "pattern" in argument["extra"]:
actionmap_keys.add(argument["extra"]["pattern"][1])
if "help" in argument["extra"]:
print argument["extra"]["help"]
# These keys are used but difficult to parse
actionmap_keys.add("admin_password")
###############################################################################
# Load en locale json keys #
###############################################################################
en_locale_file = "/vagrant/yunohost/locales/en.json"
with open(en_locale_file) as f:
en_locale_json = json.loads(f.read())
en_locale_keys = set(en_locale_json.keys())
###############################################################################
# Compare keys used and keys defined #
###############################################################################
used_keys = python_keys.union(actionmap_keys)
keys_used_but_not_defined = used_keys.difference(en_locale_keys)
keys_defined_but_not_used = en_locale_keys.difference(used_keys)
if len(keys_used_but_not_defined) != 0:
print "> Error ! Those keys are used in some files but not defined :"
for key in sorted(keys_used_but_not_defined):
print " - %s" % key
if len(keys_defined_but_not_used) != 0:
print "> Warning ! Those keys are defined but seems unused :"
for key in sorted(keys_defined_but_not_used):
print " - %s" % key
|
Add a script to test m18n keys usage# -*- coding: utf-8 -*-
import re
import glob
import json
import yaml
###############################################################################
# Find used keys in python code #
###############################################################################
# This regex matches « foo » in patterns like « m18n.n( "foo" »
p = re.compile(r'm18n\.n\(\s*[\"\']([a-zA-Z0-9_]+)[\"\']')
python_files = glob.glob("/vagrant/yunohost/src/yunohost/*.py")
python_files.extend(glob.glob("/vagrant/yunohost/src/yunohost/utils/*.py"))
python_files.append("/vagrant/yunohost/bin/yunohost")
python_keys = set()
for python_file in python_files:
with open(python_file) as f:
keys_in_file = p.findall(f.read())
for key in keys_in_file:
python_keys.add(key)
###############################################################################
# Find keys used in actionmap #
###############################################################################
actionmap_keys = set()
actionmap = yaml.load(open("../data/actionsmap/yunohost.yml"))
for _, category in actionmap.items():
if "actions" not in category.keys():
continue
for _, action in category["actions"].items():
if "arguments" not in action.keys():
continue
for _, argument in action["arguments"].items():
if "extra" not in argument.keys():
continue
if "password" in argument["extra"]:
actionmap_keys.add(argument["extra"]["password"])
if "ask" in argument["extra"]:
actionmap_keys.add(argument["extra"]["ask"])
if "pattern" in argument["extra"]:
actionmap_keys.add(argument["extra"]["pattern"][1])
if "help" in argument["extra"]:
print argument["extra"]["help"]
# These keys are used but difficult to parse
actionmap_keys.add("admin_password")
###############################################################################
# Load en locale json keys #
###############################################################################
en_locale_file = "/vagrant/yunohost/locales/en.json"
with open(en_locale_file) as f:
en_locale_json = json.loads(f.read())
en_locale_keys = set(en_locale_json.keys())
###############################################################################
# Compare keys used and keys defined #
###############################################################################
used_keys = python_keys.union(actionmap_keys)
keys_used_but_not_defined = used_keys.difference(en_locale_keys)
keys_defined_but_not_used = en_locale_keys.difference(used_keys)
if len(keys_used_but_not_defined) != 0:
print "> Error ! Those keys are used in some files but not defined :"
for key in sorted(keys_used_but_not_defined):
print " - %s" % key
if len(keys_defined_but_not_used) != 0:
 Warning !">
    print "> Warning ! Those keys are defined but seem unused :"
for key in sorted(keys_defined_but_not_used):
print " - %s" % key
|
<commit_before><commit_msg>Add a script to test m18n keys usage<commit_after># -*- coding: utf-8 -*-
import re
import glob
import json
import yaml
###############################################################################
# Find used keys in python code #
###############################################################################
# This regex matches « foo » in patterns like « m18n.n( "foo" »
p = re.compile(r'm18n\.n\(\s*[\"\']([a-zA-Z0-9_]+)[\"\']')
python_files = glob.glob("/vagrant/yunohost/src/yunohost/*.py")
python_files.extend(glob.glob("/vagrant/yunohost/src/yunohost/utils/*.py"))
python_files.append("/vagrant/yunohost/bin/yunohost")
python_keys = set()
for python_file in python_files:
with open(python_file) as f:
keys_in_file = p.findall(f.read())
for key in keys_in_file:
python_keys.add(key)
###############################################################################
# Find keys used in actionmap #
###############################################################################
actionmap_keys = set()
actionmap = yaml.load(open("../data/actionsmap/yunohost.yml"))
for _, category in actionmap.items():
if "actions" not in category.keys():
continue
for _, action in category["actions"].items():
if "arguments" not in action.keys():
continue
for _, argument in action["arguments"].items():
if "extra" not in argument.keys():
continue
if "password" in argument["extra"]:
actionmap_keys.add(argument["extra"]["password"])
if "ask" in argument["extra"]:
actionmap_keys.add(argument["extra"]["ask"])
if "pattern" in argument["extra"]:
actionmap_keys.add(argument["extra"]["pattern"][1])
if "help" in argument["extra"]:
print argument["extra"]["help"]
# These keys are used but difficult to parse
actionmap_keys.add("admin_password")
###############################################################################
# Load en locale json keys #
###############################################################################
en_locale_file = "/vagrant/yunohost/locales/en.json"
with open(en_locale_file) as f:
en_locale_json = json.loads(f.read())
en_locale_keys = set(en_locale_json.keys())
###############################################################################
# Compare keys used and keys defined #
###############################################################################
used_keys = python_keys.union(actionmap_keys)
keys_used_but_not_defined = used_keys.difference(en_locale_keys)
keys_defined_but_not_used = en_locale_keys.difference(used_keys)
if len(keys_used_but_not_defined) != 0:
print "> Error ! Those keys are used in some files but not defined :"
for key in sorted(keys_used_but_not_defined):
print " - %s" % key
if len(keys_defined_but_not_used) != 0:
 Warning !">
    print "> Warning ! Those keys are defined but seem unused :"
for key in sorted(keys_defined_but_not_used):
print " - %s" % key
|
|
318e9b311fff8c5d66f1d043e3784fb81b8d4eda
|
tests/test_browseimage.py
|
tests/test_browseimage.py
|
# coding=utf-8
from __future__ import absolute_import
from eodatasets import browseimage, drivers, type as ptype
from tests import write_files, assert_same
def test_create_typical_browse_metadata():
class TestDriver(drivers.DatasetDriver):
def browse_image_bands(self, d):
return '5', '1', '3'
d = write_files({})
dataset = browseimage.create_typical_browse_metadata(TestDriver(), ptype.DatasetMetadata(), d)
expected = ptype.DatasetMetadata(
browse={
'full':
ptype.BrowseMetadata(
path=d.joinpath('browse.fr.jpg'),
file_type='image/jpg',
red_band='5',
green_band='1',
blue_band='3'
),
'medium':
ptype.BrowseMetadata(
path=d.joinpath('browse.jpg'),
# Default medium size.
shape=ptype.Point(1024, None),
file_type='image/jpg',
red_band='5',
green_band='1',
blue_band='3'
)
}
)
expected.id_, dataset.id_ = None, None
assert_same(expected, dataset)
def test_create_mono_browse_metadata():
# A single band for the browse image.
class TestDriver(drivers.DatasetDriver):
def browse_image_bands(self, d):
return '5'
d = write_files({})
dataset = browseimage.create_typical_browse_metadata(TestDriver(), ptype.DatasetMetadata(), d)
expected = ptype.DatasetMetadata(
browse={
'full':
ptype.BrowseMetadata(
path=d.joinpath('browse.fr.jpg'),
file_type='image/jpg',
red_band='5',
green_band='5',
blue_band='5'
),
'medium':
ptype.BrowseMetadata(
path=d.joinpath('browse.jpg'),
# Default medium size.
shape=ptype.Point(1024, None),
file_type='image/jpg',
red_band='5',
green_band='5',
blue_band='5'
)
}
)
expected.id_, dataset.id_ = None, None
assert_same(expected, dataset)
|
Add browse image setup test.
|
Add browse image setup test.
|
Python
|
apache-2.0
|
GeoscienceAustralia/eo-datasets,jeremyh/eo-datasets,GeoscienceAustralia/eo-datasets,jeremyh/eo-datasets
|
Add browse image setup test.
|
# coding=utf-8
from __future__ import absolute_import
from eodatasets import browseimage, drivers, type as ptype
from tests import write_files, assert_same
def test_create_typical_browse_metadata():
class TestDriver(drivers.DatasetDriver):
def browse_image_bands(self, d):
return '5', '1', '3'
d = write_files({})
dataset = browseimage.create_typical_browse_metadata(TestDriver(), ptype.DatasetMetadata(), d)
expected = ptype.DatasetMetadata(
browse={
'full':
ptype.BrowseMetadata(
path=d.joinpath('browse.fr.jpg'),
file_type='image/jpg',
red_band='5',
green_band='1',
blue_band='3'
),
'medium':
ptype.BrowseMetadata(
path=d.joinpath('browse.jpg'),
# Default medium size.
shape=ptype.Point(1024, None),
file_type='image/jpg',
red_band='5',
green_band='1',
blue_band='3'
)
}
)
expected.id_, dataset.id_ = None, None
assert_same(expected, dataset)
def test_create_mono_browse_metadata():
# A single band for the browse image.
class TestDriver(drivers.DatasetDriver):
def browse_image_bands(self, d):
return '5'
d = write_files({})
dataset = browseimage.create_typical_browse_metadata(TestDriver(), ptype.DatasetMetadata(), d)
expected = ptype.DatasetMetadata(
browse={
'full':
ptype.BrowseMetadata(
path=d.joinpath('browse.fr.jpg'),
file_type='image/jpg',
red_band='5',
green_band='5',
blue_band='5'
),
'medium':
ptype.BrowseMetadata(
path=d.joinpath('browse.jpg'),
# Default medium size.
shape=ptype.Point(1024, None),
file_type='image/jpg',
red_band='5',
green_band='5',
blue_band='5'
)
}
)
expected.id_, dataset.id_ = None, None
assert_same(expected, dataset)
|
<commit_before><commit_msg>Add browse image setup test.<commit_after>
|
# coding=utf-8
from __future__ import absolute_import
from eodatasets import browseimage, drivers, type as ptype
from tests import write_files, assert_same
def test_create_typical_browse_metadata():
class TestDriver(drivers.DatasetDriver):
def browse_image_bands(self, d):
return '5', '1', '3'
d = write_files({})
dataset = browseimage.create_typical_browse_metadata(TestDriver(), ptype.DatasetMetadata(), d)
expected = ptype.DatasetMetadata(
browse={
'full':
ptype.BrowseMetadata(
path=d.joinpath('browse.fr.jpg'),
file_type='image/jpg',
red_band='5',
green_band='1',
blue_band='3'
),
'medium':
ptype.BrowseMetadata(
path=d.joinpath('browse.jpg'),
# Default medium size.
shape=ptype.Point(1024, None),
file_type='image/jpg',
red_band='5',
green_band='1',
blue_band='3'
)
}
)
expected.id_, dataset.id_ = None, None
assert_same(expected, dataset)
def test_create_mono_browse_metadata():
# A single band for the browse image.
class TestDriver(drivers.DatasetDriver):
def browse_image_bands(self, d):
return '5'
d = write_files({})
dataset = browseimage.create_typical_browse_metadata(TestDriver(), ptype.DatasetMetadata(), d)
expected = ptype.DatasetMetadata(
browse={
'full':
ptype.BrowseMetadata(
path=d.joinpath('browse.fr.jpg'),
file_type='image/jpg',
red_band='5',
green_band='5',
blue_band='5'
),
'medium':
ptype.BrowseMetadata(
path=d.joinpath('browse.jpg'),
# Default medium size.
shape=ptype.Point(1024, None),
file_type='image/jpg',
red_band='5',
green_band='5',
blue_band='5'
)
}
)
expected.id_, dataset.id_ = None, None
assert_same(expected, dataset)
|
Add browse image setup test.# coding=utf-8
from __future__ import absolute_import
from eodatasets import browseimage, drivers, type as ptype
from tests import write_files, assert_same
def test_create_typical_browse_metadata():
class TestDriver(drivers.DatasetDriver):
def browse_image_bands(self, d):
return '5', '1', '3'
d = write_files({})
dataset = browseimage.create_typical_browse_metadata(TestDriver(), ptype.DatasetMetadata(), d)
expected = ptype.DatasetMetadata(
browse={
'full':
ptype.BrowseMetadata(
path=d.joinpath('browse.fr.jpg'),
file_type='image/jpg',
red_band='5',
green_band='1',
blue_band='3'
),
'medium':
ptype.BrowseMetadata(
path=d.joinpath('browse.jpg'),
# Default medium size.
shape=ptype.Point(1024, None),
file_type='image/jpg',
red_band='5',
green_band='1',
blue_band='3'
)
}
)
expected.id_, dataset.id_ = None, None
assert_same(expected, dataset)
def test_create_mono_browse_metadata():
# A single band for the browse image.
class TestDriver(drivers.DatasetDriver):
def browse_image_bands(self, d):
return '5'
d = write_files({})
dataset = browseimage.create_typical_browse_metadata(TestDriver(), ptype.DatasetMetadata(), d)
expected = ptype.DatasetMetadata(
browse={
'full':
ptype.BrowseMetadata(
path=d.joinpath('browse.fr.jpg'),
file_type='image/jpg',
red_band='5',
green_band='5',
blue_band='5'
),
'medium':
ptype.BrowseMetadata(
path=d.joinpath('browse.jpg'),
# Default medium size.
shape=ptype.Point(1024, None),
file_type='image/jpg',
red_band='5',
green_band='5',
blue_band='5'
)
}
)
expected.id_, dataset.id_ = None, None
assert_same(expected, dataset)
|
<commit_before><commit_msg>Add browse image setup test.<commit_after># coding=utf-8
from __future__ import absolute_import
from eodatasets import browseimage, drivers, type as ptype
from tests import write_files, assert_same
def test_create_typical_browse_metadata():
class TestDriver(drivers.DatasetDriver):
def browse_image_bands(self, d):
return '5', '1', '3'
d = write_files({})
dataset = browseimage.create_typical_browse_metadata(TestDriver(), ptype.DatasetMetadata(), d)
expected = ptype.DatasetMetadata(
browse={
'full':
ptype.BrowseMetadata(
path=d.joinpath('browse.fr.jpg'),
file_type='image/jpg',
red_band='5',
green_band='1',
blue_band='3'
),
'medium':
ptype.BrowseMetadata(
path=d.joinpath('browse.jpg'),
# Default medium size.
shape=ptype.Point(1024, None),
file_type='image/jpg',
red_band='5',
green_band='1',
blue_band='3'
)
}
)
expected.id_, dataset.id_ = None, None
assert_same(expected, dataset)
def test_create_mono_browse_metadata():
# A single band for the browse image.
class TestDriver(drivers.DatasetDriver):
def browse_image_bands(self, d):
return '5'
d = write_files({})
dataset = browseimage.create_typical_browse_metadata(TestDriver(), ptype.DatasetMetadata(), d)
expected = ptype.DatasetMetadata(
browse={
'full':
ptype.BrowseMetadata(
path=d.joinpath('browse.fr.jpg'),
file_type='image/jpg',
red_band='5',
green_band='5',
blue_band='5'
),
'medium':
ptype.BrowseMetadata(
path=d.joinpath('browse.jpg'),
# Default medium size.
shape=ptype.Point(1024, None),
file_type='image/jpg',
red_band='5',
green_band='5',
blue_band='5'
)
}
)
expected.id_, dataset.id_ = None, None
assert_same(expected, dataset)
|
|
c34f6bf526049be86e6cff713986bf25645f5223
|
vagrant/tournament/generate_tournament.py
|
vagrant/tournament/generate_tournament.py
|
import random
from tournament import connect
from tournament import reportMatch
from tournament_test import testDelete
the_players = [
(1, 'Jeff'),
(2, 'Adarsh'),
(3, 'Amanda'),
(4, 'Eduardo'),
(5, 'Philip'),
(6, 'Jee')
]
def registerPlayerUpdated(name):
"""Add a player to the tournament database.
The database assigns a unique serial id number for the player. (This
should be handled by your SQL database schema, not in your Python code.)
Args:
name: the player's full name (need not be unique).
"""
db = connect()
db_cursor = db.cursor()
query = "INSERT INTO players (name) VALUES (%s)"
db_cursor.execute(query, (name,))
db.commit()
db.close()
def createRandomMatches(player_list, num_matches):
num_players = len(player_list)
for i in xrange(num_matches):
print 'match %s' % (i + 1)
player1_index = random.randint(0, num_players - 1)
player2_index = random.randint(0, num_players - 1)
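        # if both rolls picked the same player, shift to the next index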
if player2_index == player1_index:
player2_index = (player1_index + 1) % num_players
winner_id = player_list[player1_index][0]
winner_name = player_list[player1_index][1]
loser_id = player_list[player2_index][0]
loser_name = player_list[player2_index][1]
reportMatch(winner_id, loser_id)
print "%s (id=%s) beat %s (id=%s)" % (
winner_name,
winner_id,
loser_name,
loser_id)
def setup_players_and_matches():
testDelete()
for player in the_players:
registerPlayerUpdated(player[1])
createRandomMatches(the_players, 100)
if __name__ == '__main__':
setup_players_and_matches()
|
Add random tournament generator script
|
Feat: Add random tournament generator script
|
Python
|
mit
|
gsbullmer/tournament-results,gsbullmer/tournament-results,gsbullmer/tournament-results
|
Feat: Add random tournament generator script
|
import random
from tournament import connect
from tournament import reportMatch
from tournament_test import testDelete
the_players = [
(1, 'Jeff'),
(2, 'Adarsh'),
(3, 'Amanda'),
(4, 'Eduardo'),
(5, 'Philip'),
(6, 'Jee')
]
def registerPlayerUpdated(name):
"""Add a player to the tournament database.
The database assigns a unique serial id number for the player. (This
should be handled by your SQL database schema, not in your Python code.)
Args:
name: the player's full name (need not be unique).
"""
db = connect()
db_cursor = db.cursor()
query = "INSERT INTO players (name) VALUES (%s)"
db_cursor.execute(query, (name,))
db.commit()
db.close()
def createRandomMatches(player_list, num_matches):
num_players = len(player_list)
for i in xrange(num_matches):
print 'match %s' % (i + 1)
player1_index = random.randint(0, num_players - 1)
player2_index = random.randint(0, num_players - 1)
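        # if both rolls picked the same player, shift to the next index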
if player2_index == player1_index:
player2_index = (player1_index + 1) % num_players
winner_id = player_list[player1_index][0]
winner_name = player_list[player1_index][1]
loser_id = player_list[player2_index][0]
loser_name = player_list[player2_index][1]
reportMatch(winner_id, loser_id)
print "%s (id=%s) beat %s (id=%s)" % (
winner_name,
winner_id,
loser_name,
loser_id)
def setup_players_and_matches():
testDelete()
for player in the_players:
registerPlayerUpdated(player[1])
createRandomMatches(the_players, 100)
if __name__ == '__main__':
setup_players_and_matches()
|
<commit_before><commit_msg>Feat: Add random tournament generator script<commit_after>
|
import random
from tournament import connect
from tournament import reportMatch
from tournament_test import testDelete
the_players = [
(1, 'Jeff'),
(2, 'Adarsh'),
(3, 'Amanda'),
(4, 'Eduardo'),
(5, 'Philip'),
(6, 'Jee')
]
def registerPlayerUpdated(name):
"""Add a player to the tournament database.
The database assigns a unique serial id number for the player. (This
should be handled by your SQL database schema, not in your Python code.)
Args:
name: the player's full name (need not be unique).
"""
db = connect()
db_cursor = db.cursor()
query = "INSERT INTO players (name) VALUES (%s)"
db_cursor.execute(query, (name,))
db.commit()
db.close()
def createRandomMatches(player_list, num_matches):
num_players = len(player_list)
for i in xrange(num_matches):
print 'match %s' % (i + 1)
player1_index = random.randint(0, num_players - 1)
player2_index = random.randint(0, num_players - 1)
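        # if both rolls picked the same player, shift to the next index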
if player2_index == player1_index:
player2_index = (player1_index + 1) % num_players
winner_id = player_list[player1_index][0]
winner_name = player_list[player1_index][1]
loser_id = player_list[player2_index][0]
loser_name = player_list[player2_index][1]
reportMatch(winner_id, loser_id)
print "%s (id=%s) beat %s (id=%s)" % (
winner_name,
winner_id,
loser_name,
loser_id)
def setup_players_and_matches():
testDelete()
for player in the_players:
registerPlayerUpdated(player[1])
createRandomMatches(the_players, 100)
if __name__ == '__main__':
setup_players_and_matches()
|
Feat: Add random tournament generator scriptimport random
from tournament import connect
from tournament import reportMatch
from tournament_test import testDelete
the_players = [
(1, 'Jeff'),
(2, 'Adarsh'),
(3, 'Amanda'),
(4, 'Eduardo'),
(5, 'Philip'),
(6, 'Jee')
]
def registerPlayerUpdated(name):
"""Add a player to the tournament database.
The database assigns a unique serial id number for the player. (This
should be handled by your SQL database schema, not in your Python code.)
Args:
name: the player's full name (need not be unique).
"""
db = connect()
db_cursor = db.cursor()
query = "INSERT INTO players (name) VALUES (%s)"
db_cursor.execute(query, (name,))
db.commit()
db.close()
def createRandomMatches(player_list, num_matches):
num_players = len(player_list)
for i in xrange(num_matches):
print 'match %s' % (i + 1)
player1_index = random.randint(0, num_players - 1)
player2_index = random.randint(0, num_players - 1)
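        # if both rolls picked the same player, shift to the next index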
if player2_index == player1_index:
player2_index = (player1_index + 1) % num_players
winner_id = player_list[player1_index][0]
winner_name = player_list[player1_index][1]
loser_id = player_list[player2_index][0]
loser_name = player_list[player2_index][1]
reportMatch(winner_id, loser_id)
print "%s (id=%s) beat %s (id=%s)" % (
winner_name,
winner_id,
loser_name,
loser_id)
def setup_players_and_matches():
testDelete()
for player in the_players:
registerPlayerUpdated(player[1])
createRandomMatches(the_players, 100)
if __name__ == '__main__':
setup_players_and_matches()
|
<commit_before><commit_msg>Feat: Add random tournament generator script<commit_after>import random
from tournament import connect
from tournament import reportMatch
from tournament_test import testDelete
the_players = [
(1, 'Jeff'),
(2, 'Adarsh'),
(3, 'Amanda'),
(4, 'Eduardo'),
(5, 'Philip'),
(6, 'Jee')
]
def registerPlayerUpdated(name):
"""Add a player to the tournament database.
The database assigns a unique serial id number for the player. (This
should be handled by your SQL database schema, not in your Python code.)
Args:
name: the player's full name (need not be unique).
"""
db = connect()
db_cursor = db.cursor()
query = "INSERT INTO players (name) VALUES (%s)"
db_cursor.execute(query, (name,))
db.commit()
db.close()
def createRandomMatches(player_list, num_matches):
num_players = len(player_list)
for i in xrange(num_matches):
print 'match %s' % (i + 1)
player1_index = random.randint(0, num_players - 1)
player2_index = random.randint(0, num_players - 1)
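        # if both rolls picked the same player, shift to the next index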
if player2_index == player1_index:
player2_index = (player1_index + 1) % num_players
winner_id = player_list[player1_index][0]
winner_name = player_list[player1_index][1]
loser_id = player_list[player2_index][0]
loser_name = player_list[player2_index][1]
reportMatch(winner_id, loser_id)
print "%s (id=%s) beat %s (id=%s)" % (
winner_name,
winner_id,
loser_name,
loser_id)
def setup_players_and_matches():
testDelete()
for player in the_players:
registerPlayerUpdated(player[1])
createRandomMatches(the_players, 100)
if __name__ == '__main__':
setup_players_and_matches()
|
|
c00f02045a5e081f1df5cd62993491ef7dc9af52
|
remap.py
|
remap.py
|
import sys
def parseMaps(goodMapFile, badMapFile):
with open(goodMapFile) as gmf:
goodMap = gmf.readlines()
del goodMap[0]
with open(badMapFile) as bmf:
badMap = bmf.readlines()
del badMap[0]
bcMap = {}
# create a map from the sample ID to the good barcode
for line in goodMap:
line = line.split('\t')
bcMap[line[0]] = line[1]
for line in badMap:
line = line.split('\t')
if line[0] in bcMap:
bcMap[line[0]] = (line[1], bcMap[line[0]])
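    # keep only sample IDs present in both maps: {bad_barcode: good_barcode}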
return dict([t for t in bcMap.values() if isinstance(t, tuple)])
if __name__ == '__main__':
goodMapFile = sys.argv[1]
badMapFile = sys.argv[2]
fastaFile = sys.argv[3]
bcMap = parseMaps(goodMapFile, badMapFile)
newFasta = open('new.fasta', 'w')
with open(fastaFile) as f:
for line in f:
if line[0] == '>':
header = line
newFasta.write(header)
else:
# sequence info
seq = line
badCode = seq[:8]
if badCode in bcMap:
newFasta.write(bcMap[badCode] + seq[8:])
else:
print 'Mapping error: ' + header
newFasta.close()
|
Create a new QIIME mapping file by replacing incorrect barcodes
|
Create a new QIIME mapping file by replacing incorrect barcodes
|
Python
|
mit
|
smdabdoub/phylotoast,akshayparopkari/phylotoast
|
Create a new QIIME mapping file by replacing incorrect barcodes
|
import sys
def parseMaps(goodMapFile, badMapFile):
with open(goodMapFile) as gmf:
goodMap = gmf.readlines()
del goodMap[0]
with open(badMapFile) as bmf:
badMap = bmf.readlines()
del badMap[0]
bcMap = {}
# create a map from the sample ID to the good barcode
for line in goodMap:
line = line.split('\t')
bcMap[line[0]] = line[1]
for line in badMap:
line = line.split('\t')
if line[0] in bcMap:
bcMap[line[0]] = (line[1], bcMap[line[0]])
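    # keep only sample IDs present in both maps: {bad_barcode: good_barcode}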
return dict([t for t in bcMap.values() if isinstance(t, tuple)])
if __name__ == '__main__':
goodMapFile = sys.argv[1]
badMapFile = sys.argv[2]
fastaFile = sys.argv[3]
bcMap = parseMaps(goodMapFile, badMapFile)
newFasta = open('new.fasta', 'w')
with open(fastaFile) as f:
for line in f:
if line[0] == '>':
header = line
newFasta.write(header)
else:
# sequence info
seq = line
badCode = seq[:8]
if badCode in bcMap:
newFasta.write(bcMap[badCode] + seq[8:])
else:
print 'Mapping error: ' + header
newFasta.close()
|
<commit_before><commit_msg>Create a new QIIME mapping file by replacing incorrect barcodes<commit_after>
|
import sys
def parseMaps(goodMapFile, badMapFile):
with open(goodMapFile) as gmf:
goodMap = gmf.readlines()
del goodMap[0]
with open(badMapFile) as bmf:
badMap = bmf.readlines()
del badMap[0]
bcMap = {}
# create a map from the sample ID to the good barcode
for line in goodMap:
line = line.split('\t')
bcMap[line[0]] = line[1]
for line in badMap:
line = line.split('\t')
if line[0] in bcMap:
bcMap[line[0]] = (line[1], bcMap[line[0]])
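    # keep only sample IDs present in both maps: {bad_barcode: good_barcode}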
return dict([t for t in bcMap.values() if isinstance(t, tuple)])
if __name__ == '__main__':
goodMapFile = sys.argv[1]
badMapFile = sys.argv[2]
fastaFile = sys.argv[3]
bcMap = parseMaps(goodMapFile, badMapFile)
newFasta = open('new.fasta', 'w')
with open(fastaFile) as f:
for line in f:
if line[0] == '>':
header = line
newFasta.write(header)
else:
# sequence info
seq = line
badCode = seq[:8]
if badCode in bcMap:
newFasta.write(bcMap[badCode] + seq[8:])
else:
print 'Mapping error: ' + header
newFasta.close()
|
Create a new QIIME mapping file by replacing incorrect barcodesimport sys
def parseMaps(goodMapFile, badMapFile):
with open(goodMapFile) as gmf:
goodMap = gmf.readlines()
del goodMap[0]
with open(badMapFile) as bmf:
badMap = bmf.readlines()
del badMap[0]
bcMap = {}
# create a map from the sample ID to the good barcode
for line in goodMap:
line = line.split('\t')
bcMap[line[0]] = line[1]
for line in badMap:
line = line.split('\t')
if line[0] in bcMap:
bcMap[line[0]] = (line[1], bcMap[line[0]])
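    # keep only sample IDs present in both maps: {bad_barcode: good_barcode}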
return dict([t for t in bcMap.values() if isinstance(t, tuple)])
if __name__ == '__main__':
goodMapFile = sys.argv[1]
badMapFile = sys.argv[2]
fastaFile = sys.argv[3]
bcMap = parseMaps(goodMapFile, badMapFile)
newFasta = open('new.fasta', 'w')
with open(fastaFile) as f:
for line in f:
if line[0] == '>':
header = line
newFasta.write(header)
else:
# sequence info
seq = line
badCode = seq[:8]
if badCode in bcMap:
newFasta.write(bcMap[badCode] + seq[8:])
else:
print 'Mapping error: ' + header
newFasta.close()
|
<commit_before><commit_msg>Create a new QIIME mapping file by replacing incorrect barcodes<commit_after>import sys
def parseMaps(goodMapFile, badMapFile):
with open(goodMapFile) as gmf:
goodMap = gmf.readlines()
del goodMap[0]
with open(badMapFile) as bmf:
badMap = bmf.readlines()
del badMap[0]
bcMap = {}
# create a map from the sample ID to the good barcode
for line in goodMap:
line = line.split('\t')
bcMap[line[0]] = line[1]
for line in badMap:
line = line.split('\t')
if line[0] in bcMap:
bcMap[line[0]] = (line[1], bcMap[line[0]])
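    # keep only sample IDs present in both maps: {bad_barcode: good_barcode}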
return dict([t for t in bcMap.values() if isinstance(t, tuple)])
if __name__ == '__main__':
goodMapFile = sys.argv[1]
badMapFile = sys.argv[2]
fastaFile = sys.argv[3]
bcMap = parseMaps(goodMapFile, badMapFile)
newFasta = open('new.fasta', 'w')
with open(fastaFile) as f:
for line in f:
if line[0] == '>':
header = line
newFasta.write(header)
else:
# sequence info
seq = line
badCode = seq[:8]
if badCode in bcMap:
newFasta.write(bcMap[badCode] + seq[8:])
else:
print 'Mapping error: ' + header
newFasta.close()
|
|
8ea0a6b9986996e40fea906a855fba5547b27c89
|
recalculate_gene_ranks.py
|
recalculate_gene_ranks.py
|
import django_rq
import logging
import os
import time
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'varify.conf.settings')
from django.core import management
log = logging.getLogger(__name__)
# We don't want this job to run forever so limit to this many attempts and
# if the queue is still not empty just give up.
MAX_ATTEMPTS = 100
# Time in seconds between polling the queue for the current count
POLL_DELAY = 300
queue = django_rq.get_queue('default')
attempts = 0
while attempts < MAX_ATTEMPTS:
attempts += 1
    if queue.is_empty():
management.call_command('samples', 'gene-ranks')
log.debug('gene ranks updated')
break
log.debug('queue not empty, waiting {0} seconds'.format(POLL_DELAY))
time.sleep(POLL_DELAY)
else:
# Max attempts, log and exit
log.error('Maximum attempts ({0}) made when trying to update gene ranks.'
.format(MAX_ATTEMPTS))
|
Add script to call gene-ranks command after samples load
|
Add script to call gene-ranks command after samples load
This will need to be added as a cron job like the recalculate_allele_frequencies script is used now.
|
Python
|
bsd-2-clause
|
chop-dbhi/varify,chop-dbhi/varify,chop-dbhi/varify,chop-dbhi/varify
|
Add script to call gene-ranks command after samples load
This will need to be added as a cron job like the recalculate_allele_frequencies script is used now.
|
import django_rq
import logging
import os
import time
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'varify.conf.settings')
from django.core import management
log = logging.getLogger(__name__)
# We don't want this job to run forever so limit to this many attempts and
# if the queue is still not empty just give up.
MAX_ATTEMPTS = 100
# Time in seconds between polling the queue for the current count
POLL_DELAY = 300
queue = django_rq.get_queue('default')
attempts = 0
while attempts < MAX_ATTEMPTS:
attempts += 1
    if queue.is_empty():
management.call_command('samples', 'gene-ranks')
log.debug('gene ranks updated')
break
log.debug('queue not empty, waiting {0} seconds'.format(POLL_DELAY))
time.sleep(POLL_DELAY)
else:
# Max attempts, log and exit
log.error('Maximum attempts ({0}) made when trying to update gene ranks.'
.format(MAX_ATTEMPTS))
|
<commit_before><commit_msg>Add script to call gene-ranks command after samples load
This will need to be added as a cron job like the recalculate_allele_frequencies script is used now.<commit_after>
|
import django_rq
import logging
import os
import time
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'varify.conf.settings')
from django.core import management
log = logging.getLogger(__name__)
# We don't want this job to run forever so limit to this many attempts and
# if the queue is still not empty just give up.
MAX_ATTEMPTS = 100
# Time in seconds between polling the queue for the current count
POLL_DELAY = 300
queue = django_rq.get_queue('default')
attempts = 0
while attempts < MAX_ATTEMPTS:
attempts += 1
    if queue.is_empty():
management.call_command('samples', 'gene-ranks')
log.debug('gene ranks updated')
break
log.debug('queue not empty, waiting {0} seconds'.format(POLL_DELAY))
time.sleep(POLL_DELAY)
else:
# Max attempts, log and exit
log.error('Maximum attempts ({0}) made when trying to update gene ranks.'
.format(MAX_ATTEMPTS))
|
Add script to call gene-ranks command after samples load
This will need to be added as a cron job like the recalculate_allele_frequencies script is used now.import django_rq
import logging
import os
import time
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'varify.conf.settings')
from django.core import management
log = logging.getLogger(__name__)
# We don't want this job to run forever so limit to this many attempts and
# if the queue is still not empty just give up.
MAX_ATTEMPTS = 100
# Time in seconds between polling the queue for the current count
POLL_DELAY = 300
queue = django_rq.get_queue('default')
attempts = 0
while attempts < MAX_ATTEMPTS:
attempts += 1
    if queue.is_empty():
management.call_command('samples', 'gene-ranks')
log.debug('gene ranks updated')
break
log.debug('queue not empty, waiting {0} seconds'.format(POLL_DELAY))
time.sleep(POLL_DELAY)
else:
# Max attempts, log and exit
log.error('Maximum attempts ({0}) made when trying to update gene ranks.'
.format(MAX_ATTEMPTS))
|
<commit_before><commit_msg>Add script to call gene-ranks command after samples load
This will need to be added as a cron job like the recalculate_allele_frequencies script is used now.<commit_after>import django_rq
import logging
import os
import time
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'varify.conf.settings')
from django.core import management
log = logging.getLogger(__name__)
# We don't want this job to run forever so limit to this many attempts and
# if the queue is still not empty just give up.
MAX_ATTEMPTS = 100
# Time in seconds between polling the queue for the current count
POLL_DELAY = 300
queue = django_rq.get_queue('default')
attempts = 0
while attempts < MAX_ATTEMPTS:
attempts += 1
    if queue.is_empty():
management.call_command('samples', 'gene-ranks')
log.debug('gene ranks updated')
break
log.debug('queue not empty, waiting {0} seconds'.format(POLL_DELAY))
time.sleep(POLL_DELAY)
else:
# Max attempts, log and exit
log.error('Maximum attempts ({0}) made when trying to update gene ranks.'
.format(MAX_ATTEMPTS))
|
|
c058818cf29d154a1899ccedc326302506bed85f
|
tb_website/settings/orchestra.py
|
tb_website/settings/orchestra.py
|
"""
This module grabs the configured database settings from an Apache site config file.
"""
import os
from collections import defaultdict
DB_ENGINES = {
'SQLITE': 'django.db.backends.sqlite3',
'MYSQL': 'django.db.backends.mysql',
'PSQL': 'django.db.backends.postgresql',
'ORACLE': None,
}
def infinatedict():
"""An infinately deep defaultdict"""
return defaultdict(infinatedict)
def parse_apache_config(filename):
kwargs = infinatedict()
if not os.path.isfile(filename):
raise IOError("Apache config file is missing: %s" % filename)
with open(filename, 'r') as fhl:
for line in fhl.readlines():
if line.startswith('#'):
continue
if line.startswith('SetEnv'):
(name, value) = line[6:].strip().split()
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
                # walk down the nested dict, then set the deepest key to the value
                target = kwargs
                for part in name.split('_'):
                    previous = target
                    target = target[part]
                previous[part] = value
return kwargs
def get_database_config(filename, site=None):
config = parse_apache_config(filename)
if not config:
raise ValueError("Apache Config is empty: %s" % filename)
if site is None:
site = list(config)[0]
if site not in config:
raise KeyError("Apache config doesn't have site: %s" % site)
config = config[site]
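    # pick the first supported DB engine found among the site's SetEnv variables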
engines = [(DB_ENGINES[key], config[key]) for key, value in config.items() if key in DB_ENGINES]
if not engines:
raise KeyError("No supported database engine found in the apache config: %s" % filename)
engine, db = engines[0]
return {
'default': {
'ENGINE': engine,
'NAME': db.get('DB', db.get('FILENAME', '')),
'USER': db.get('USER', ''),
'PASSWORD': db.get('PASSWORD', ''),
'HOST': db.get('SERVER', ''),
'PORT': db.get('PORT', ''),
}
}
|
Add new settings support for apache
|
Add new settings support for apache
|
Python
|
agpl-3.0
|
IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site
|
Add new settings support for apache
|
"""
This module grabs the configured database settings from an Apache site config file.
"""
import os
from collections import defaultdict
DB_ENGINES = {
'SQLITE': 'django.db.backends.sqlite3',
'MYSQL': 'django.db.backends.mysql',
'PSQL': 'django.db.backends.postgresql',
'ORACLE': None,
}
def infinatedict():
"""An infinately deep defaultdict"""
return defaultdict(infinatedict)
def parse_apache_config(filename):
kwargs = infinatedict()
if not os.path.isfile(filename):
raise IOError("Apache config file is missing: %s" % filename)
with open(filename, 'r') as fhl:
for line in fhl.readlines():
if line.startswith('#'):
continue
if line.startswith('SetEnv'):
(name, value) = line[6:].strip().split()
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
                # walk down the nested dict, then set the deepest key to the value
                target = kwargs
                for part in name.split('_'):
                    previous = target
                    target = target[part]
                previous[part] = value
return kwargs
def get_database_config(filename, site=None):
config = parse_apache_config(filename)
if not config:
raise ValueError("Apache Config is empty: %s" % filename)
if site is None:
site = list(config)[0]
if site not in config:
raise KeyError("Apache config doesn't have site: %s" % site)
config = config[site]
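    # pick the first supported DB engine found among the site's SetEnv variables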
engines = [(DB_ENGINES[key], config[key]) for key, value in config.items() if key in DB_ENGINES]
if not engines:
raise KeyError("No supported database engine found in the apache config: %s" % filename)
engine, db = engines[0]
return {
'default': {
'ENGINE': engine,
'NAME': db.get('DB', db.get('FILENAME', '')),
'USER': db.get('USER', ''),
'PASSWORD': db.get('PASSWORD', ''),
'HOST': db.get('SERVER', ''),
'PORT': db.get('PORT', ''),
}
}
|
<commit_before><commit_msg>Add new settings support for apache<commit_after>
|
"""
This module grabs the configured database settings from an Apache site config file.
"""
import os
from collections import defaultdict
DB_ENGINES = {
'SQLITE': 'django.db.backends.sqlite3',
'MYSQL': 'django.db.backends.mysql',
'PSQL': 'django.db.backends.postgresql',
'ORACLE': None,
}
def infinatedict():
"""An infinately deep defaultdict"""
return defaultdict(infinatedict)
def parse_apache_config(filename):
kwargs = infinatedict()
if not os.path.isfile(filename):
raise IOError("Apache config file is missing: %s" % filename)
with open(filename, 'r') as fhl:
for line in fhl.readlines():
if line.startswith('#'):
continue
if line.startswith('SetEnv'):
(name, value) = line[6:].strip().split()
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
                # walk down the nested dict, then set the deepest key to the value
                target = kwargs
                for part in name.split('_'):
                    previous = target
                    target = target[part]
                previous[part] = value
return kwargs
def get_database_config(filename, site=None):
config = parse_apache_config(filename)
if not config:
raise ValueError("Apache Config is empty: %s" % filename)
if site is None:
site = list(config)[0]
if site not in config:
raise KeyError("Apache config doesn't have site: %s" % site)
config = config[site]
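    # pick the first supported DB engine found among the site's SetEnv variables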
engines = [(DB_ENGINES[key], config[key]) for key, value in config.items() if key in DB_ENGINES]
if not engines:
raise KeyError("No supported database engine found in the apache config: %s" % filename)
engine, db = engines[0]
return {
'default': {
'ENGINE': engine,
'NAME': db.get('DB', db.get('FILENAME', '')),
'USER': db.get('USER', ''),
'PASSWORD': db.get('PASSWORD', ''),
'HOST': db.get('SERVER', ''),
'PORT': db.get('PORT', ''),
}
}
|
Add new settings support for apache"""
This module grabs the configured database settings from an Apache site config file.
"""
import os
from collections import defaultdict
DB_ENGINES = {
'SQLITE': 'django.db.backends.sqlite3',
'MYSQL': 'django.db.backends.mysql',
'PSQL': 'django.db.backends.postgresql',
'ORACLE': None,
}
def infinatedict():
"""An infinately deep defaultdict"""
return defaultdict(infinatedict)
def parse_apache_config(filename):
kwargs = infinatedict()
if not os.path.isfile(filename):
raise IOError("Apache config file is missing: %s" % filename)
with open(filename, 'r') as fhl:
for line in fhl.readlines():
if line.startswith('#'):
continue
if line.startswith('SetEnv'):
(name, value) = line[6:].strip().split()
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
                # walk down the nested dict, then set the deepest key to the value
                target = kwargs
                for part in name.split('_'):
                    previous = target
                    target = target[part]
                previous[part] = value
return kwargs
def get_database_config(filename, site=None):
config = parse_apache_config(filename)
if not config:
raise ValueError("Apache Config is empty: %s" % filename)
if site is None:
site = list(config)[0]
if site not in config:
raise KeyError("Apache config doesn't have site: %s" % site)
config = config[site]
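    # pick the first supported DB engine found among the site's SetEnv variables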
engines = [(DB_ENGINES[key], config[key]) for key, value in config.items() if key in DB_ENGINES]
if not engines:
raise KeyError("No supported database engine found in the apache config: %s" % filename)
engine, db = engines[0]
return {
'default': {
'ENGINE': engine,
'NAME': db.get('DB', db.get('FILENAME', '')),
'USER': db.get('USER', ''),
'PASSWORD': db.get('PASSWORD', ''),
'HOST': db.get('SERVER', ''),
'PORT': db.get('PORT', ''),
}
}
|
<commit_before><commit_msg>Add new settings support for apache<commit_after>"""
This module grabs the configured database settings from an Apache site config file.
"""
import os
from collections import defaultdict
DB_ENGINES = {
'SQLITE': 'django.db.backends.sqlite3',
'MYSQL': 'django.db.backends.mysql',
'PSQL': 'django.db.backends.postgresql',
'ORACLE': None,
}
def infinatedict():
"""An infinately deep defaultdict"""
return defaultdict(infinatedict)
def parse_apache_config(filename):
kwargs = infinatedict()
if not os.path.isfile(filename):
raise IOError("Apache config file is missing: %s" % filename)
with open(filename, 'r') as fhl:
for line in fhl.readlines():
if line.startswith('#'):
continue
if line.startswith('SetEnv'):
(name, value) = line[6:].strip().split()
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
                # walk down the nested dict, then set the deepest key to the value
                target = kwargs
                for part in name.split('_'):
                    previous = target
                    target = target[part]
                previous[part] = value
return kwargs
def get_database_config(filename, site=None):
config = parse_apache_config(filename)
if not config:
raise ValueError("Apache Config is empty: %s" % filename)
if site is None:
site = list(config)[0]
if site not in config:
raise KeyError("Apache config doesn't have site: %s" % site)
config = config[site]
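    # pick the first supported DB engine found among the site's SetEnv variables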
engines = [(DB_ENGINES[key], config[key]) for key, value in config.items() if key in DB_ENGINES]
if not engines:
raise KeyError("No supported database engine found in the apache config: %s" % filename)
engine, db = engines[0]
return {
'default': {
'ENGINE': engine,
'NAME': db.get('DB', db.get('FILENAME', '')),
'USER': db.get('USER', ''),
'PASSWORD': db.get('PASSWORD', ''),
'HOST': db.get('SERVER', ''),
'PORT': db.get('PORT', ''),
}
}
|
|
485c7d862740c2547c7ffc5d3ace8c10243a1718
|
InvsByDayCalculator.py
|
InvsByDayCalculator.py
|
import datetime
year=2015
month=6
day=22
invsDays=64
invsRatio=0.096
invsFund = 2000
arrInvsPayBack = []
#array arrInvsPayPlan={}
invsDate = datetime.date(year,month,day)
finiDate = invsDate + datetime.timedelta(invsDays)
firstPayDate = datetime.date(year,month+1,20)
print 'Investment start date:', invsDate, '; maturity date:', finiDate, "; first payment date:", firstPayDate
totalPayBack = invsFund * invsRatio * invsDays / 365
## Check whether all the interest is paid in a single installment
payTimes = (finiDate.year - firstPayDate.year)*12 + finiDate.month - firstPayDate.month + 1
if payTimes == 1:
    # store each payment as a flat (date, days, interest) triple
    arrInvsPayBack.append(finiDate)
    arrInvsPayBack.append(invsDays)
    arrInvsPayBack.append(totalPayBack)
    print 'Single payment, date:', finiDate, ', interest days:', invsDays, ', interest:', totalPayBack
else:
    #print payTimes  # debug
    lastPayDate = invsDate
    for x in range(payTimes-1):
        # pay date & interest for each monthly installment
        # timedelta only supports (days) and (seconds), not months
        #payDate = firstPayDate + datetime.timedelta(months=x)
        if firstPayDate.month+x <=12:
            payDate = datetime.date(firstPayDate.year,firstPayDate.month+x,20)
        else:
            payDate = datetime.date(firstPayDate.year+1,firstPayDate.month+x-12,20)
        payDays = (payDate - lastPayDate).days
        payBack = invsFund * invsRatio * payDays / 365
        arrInvsPayBack.append(payDate)
        arrInvsPayBack.append(payDays)
        arrInvsPayBack.append(payBack)
        #print x, payDate, payBack
        lastPayDate = payDate
    # the final payment falls on the maturity date
    payDays = (finiDate - lastPayDate).days
    payBack = invsFund * invsRatio * payDays / 365
    arrInvsPayBack.append(finiDate)
    arrInvsPayBack.append(payDays)
    arrInvsPayBack.append(payBack)
total = len(arrInvsPayBack)
n = 0
while n<total/3:
    print 'Payment', n, ', date:', arrInvsPayBack[3*n], ', interest days:', arrInvsPayBack[3*n+1], ', interest:', arrInvsPayBack[3*n+2]
    n = n + 1
print 'Invested principal:', invsFund, '; total interest:', totalPayBack
|
Add the Simple InveCalculator Algo for Earning by days!
|
Add the Simple InveCalculator Algo for Earning by days!
|
Python
|
mit
|
DivadOEC/HInvestMgr
|
Add the Simple InveCalculator Algo for Earning by days!
|
import datetime
year=2015
month=6
day=22
invsDays=64
invsRatio=0.096
invsFund = 2000
arrInvsPayBack = []
#array arrInvsPayPlan={}
invsDate = datetime.date(year,month,day)
finiDate = invsDate + datetime.timedelta(invsDays)
firstPayDate = datetime.date(year,month+1,20)
print 'Investment start date:', invsDate, '; maturity date:', finiDate, "; first payment date:", firstPayDate
totalPayBack = invsFund * invsRatio * invsDays / 365
## Check whether all the interest is paid in a single installment
payTimes = (finiDate.year - firstPayDate.year)*12 + finiDate.month - firstPayDate.month + 1
if payTimes == 1:
    # store each payment as a flat (date, days, interest) triple
    arrInvsPayBack.append(finiDate)
    arrInvsPayBack.append(invsDays)
    arrInvsPayBack.append(totalPayBack)
    print 'Single payment, date:', finiDate, ', interest days:', invsDays, ', interest:', totalPayBack
else:
    #print payTimes  # debug
    lastPayDate = invsDate
    for x in range(payTimes-1):
        # pay date & interest for each monthly installment
        # timedelta only supports (days) and (seconds), not months
        #payDate = firstPayDate + datetime.timedelta(months=x)
        if firstPayDate.month+x <=12:
            payDate = datetime.date(firstPayDate.year,firstPayDate.month+x,20)
        else:
            payDate = datetime.date(firstPayDate.year+1,firstPayDate.month+x-12,20)
        payDays = (payDate - lastPayDate).days
        payBack = invsFund * invsRatio * payDays / 365
        arrInvsPayBack.append(payDate)
        arrInvsPayBack.append(payDays)
        arrInvsPayBack.append(payBack)
        #print x, payDate, payBack
        lastPayDate = payDate
    # the final payment falls on the maturity date
    payDays = (finiDate - lastPayDate).days
    payBack = invsFund * invsRatio * payDays / 365
    arrInvsPayBack.append(finiDate)
    arrInvsPayBack.append(payDays)
    arrInvsPayBack.append(payBack)
total = len(arrInvsPayBack)
n = 0
while n<total/3:
    print 'Payment', n, ', date:', arrInvsPayBack[3*n], ', interest days:', arrInvsPayBack[3*n+1], ', interest:', arrInvsPayBack[3*n+2]
    n = n + 1
print 'Invested principal:', invsFund, '; total interest:', totalPayBack
|
<commit_before><commit_msg>Add the Simple InveCalculator Algo for Earning by days!<commit_after>
|
import datetime
year=2015
month=6
day=22
invsDays=64
invsRatio=0.096
invsFund = 2000
arrInvsPayBack = []
#array arrInvsPayPlan={}
invsDate = datetime.date(year,month,day)
finiDate = invsDate + datetime.timedelta(invsDays)
firstPayDate = datetime.date(year,month+1,20)
print 'Investment start date:', invsDate, '; maturity date:', finiDate, "; first payment date:", firstPayDate
totalPayBack = invsFund * invsRatio * invsDays / 365
## Check whether all the interest is paid in a single installment
payTimes = (finiDate.year - firstPayDate.year)*12 + finiDate.month - firstPayDate.month + 1
if payTimes == 1:
    # store each payment as a flat (date, days, interest) triple
    arrInvsPayBack.append(finiDate)
    arrInvsPayBack.append(invsDays)
    arrInvsPayBack.append(totalPayBack)
    print 'Single payment, date:', finiDate, ', interest days:', invsDays, ', interest:', totalPayBack
else:
    #print payTimes  # debug
    lastPayDate = invsDate
    for x in range(payTimes-1):
        # pay date & interest for each monthly installment
        # timedelta only supports (days) and (seconds), not months
        #payDate = firstPayDate + datetime.timedelta(months=x)
        if firstPayDate.month+x <=12:
            payDate = datetime.date(firstPayDate.year,firstPayDate.month+x,20)
        else:
            payDate = datetime.date(firstPayDate.year+1,firstPayDate.month+x-12,20)
        payDays = (payDate - lastPayDate).days
        payBack = invsFund * invsRatio * payDays / 365
        arrInvsPayBack.append(payDate)
        arrInvsPayBack.append(payDays)
        arrInvsPayBack.append(payBack)
        #print x, payDate, payBack
        lastPayDate = payDate
    # the final payment falls on the maturity date
    payDays = (finiDate - lastPayDate).days
    payBack = invsFund * invsRatio * payDays / 365
    arrInvsPayBack.append(finiDate)
    arrInvsPayBack.append(payDays)
    arrInvsPayBack.append(payBack)
total = len(arrInvsPayBack)
n = 0
while n<total/3:
    print 'Payment', n, ', date:', arrInvsPayBack[3*n], ', interest days:', arrInvsPayBack[3*n+1], ', interest:', arrInvsPayBack[3*n+2]
    n = n + 1
print 'Invested principal:', invsFund, '; total interest:', totalPayBack
|
Add the Simple InveCalculator Algo for Earning by days!import datetime
year=2015
month=6
day=22
invsDays=64
invsRatio=0.096
invsFund = 2000
arrInvsPayBack = []
#array arrInvsPayPlan={}
invsDate = datetime.date(year,month,day)
finiDate = invsDate + datetime.timedelta(invsDays)
firstPayDate = datetime.date(year,month+1,20)
print 'Investment start date:', invsDate, '; maturity date:', finiDate, "; first payment date:", firstPayDate
totalPayBack = invsFund * invsRatio * invsDays / 365
## Check whether all the interest is paid in a single installment
payTimes = (finiDate.year - firstPayDate.year)*12 + finiDate.month - firstPayDate.month + 1
if payTimes == 1:
    # store each payment as a flat (date, days, interest) triple
    arrInvsPayBack.append(finiDate)
    arrInvsPayBack.append(invsDays)
    arrInvsPayBack.append(totalPayBack)
    print 'Single payment, date:', finiDate, ', interest days:', invsDays, ', interest:', totalPayBack
else:
    #print payTimes  # debug
    lastPayDate = invsDate
    for x in range(payTimes-1):
        # pay date & interest for each monthly installment
        # timedelta only supports (days) and (seconds), not months
        #payDate = firstPayDate + datetime.timedelta(months=x)
        if firstPayDate.month+x <=12:
            payDate = datetime.date(firstPayDate.year,firstPayDate.month+x,20)
        else:
            payDate = datetime.date(firstPayDate.year+1,firstPayDate.month+x-12,20)
        payDays = (payDate - lastPayDate).days
        payBack = invsFund * invsRatio * payDays / 365
        arrInvsPayBack.append(payDate)
        arrInvsPayBack.append(payDays)
        arrInvsPayBack.append(payBack)
        #print x, payDate, payBack
        lastPayDate = payDate
    # the final payment falls on the maturity date
    payDays = (finiDate - lastPayDate).days
    payBack = invsFund * invsRatio * payDays / 365
    arrInvsPayBack.append(finiDate)
    arrInvsPayBack.append(payDays)
    arrInvsPayBack.append(payBack)
total = len(arrInvsPayBack)
n = 0
while n<total/3:
    print 'Payment', n, ', date:', arrInvsPayBack[3*n], ', interest days:', arrInvsPayBack[3*n+1], ', interest:', arrInvsPayBack[3*n+2]
    n = n + 1
print 'Invested principal:', invsFund, '; total interest:', totalPayBack
|
<commit_before><commit_msg>Add the Simple InveCalculator Algo for Earning by days!<commit_after>import datetime
year=2015
month=6
day=22
invsDays=64
invsRatio=0.096
invsFund = 2000
arrInvsPayBack = []
#array arrInvsPayPlan={}
invsDate = datetime.date(year,month,day)
finiDate = invsDate + datetime.timedelta(invsDays)
firstPayDate = datetime.date(year,month+1,20)
print 'Investment start date:', invsDate, '; maturity date:', finiDate, "; first payment date:", firstPayDate
totalPayBack = invsFund * invsRatio * invsDays / 365
## Check whether all the interest is paid in a single installment
payTimes = (finiDate.year - firstPayDate.year)*12 + finiDate.month - firstPayDate.month + 1
if payTimes == 1:
    # store each payment as a flat (date, days, interest) triple
    arrInvsPayBack.append(finiDate)
    arrInvsPayBack.append(invsDays)
    arrInvsPayBack.append(totalPayBack)
    print 'Single payment, date:', finiDate, ', interest days:', invsDays, ', interest:', totalPayBack
else:
    #print payTimes  # debug
    lastPayDate = invsDate
    for x in range(payTimes-1):
        # pay date & interest for each monthly installment
        # timedelta only supports (days) and (seconds), not months
        #payDate = firstPayDate + datetime.timedelta(months=x)
        if firstPayDate.month+x <=12:
            payDate = datetime.date(firstPayDate.year,firstPayDate.month+x,20)
        else:
            payDate = datetime.date(firstPayDate.year+1,firstPayDate.month+x-12,20)
        payDays = (payDate - lastPayDate).days
        payBack = invsFund * invsRatio * payDays / 365
        arrInvsPayBack.append(payDate)
        arrInvsPayBack.append(payDays)
        arrInvsPayBack.append(payBack)
        #print x, payDate, payBack
        lastPayDate = payDate
    # the final payment falls on the maturity date
    payDays = (finiDate - lastPayDate).days
    payBack = invsFund * invsRatio * payDays / 365
    arrInvsPayBack.append(finiDate)
    arrInvsPayBack.append(payDays)
    arrInvsPayBack.append(payBack)
total = len(arrInvsPayBack)
n = 0
while n<total/3:
    print 'Payment', n, ', date:', arrInvsPayBack[3*n], ', interest days:', arrInvsPayBack[3*n+1], ', interest:', arrInvsPayBack[3*n+2]
    n = n + 1
print 'Invested principal:', invsFund, '; total interest:', totalPayBack
|
|
938e9f822cded00f37befb70ebe33b3b98283b93
|
test/test_jobs/test_send_mail.py
|
test/test_jobs/test_send_mail.py
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model.app import App
from pybossa.jobs import send_mail
from mock import patch
@patch('pybossa.jobs.mail')
@patch('pybossa.jobs.Message')
class TestSendMailJob(object):
    def test_send_mail_creates_message(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
Message.assert_called_once_with(**mail_dict)
def test_send_mail_sends_mail(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
mail.send.assert_called_once_with(Message())
|
Add tests for send_mail job
|
Add tests for send_mail job
|
Python
|
agpl-3.0
|
PyBossa/pybossa,inteligencia-coletiva-lsd/pybossa,OpenNewsLabs/pybossa,stefanhahmann/pybossa,jean/pybossa,PyBossa/pybossa,Scifabric/pybossa,geotagx/pybossa,stefanhahmann/pybossa,jean/pybossa,geotagx/pybossa,inteligencia-coletiva-lsd/pybossa,OpenNewsLabs/pybossa,Scifabric/pybossa
|
Add tests for send_mail job
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model.app import App
from pybossa.jobs import send_mail
from mock import patch
@patch('pybossa.jobs.mail')
@patch('pybossa.jobs.Message')
class TestSendMailJob(object):
    def test_send_mail_creates_message(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
Message.assert_called_once_with(**mail_dict)
def test_send_mail_sends_mail(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
mail.send.assert_called_once_with(Message())
|
<commit_before><commit_msg>Add tests for send_mail job<commit_after>
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model.app import App
from pybossa.jobs import send_mail
from mock import patch
@patch('pybossa.jobs.mail')
@patch('pybossa.jobs.Message')
class TestSendMailJob(object):
    def test_send_mail_creates_message(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
Message.assert_called_once_with(**mail_dict)
def test_send_mail_sends_mail(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
mail.send.assert_called_once_with(Message())
|
Add tests for send_mail job# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model.app import App
from pybossa.jobs import send_mail
from mock import patch
@patch('pybossa.jobs.mail')
@patch('pybossa.jobs.Message')
class TestSendMailJob(object):
    def test_send_mail_creates_message(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
Message.assert_called_once_with(**mail_dict)
def test_send_mail_sends_mail(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
mail.send.assert_called_once_with(Message())
|
<commit_before><commit_msg>Add tests for send_mail job<commit_after># -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model.app import App
from pybossa.jobs import send_mail
from mock import patch
@patch('pybossa.jobs.mail')
@patch('pybossa.jobs.Message')
class TestSendMailJob(object):
    def test_send_mail_creates_message(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
Message.assert_called_once_with(**mail_dict)
def test_send_mail_sends_mail(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
mail.send.assert_called_once_with(Message())
|