commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fcb060c598f3010de9e702ba419f8c8aa5c0097b
|
mixmind/database.py
|
mixmind/database.py
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
#alembic.stamp('head')
#alembic.revision('Convert columns to support unicode')
#alembic.revision('1.1 - change bar model')
#alembic.revision('1.2 - change bar model')
try:
alembic.revision('Automatic upgrade')
except Exception as err:
print "{}: {}".format(err.__class__.__name__, err)
try:
alembic.upgrade()
except NotImplementedError as err:
print "{}: {}".format(err.__class__.__name__, err)
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
from . import app
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
#alembic.stamp('head')
if app.config.get('DEBUG', False):
try:
alembic.revision('Automatic upgrade')
except Exception as err:
print "{}: {}".format(err.__class__.__name__, err)
try:
alembic.upgrade()
except NotImplementedError as err:
print "{}: {}".format(err.__class__.__name__, err)
elif app.config.get('DO_DB_UPGRADE', False):
alembic.revision('Automatic upgrade')
alembic.upgrade()
|
Clean up the upgrader logic and add a config option for it
|
Clean up the upgrader logic and add a config option for it
|
Python
|
apache-2.0
|
twschum/mix-mind,twschum/mix-mind,twschum/mix-mind,twschum/mix-mind
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
#alembic.stamp('head')
#alembic.revision('Convert columns to support unicode')
#alembic.revision('1.1 - change bar model')
#alembic.revision('1.2 - change bar model')
try:
alembic.revision('Automatic upgrade')
except Exception as err:
print "{}: {}".format(err.__class__.__name__, err)
try:
alembic.upgrade()
except NotImplementedError as err:
print "{}: {}".format(err.__class__.__name__, err)
Clean up the upgrader logic and add a config option for it
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
from . import app
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
#alembic.stamp('head')
if app.config.get('DEBUG', False):
try:
alembic.revision('Automatic upgrade')
except Exception as err:
print "{}: {}".format(err.__class__.__name__, err)
try:
alembic.upgrade()
except NotImplementedError as err:
print "{}: {}".format(err.__class__.__name__, err)
elif app.config.get('DO_DB_UPGRADE', False):
alembic.revision('Automatic upgrade')
alembic.upgrade()
|
<commit_before>from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
#alembic.stamp('head')
#alembic.revision('Convert columns to support unicode')
#alembic.revision('1.1 - change bar model')
#alembic.revision('1.2 - change bar model')
try:
alembic.revision('Automatic upgrade')
except Exception as err:
print "{}: {}".format(err.__class__.__name__, err)
try:
alembic.upgrade()
except NotImplementedError as err:
print "{}: {}".format(err.__class__.__name__, err)
<commit_msg>Clean up the upgrader logic and add a config option for it<commit_after>
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
from . import app
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
#alembic.stamp('head')
if app.config.get('DEBUG', False):
try:
alembic.revision('Automatic upgrade')
except Exception as err:
print "{}: {}".format(err.__class__.__name__, err)
try:
alembic.upgrade()
except NotImplementedError as err:
print "{}: {}".format(err.__class__.__name__, err)
elif app.config.get('DO_DB_UPGRADE', False):
alembic.revision('Automatic upgrade')
alembic.upgrade()
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
#alembic.stamp('head')
#alembic.revision('Convert columns to support unicode')
#alembic.revision('1.1 - change bar model')
#alembic.revision('1.2 - change bar model')
try:
alembic.revision('Automatic upgrade')
except Exception as err:
print "{}: {}".format(err.__class__.__name__, err)
try:
alembic.upgrade()
except NotImplementedError as err:
print "{}: {}".format(err.__class__.__name__, err)
Clean up the upgrader logic and add a config option for itfrom flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
from . import app
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
#alembic.stamp('head')
if app.config.get('DEBUG', False):
try:
alembic.revision('Automatic upgrade')
except Exception as err:
print "{}: {}".format(err.__class__.__name__, err)
try:
alembic.upgrade()
except NotImplementedError as err:
print "{}: {}".format(err.__class__.__name__, err)
elif app.config.get('DO_DB_UPGRADE', False):
alembic.revision('Automatic upgrade')
alembic.upgrade()
|
<commit_before>from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
#alembic.stamp('head')
#alembic.revision('Convert columns to support unicode')
#alembic.revision('1.1 - change bar model')
#alembic.revision('1.2 - change bar model')
try:
alembic.revision('Automatic upgrade')
except Exception as err:
print "{}: {}".format(err.__class__.__name__, err)
try:
alembic.upgrade()
except NotImplementedError as err:
print "{}: {}".format(err.__class__.__name__, err)
<commit_msg>Clean up the upgrader logic and add a config option for it<commit_after>from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
from . import app
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
#alembic.stamp('head')
if app.config.get('DEBUG', False):
try:
alembic.revision('Automatic upgrade')
except Exception as err:
print "{}: {}".format(err.__class__.__name__, err)
try:
alembic.upgrade()
except NotImplementedError as err:
print "{}: {}".format(err.__class__.__name__, err)
elif app.config.get('DO_DB_UPGRADE', False):
alembic.revision('Automatic upgrade')
alembic.upgrade()
|
21e91efbf9cb064f1fcd19ba7a77ba81a6c843f5
|
isso/db/preferences.py
|
isso/db/preferences.py
|
# -*- encoding: utf-8 -*-
import os
import binascii
class Preferences:
defaults = [
("session-key", binascii.b2a_hex(os.urandom(24))),
]
def __init__(self, db):
self.db = db
self.db.execute([
'CREATE TABLE IF NOT EXISTS preferences (',
' key VARCHAR PRIMARY KEY, value VARCHAR',
');'])
for (key, value) in Preferences.defaults:
if self.get(key) is None:
self.set(key, value)
def get(self, key, default=None):
rv = self.db.execute(
'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()
if rv is None:
return default
return rv[0]
def set(self, key, value):
self.db.execute(
'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))
|
# -*- encoding: utf-8 -*-
import os
import binascii
class Preferences:
defaults = [
("session-key", binascii.b2a_hex(os.urandom(24)).decode('utf-8')),
]
def __init__(self, db):
self.db = db
self.db.execute([
'CREATE TABLE IF NOT EXISTS preferences (',
' key VARCHAR PRIMARY KEY, value VARCHAR',
');'])
for (key, value) in Preferences.defaults:
if self.get(key) is None:
self.set(key, value)
def get(self, key, default=None):
rv = self.db.execute(
'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()
if rv is None:
return default
return rv[0]
def set(self, key, value):
self.db.execute(
'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))
|
Save the session-key as a unicode string in the db
|
Save the session-key as a unicode string in the db
The session-key should be saved as a string, not a byte string.
|
Python
|
mit
|
posativ/isso,xuhdev/isso,Mushiyo/isso,jelmer/isso,princesuke/isso,jiumx60rus/isso,janusnic/isso,janusnic/isso,Mushiyo/isso,Mushiyo/isso,posativ/isso,mathstuf/isso,janusnic/isso,jiumx60rus/isso,WQuanfeng/isso,jelmer/isso,princesuke/isso,jelmer/isso,Mushiyo/isso,WQuanfeng/isso,xuhdev/isso,mathstuf/isso,xuhdev/isso,princesuke/isso,jelmer/isso,jiumx60rus/isso,posativ/isso,posativ/isso,mathstuf/isso,WQuanfeng/isso
|
# -*- encoding: utf-8 -*-
import os
import binascii
class Preferences:
defaults = [
("session-key", binascii.b2a_hex(os.urandom(24))),
]
def __init__(self, db):
self.db = db
self.db.execute([
'CREATE TABLE IF NOT EXISTS preferences (',
' key VARCHAR PRIMARY KEY, value VARCHAR',
');'])
for (key, value) in Preferences.defaults:
if self.get(key) is None:
self.set(key, value)
def get(self, key, default=None):
rv = self.db.execute(
'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()
if rv is None:
return default
return rv[0]
def set(self, key, value):
self.db.execute(
'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))
Save the session-key as a unicode string in the db
The session-key should be saved as a string, not a byte string.
|
# -*- encoding: utf-8 -*-
import os
import binascii
class Preferences:
defaults = [
("session-key", binascii.b2a_hex(os.urandom(24)).decode('utf-8')),
]
def __init__(self, db):
self.db = db
self.db.execute([
'CREATE TABLE IF NOT EXISTS preferences (',
' key VARCHAR PRIMARY KEY, value VARCHAR',
');'])
for (key, value) in Preferences.defaults:
if self.get(key) is None:
self.set(key, value)
def get(self, key, default=None):
rv = self.db.execute(
'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()
if rv is None:
return default
return rv[0]
def set(self, key, value):
self.db.execute(
'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))
|
<commit_before># -*- encoding: utf-8 -*-
import os
import binascii
class Preferences:
defaults = [
("session-key", binascii.b2a_hex(os.urandom(24))),
]
def __init__(self, db):
self.db = db
self.db.execute([
'CREATE TABLE IF NOT EXISTS preferences (',
' key VARCHAR PRIMARY KEY, value VARCHAR',
');'])
for (key, value) in Preferences.defaults:
if self.get(key) is None:
self.set(key, value)
def get(self, key, default=None):
rv = self.db.execute(
'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()
if rv is None:
return default
return rv[0]
def set(self, key, value):
self.db.execute(
'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))
<commit_msg>Save the session-key as a unicode string in the db
The session-key should be saved as a string, not a byte string.<commit_after>
|
# -*- encoding: utf-8 -*-
import os
import binascii
class Preferences:
defaults = [
("session-key", binascii.b2a_hex(os.urandom(24)).decode('utf-8')),
]
def __init__(self, db):
self.db = db
self.db.execute([
'CREATE TABLE IF NOT EXISTS preferences (',
' key VARCHAR PRIMARY KEY, value VARCHAR',
');'])
for (key, value) in Preferences.defaults:
if self.get(key) is None:
self.set(key, value)
def get(self, key, default=None):
rv = self.db.execute(
'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()
if rv is None:
return default
return rv[0]
def set(self, key, value):
self.db.execute(
'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))
|
# -*- encoding: utf-8 -*-
import os
import binascii
class Preferences:
defaults = [
("session-key", binascii.b2a_hex(os.urandom(24))),
]
def __init__(self, db):
self.db = db
self.db.execute([
'CREATE TABLE IF NOT EXISTS preferences (',
' key VARCHAR PRIMARY KEY, value VARCHAR',
');'])
for (key, value) in Preferences.defaults:
if self.get(key) is None:
self.set(key, value)
def get(self, key, default=None):
rv = self.db.execute(
'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()
if rv is None:
return default
return rv[0]
def set(self, key, value):
self.db.execute(
'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))
Save the session-key as a unicode string in the db
The session-key should be saved as a string, not a byte string.# -*- encoding: utf-8 -*-
import os
import binascii
class Preferences:
defaults = [
("session-key", binascii.b2a_hex(os.urandom(24)).decode('utf-8')),
]
def __init__(self, db):
self.db = db
self.db.execute([
'CREATE TABLE IF NOT EXISTS preferences (',
' key VARCHAR PRIMARY KEY, value VARCHAR',
');'])
for (key, value) in Preferences.defaults:
if self.get(key) is None:
self.set(key, value)
def get(self, key, default=None):
rv = self.db.execute(
'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()
if rv is None:
return default
return rv[0]
def set(self, key, value):
self.db.execute(
'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))
|
<commit_before># -*- encoding: utf-8 -*-
import os
import binascii
class Preferences:
defaults = [
("session-key", binascii.b2a_hex(os.urandom(24))),
]
def __init__(self, db):
self.db = db
self.db.execute([
'CREATE TABLE IF NOT EXISTS preferences (',
' key VARCHAR PRIMARY KEY, value VARCHAR',
');'])
for (key, value) in Preferences.defaults:
if self.get(key) is None:
self.set(key, value)
def get(self, key, default=None):
rv = self.db.execute(
'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()
if rv is None:
return default
return rv[0]
def set(self, key, value):
self.db.execute(
'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))
<commit_msg>Save the session-key as a unicode string in the db
The session-key should be saved as a string, not a byte string.<commit_after># -*- encoding: utf-8 -*-
import os
import binascii
class Preferences:
defaults = [
("session-key", binascii.b2a_hex(os.urandom(24)).decode('utf-8')),
]
def __init__(self, db):
self.db = db
self.db.execute([
'CREATE TABLE IF NOT EXISTS preferences (',
' key VARCHAR PRIMARY KEY, value VARCHAR',
');'])
for (key, value) in Preferences.defaults:
if self.get(key) is None:
self.set(key, value)
def get(self, key, default=None):
rv = self.db.execute(
'SELECT value FROM preferences WHERE key=?', (key, )).fetchone()
if rv is None:
return default
return rv[0]
def set(self, key, value):
self.db.execute(
'INSERT INTO preferences (key, value) VALUES (?, ?)', (key, value))
|
4d5af4869871b45839952dd9f881635bd07595c1
|
parsers/RPOnline.py
|
parsers/RPOnline.py
|
from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag
class RPOParser(BaseParser):
domains = ['www.rp-online.de']
feeder_pat = '1\.\d*$'
feeder_pages = ['http://www.rp-online.de/']
def _parse(self, html):
soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
fromEncoding='utf-8')
self.meta = soup.findAll('meta')
# category
keywords = soup.find('meta', {'property': 'vr:category'})
self.category = self.compute_category(keywords['content'] if keywords else '')
#article headline
elt = soup.find('meta', {'property': 'og:title'})
if elt is None:
self.real_article = False
return
self.title = elt['content']
# byline / author
author = soup.find('meta', {'itemprop': 'author'})
self.byline = author['content'] if author else ''
# article date
created_at = soup.find('meta', {'property': 'vr:published_time'})
self.date = created_at['content'] if created_at else ''
#article content
div = soup.find('div', {'class': 'main-text '})
if div is None:
self.real_article = False
return
div = self.remove_non_content(div)
self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
if isinstance(x, Tag) and x.name == 'p'])
|
from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag
class RPOParser(BaseParser):
domains = ['www.rp-online.de']
feeder_pat = '(?<!(vid|bid|iid))(-1\.\d*)$'
feeder_pages = ['http://www.rp-online.de/']
def _parse(self, html):
soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
fromEncoding='utf-8')
self.meta = soup.findAll('meta')
# category
keywords = soup.find('meta', {'property': 'vr:category'})
self.category = self.compute_category(keywords['content'] if keywords else '')
#article headline
elt = soup.find('meta', {'property': 'og:title'})
if elt is None:
self.real_article = False
return
self.title = elt['content']
# byline / author
author = soup.find('meta', {'itemprop': 'author'})
self.byline = author['content'] if author else ''
# article date
created_at = soup.find('meta', {'property': 'vr:published_time'})
self.date = created_at['content'] if created_at else ''
#article content
div = soup.find('div', {'class': 'main-text '})
if div is None:
self.real_article = False
return
div = self.remove_non_content(div)
self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
if isinstance(x, Tag) and x.name == 'p'])
|
Exclude Videos article and non-scrapable info-galleries and picture-galleries via URL-pattern
|
Exclude Videos article and non-scrapable info-galleries and picture-galleries via URL-pattern
|
Python
|
mit
|
catcosmo/newsdiffs,catcosmo/newsdiffs,catcosmo/newsdiffs
|
from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag
class RPOParser(BaseParser):
domains = ['www.rp-online.de']
feeder_pat = '1\.\d*$'
feeder_pages = ['http://www.rp-online.de/']
def _parse(self, html):
soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
fromEncoding='utf-8')
self.meta = soup.findAll('meta')
# category
keywords = soup.find('meta', {'property': 'vr:category'})
self.category = self.compute_category(keywords['content'] if keywords else '')
#article headline
elt = soup.find('meta', {'property': 'og:title'})
if elt is None:
self.real_article = False
return
self.title = elt['content']
# byline / author
author = soup.find('meta', {'itemprop': 'author'})
self.byline = author['content'] if author else ''
# article date
created_at = soup.find('meta', {'property': 'vr:published_time'})
self.date = created_at['content'] if created_at else ''
#article content
div = soup.find('div', {'class': 'main-text '})
if div is None:
self.real_article = False
return
div = self.remove_non_content(div)
self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
if isinstance(x, Tag) and x.name == 'p'])
Exclude Videos article and non-scrapable info-galleries and picture-galleries via URL-pattern
|
from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag
class RPOParser(BaseParser):
domains = ['www.rp-online.de']
feeder_pat = '(?<!(vid|bid|iid))(-1\.\d*)$'
feeder_pages = ['http://www.rp-online.de/']
def _parse(self, html):
soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
fromEncoding='utf-8')
self.meta = soup.findAll('meta')
# category
keywords = soup.find('meta', {'property': 'vr:category'})
self.category = self.compute_category(keywords['content'] if keywords else '')
#article headline
elt = soup.find('meta', {'property': 'og:title'})
if elt is None:
self.real_article = False
return
self.title = elt['content']
# byline / author
author = soup.find('meta', {'itemprop': 'author'})
self.byline = author['content'] if author else ''
# article date
created_at = soup.find('meta', {'property': 'vr:published_time'})
self.date = created_at['content'] if created_at else ''
#article content
div = soup.find('div', {'class': 'main-text '})
if div is None:
self.real_article = False
return
div = self.remove_non_content(div)
self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
if isinstance(x, Tag) and x.name == 'p'])
|
<commit_before>from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag
class RPOParser(BaseParser):
domains = ['www.rp-online.de']
feeder_pat = '1\.\d*$'
feeder_pages = ['http://www.rp-online.de/']
def _parse(self, html):
soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
fromEncoding='utf-8')
self.meta = soup.findAll('meta')
# category
keywords = soup.find('meta', {'property': 'vr:category'})
self.category = self.compute_category(keywords['content'] if keywords else '')
#article headline
elt = soup.find('meta', {'property': 'og:title'})
if elt is None:
self.real_article = False
return
self.title = elt['content']
# byline / author
author = soup.find('meta', {'itemprop': 'author'})
self.byline = author['content'] if author else ''
# article date
created_at = soup.find('meta', {'property': 'vr:published_time'})
self.date = created_at['content'] if created_at else ''
#article content
div = soup.find('div', {'class': 'main-text '})
if div is None:
self.real_article = False
return
div = self.remove_non_content(div)
self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
if isinstance(x, Tag) and x.name == 'p'])
<commit_msg>Exclude Videos article and non-scrapable info-galleries and picture-galleries via URL-pattern<commit_after>
|
from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag
class RPOParser(BaseParser):
domains = ['www.rp-online.de']
feeder_pat = '(?<!(vid|bid|iid))(-1\.\d*)$'
feeder_pages = ['http://www.rp-online.de/']
def _parse(self, html):
soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
fromEncoding='utf-8')
self.meta = soup.findAll('meta')
# category
keywords = soup.find('meta', {'property': 'vr:category'})
self.category = self.compute_category(keywords['content'] if keywords else '')
#article headline
elt = soup.find('meta', {'property': 'og:title'})
if elt is None:
self.real_article = False
return
self.title = elt['content']
# byline / author
author = soup.find('meta', {'itemprop': 'author'})
self.byline = author['content'] if author else ''
# article date
created_at = soup.find('meta', {'property': 'vr:published_time'})
self.date = created_at['content'] if created_at else ''
#article content
div = soup.find('div', {'class': 'main-text '})
if div is None:
self.real_article = False
return
div = self.remove_non_content(div)
self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
if isinstance(x, Tag) and x.name == 'p'])
|
from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag
class RPOParser(BaseParser):
domains = ['www.rp-online.de']
feeder_pat = '1\.\d*$'
feeder_pages = ['http://www.rp-online.de/']
def _parse(self, html):
soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
fromEncoding='utf-8')
self.meta = soup.findAll('meta')
# category
keywords = soup.find('meta', {'property': 'vr:category'})
self.category = self.compute_category(keywords['content'] if keywords else '')
#article headline
elt = soup.find('meta', {'property': 'og:title'})
if elt is None:
self.real_article = False
return
self.title = elt['content']
# byline / author
author = soup.find('meta', {'itemprop': 'author'})
self.byline = author['content'] if author else ''
# article date
created_at = soup.find('meta', {'property': 'vr:published_time'})
self.date = created_at['content'] if created_at else ''
#article content
div = soup.find('div', {'class': 'main-text '})
if div is None:
self.real_article = False
return
div = self.remove_non_content(div)
self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
if isinstance(x, Tag) and x.name == 'p'])
Exclude Videos article and non-scrapable info-galleries and picture-galleries via URL-patternfrom baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag
class RPOParser(BaseParser):
    """Article parser for rp-online.de (Rheinische Post).

    Extracts headline, byline, publication date, category and body text
    from an article page's meta tags and main-text container.
    """
    # Hosts this parser handles.
    domains = ['www.rp-online.de']
    # Article-URL filter: article URLs end in "-1.<digits>"; the negative
    # lookbehind excludes video (vid), picture-gallery (bid) and
    # info-gallery (iid) pages, which cannot be scraped.
    feeder_pat = '(?<!(vid|bid|iid))(-1\.\d*)$'
    # Pages crawled to discover article links.
    feeder_pages = ['http://www.rp-online.de/']
    def _parse(self, html):
        """Populate article fields from raw *html*.

        Sets ``self.real_article = False`` and returns early when the page
        has no og:title meta tag or no main-text container.
        """
        soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
                             fromEncoding='utf-8')
        self.meta = soup.findAll('meta')
        # category from the vr:category meta tag (empty string when absent)
        keywords = soup.find('meta', {'property': 'vr:category'})
        self.category = self.compute_category(keywords['content'] if keywords else '')
        # article headline (og:title); its absence marks a non-article page
        elt = soup.find('meta', {'property': 'og:title'})
        if elt is None:
            self.real_article = False
            return
        self.title = elt['content']
        # byline / author (itemprop=author meta tag)
        author = soup.find('meta', {'itemprop': 'author'})
        self.byline = author['content'] if author else ''
        # article date (vr:published_time meta tag, kept as the raw string)
        created_at = soup.find('meta', {'property': 'vr:published_time'})
        self.date = created_at['content'] if created_at else ''
        # article content; the trailing space in 'main-text ' is deliberate
        # and matches the site's markup
        div = soup.find('div', {'class': 'main-text '})
        if div is None:
            self.real_article = False
            return
        div = self.remove_non_content(div)
        # join the text of the direct <p> children, one blank line between
        # paragraphs
        self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
                                        if isinstance(x, Tag) and x.name == 'p'])
|
<commit_before>from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag
class RPOParser(BaseParser):
domains = ['www.rp-online.de']
feeder_pat = '1\.\d*$'
feeder_pages = ['http://www.rp-online.de/']
def _parse(self, html):
soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
fromEncoding='utf-8')
self.meta = soup.findAll('meta')
# category
keywords = soup.find('meta', {'property': 'vr:category'})
self.category = self.compute_category(keywords['content'] if keywords else '')
#article headline
elt = soup.find('meta', {'property': 'og:title'})
if elt is None:
self.real_article = False
return
self.title = elt['content']
# byline / author
author = soup.find('meta', {'itemprop': 'author'})
self.byline = author['content'] if author else ''
# article date
created_at = soup.find('meta', {'property': 'vr:published_time'})
self.date = created_at['content'] if created_at else ''
#article content
div = soup.find('div', {'class': 'main-text '})
if div is None:
self.real_article = False
return
div = self.remove_non_content(div)
self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
if isinstance(x, Tag) and x.name == 'p'])
<commit_msg>Exclude Videos article and non-scrapable info-galleries and picture-galleries via URL-pattern<commit_after>from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag
class RPOParser(BaseParser):
domains = ['www.rp-online.de']
feeder_pat = '(?<!(vid|bid|iid))(-1\.\d*)$'
feeder_pages = ['http://www.rp-online.de/']
def _parse(self, html):
soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
fromEncoding='utf-8')
self.meta = soup.findAll('meta')
# category
keywords = soup.find('meta', {'property': 'vr:category'})
self.category = self.compute_category(keywords['content'] if keywords else '')
#article headline
elt = soup.find('meta', {'property': 'og:title'})
if elt is None:
self.real_article = False
return
self.title = elt['content']
# byline / author
author = soup.find('meta', {'itemprop': 'author'})
self.byline = author['content'] if author else ''
# article date
created_at = soup.find('meta', {'property': 'vr:published_time'})
self.date = created_at['content'] if created_at else ''
#article content
div = soup.find('div', {'class': 'main-text '})
if div is None:
self.real_article = False
return
div = self.remove_non_content(div)
self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
if isinstance(x, Tag) and x.name == 'p'])
|
014f7d9ef9a10264f78f42a63ffa03dd9cd4e122
|
test/test_texture.py
|
test/test_texture.py
|
import unittest
import os
import pywavefront.texture
import pywavefront.visualization # power of two test
def prepend_dir(file):
return os.path.join(os.path.dirname(__file__), file)
class TestTexture(unittest.TestCase):
def testPathedImageName(self):
"""For Texture objects, the image name should be the last component of the path."""
my_texture = pywavefront.texture.Texture(prepend_dir('4x4.png'))
self.assertEqual(my_texture.image_name, prepend_dir('4x4.png'))
# NOTE: This us using pyglet, so disabling for now
# def testNonPowerOfTwoImage(self):
# """Texture images that have a non-power-of-two dimension should raise an exception."""
# self.assertRaises(Exception, pywavefront.texture.Texture, prepend_dir('3x4.png'))
def testMissingFile(self):
"""Referencing a missing texture file should raise an exception."""
self.assertRaises(Exception, pywavefront.texture.Texture, 'missing.file.do.not.create')
|
import unittest
import os
import pywavefront.texture
def prepend_dir(file):
    """Return *file* resolved relative to this test module's directory."""
    here = os.path.dirname(__file__)
    return os.path.join(here, file)
class TestTexture(unittest.TestCase):
    """Tests for pywavefront.texture.Texture construction."""
    def testPathedImageName(self):
        """A Texture built from a path keeps that full path as its image name."""
        path = prepend_dir('4x4.png')
        texture = pywavefront.texture.Texture(path)
        self.assertEqual(texture.image_name, path)
    def testMissingFile(self):
        """Constructing a Texture from a nonexistent file raises."""
        with self.assertRaises(Exception):
            pywavefront.texture.Texture('missing.file.do.not.create')
|
Remove tests depending on pyglet entirely
|
Remove tests depending on pyglet entirely
|
Python
|
bsd-3-clause
|
greenmoss/PyWavefront
|
import unittest
import os
import pywavefront.texture
import pywavefront.visualization # power of two test
def prepend_dir(file):
return os.path.join(os.path.dirname(__file__), file)
class TestTexture(unittest.TestCase):
def testPathedImageName(self):
"""For Texture objects, the image name should be the last component of the path."""
my_texture = pywavefront.texture.Texture(prepend_dir('4x4.png'))
self.assertEqual(my_texture.image_name, prepend_dir('4x4.png'))
# NOTE: This us using pyglet, so disabling for now
# def testNonPowerOfTwoImage(self):
# """Texture images that have a non-power-of-two dimension should raise an exception."""
# self.assertRaises(Exception, pywavefront.texture.Texture, prepend_dir('3x4.png'))
def testMissingFile(self):
"""Referencing a missing texture file should raise an exception."""
self.assertRaises(Exception, pywavefront.texture.Texture, 'missing.file.do.not.create')
Remove tests depending on pyglet entirely
|
import unittest
import os
import pywavefront.texture
def prepend_dir(file):
return os.path.join(os.path.dirname(__file__), file)
class TestTexture(unittest.TestCase):
def testPathedImageName(self):
"""For Texture objects, the image name should be the last component of the path."""
my_texture = pywavefront.texture.Texture(prepend_dir('4x4.png'))
self.assertEqual(my_texture.image_name, prepend_dir('4x4.png'))
def testMissingFile(self):
"""Referencing a missing texture file should raise an exception."""
self.assertRaises(Exception, pywavefront.texture.Texture, 'missing.file.do.not.create')
|
<commit_before>import unittest
import os
import pywavefront.texture
import pywavefront.visualization # power of two test
def prepend_dir(file):
return os.path.join(os.path.dirname(__file__), file)
class TestTexture(unittest.TestCase):
def testPathedImageName(self):
"""For Texture objects, the image name should be the last component of the path."""
my_texture = pywavefront.texture.Texture(prepend_dir('4x4.png'))
self.assertEqual(my_texture.image_name, prepend_dir('4x4.png'))
# NOTE: This us using pyglet, so disabling for now
# def testNonPowerOfTwoImage(self):
# """Texture images that have a non-power-of-two dimension should raise an exception."""
# self.assertRaises(Exception, pywavefront.texture.Texture, prepend_dir('3x4.png'))
def testMissingFile(self):
"""Referencing a missing texture file should raise an exception."""
self.assertRaises(Exception, pywavefront.texture.Texture, 'missing.file.do.not.create')
<commit_msg>Remove tests depending on pyglet entirely<commit_after>
|
import unittest
import os
import pywavefront.texture
def prepend_dir(file):
return os.path.join(os.path.dirname(__file__), file)
class TestTexture(unittest.TestCase):
def testPathedImageName(self):
"""For Texture objects, the image name should be the last component of the path."""
my_texture = pywavefront.texture.Texture(prepend_dir('4x4.png'))
self.assertEqual(my_texture.image_name, prepend_dir('4x4.png'))
def testMissingFile(self):
"""Referencing a missing texture file should raise an exception."""
self.assertRaises(Exception, pywavefront.texture.Texture, 'missing.file.do.not.create')
|
import unittest
import os
import pywavefront.texture
import pywavefront.visualization # power of two test
def prepend_dir(file):
return os.path.join(os.path.dirname(__file__), file)
class TestTexture(unittest.TestCase):
def testPathedImageName(self):
"""For Texture objects, the image name should be the last component of the path."""
my_texture = pywavefront.texture.Texture(prepend_dir('4x4.png'))
self.assertEqual(my_texture.image_name, prepend_dir('4x4.png'))
# NOTE: This us using pyglet, so disabling for now
# def testNonPowerOfTwoImage(self):
# """Texture images that have a non-power-of-two dimension should raise an exception."""
# self.assertRaises(Exception, pywavefront.texture.Texture, prepend_dir('3x4.png'))
def testMissingFile(self):
"""Referencing a missing texture file should raise an exception."""
self.assertRaises(Exception, pywavefront.texture.Texture, 'missing.file.do.not.create')
Remove tests depending on pyglet entirely
import unittest
import os
import pywavefront.texture
def prepend_dir(file):
return os.path.join(os.path.dirname(__file__), file)
class TestTexture(unittest.TestCase):
def testPathedImageName(self):
"""For Texture objects, the image name should be the last component of the path."""
my_texture = pywavefront.texture.Texture(prepend_dir('4x4.png'))
self.assertEqual(my_texture.image_name, prepend_dir('4x4.png'))
def testMissingFile(self):
"""Referencing a missing texture file should raise an exception."""
self.assertRaises(Exception, pywavefront.texture.Texture, 'missing.file.do.not.create')
|
<commit_before>import unittest
import os
import pywavefront.texture
import pywavefront.visualization # power of two test
def prepend_dir(file):
return os.path.join(os.path.dirname(__file__), file)
class TestTexture(unittest.TestCase):
def testPathedImageName(self):
"""For Texture objects, the image name should be the last component of the path."""
my_texture = pywavefront.texture.Texture(prepend_dir('4x4.png'))
self.assertEqual(my_texture.image_name, prepend_dir('4x4.png'))
# NOTE: This us using pyglet, so disabling for now
# def testNonPowerOfTwoImage(self):
# """Texture images that have a non-power-of-two dimension should raise an exception."""
# self.assertRaises(Exception, pywavefront.texture.Texture, prepend_dir('3x4.png'))
def testMissingFile(self):
"""Referencing a missing texture file should raise an exception."""
self.assertRaises(Exception, pywavefront.texture.Texture, 'missing.file.do.not.create')
<commit_msg>Remove tests depending on pyglet entirely<commit_after>import unittest
import os
import pywavefront.texture
def prepend_dir(file):
return os.path.join(os.path.dirname(__file__), file)
class TestTexture(unittest.TestCase):
def testPathedImageName(self):
"""For Texture objects, the image name should be the last component of the path."""
my_texture = pywavefront.texture.Texture(prepend_dir('4x4.png'))
self.assertEqual(my_texture.image_name, prepend_dir('4x4.png'))
def testMissingFile(self):
"""Referencing a missing texture file should raise an exception."""
self.assertRaises(Exception, pywavefront.texture.Texture, 'missing.file.do.not.create')
|
29cf128a62f66d924980a5a48156045d88f644c5
|
scripts/tabledef.py
|
scripts/tabledef.py
|
# -*- coding: utf-8 -*-
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
SQLALCHEMY_DATABASE_URI = 'sqlite:///accounts.db'
Base = declarative_base()
def db_connect():
"""
Performs database connection using database settings from settings.py.
Returns sqlalchemy engine instance
"""
return create_engine(SQLALCHEMY_DATABASE_URI)
class User(Base):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
username = Column(String(30), unique=True)
password = Column(String(30))
email = Column(String(50))
def __repr__(self):
return '<User %r>' % self.username
engine = db_connect() # Connect to database
Base.metadata.create_all(engine) # Create models
|
# -*- coding: utf-8 -*-
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
SQLALCHEMY_DATABASE_URI = 'sqlite:///accounts.db'
Base = declarative_base()
def db_connect():
    """Create and return a SQLAlchemy engine for the configured database URI."""
    engine = create_engine(SQLALCHEMY_DATABASE_URI)
    return engine
class User(Base):
    """SQLAlchemy declarative model for an account row in the ``user`` table."""
    __tablename__ = "user"
    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Login name; uniqueness enforced at the database level.
    username = Column(String(30), unique=True)
    # 512 chars — presumably sized for a salted password hash rather than
    # plaintext; TODO confirm against the code that writes this column.
    password = Column(String(512))
    email = Column(String(50))
    def __repr__(self):
        # Debug-friendly representation; %r quotes the username.
        return '<User %r>' % self.username
engine = db_connect() # Connect to database
Base.metadata.create_all(engine) # Create models
|
Increase size of `password` column
|
Increase size of `password` column
|
Python
|
mit
|
anfederico/Flaskex,anfederico/Flaskex,anfederico/Flaskex
|
# -*- coding: utf-8 -*-
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
SQLALCHEMY_DATABASE_URI = 'sqlite:///accounts.db'
Base = declarative_base()
def db_connect():
"""
Performs database connection using database settings from settings.py.
Returns sqlalchemy engine instance
"""
return create_engine(SQLALCHEMY_DATABASE_URI)
class User(Base):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
username = Column(String(30), unique=True)
password = Column(String(30))
email = Column(String(50))
def __repr__(self):
return '<User %r>' % self.username
engine = db_connect() # Connect to database
Base.metadata.create_all(engine) # Create models
Increase size of `password` column
|
# -*- coding: utf-8 -*-
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
SQLALCHEMY_DATABASE_URI = 'sqlite:///accounts.db'
Base = declarative_base()
def db_connect():
"""
Performs database connection using database settings from settings.py.
Returns sqlalchemy engine instance
"""
return create_engine(SQLALCHEMY_DATABASE_URI)
class User(Base):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
username = Column(String(30), unique=True)
password = Column(String(512))
email = Column(String(50))
def __repr__(self):
return '<User %r>' % self.username
engine = db_connect() # Connect to database
Base.metadata.create_all(engine) # Create models
|
<commit_before># -*- coding: utf-8 -*-
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
SQLALCHEMY_DATABASE_URI = 'sqlite:///accounts.db'
Base = declarative_base()
def db_connect():
"""
Performs database connection using database settings from settings.py.
Returns sqlalchemy engine instance
"""
return create_engine(SQLALCHEMY_DATABASE_URI)
class User(Base):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
username = Column(String(30), unique=True)
password = Column(String(30))
email = Column(String(50))
def __repr__(self):
return '<User %r>' % self.username
engine = db_connect() # Connect to database
Base.metadata.create_all(engine) # Create models
<commit_msg>Increase size of `password` column<commit_after>
|
# -*- coding: utf-8 -*-
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
SQLALCHEMY_DATABASE_URI = 'sqlite:///accounts.db'
Base = declarative_base()
def db_connect():
"""
Performs database connection using database settings from settings.py.
Returns sqlalchemy engine instance
"""
return create_engine(SQLALCHEMY_DATABASE_URI)
class User(Base):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
username = Column(String(30), unique=True)
password = Column(String(512))
email = Column(String(50))
def __repr__(self):
return '<User %r>' % self.username
engine = db_connect() # Connect to database
Base.metadata.create_all(engine) # Create models
|
# -*- coding: utf-8 -*-
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
SQLALCHEMY_DATABASE_URI = 'sqlite:///accounts.db'
Base = declarative_base()
def db_connect():
"""
Performs database connection using database settings from settings.py.
Returns sqlalchemy engine instance
"""
return create_engine(SQLALCHEMY_DATABASE_URI)
class User(Base):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
username = Column(String(30), unique=True)
password = Column(String(30))
email = Column(String(50))
def __repr__(self):
return '<User %r>' % self.username
engine = db_connect() # Connect to database
Base.metadata.create_all(engine) # Create models
Increase size of `password` column
# -*- coding: utf-8 -*-
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
SQLALCHEMY_DATABASE_URI = 'sqlite:///accounts.db'
Base = declarative_base()
def db_connect():
"""
Performs database connection using database settings from settings.py.
Returns sqlalchemy engine instance
"""
return create_engine(SQLALCHEMY_DATABASE_URI)
class User(Base):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
username = Column(String(30), unique=True)
password = Column(String(512))
email = Column(String(50))
def __repr__(self):
return '<User %r>' % self.username
engine = db_connect() # Connect to database
Base.metadata.create_all(engine) # Create models
|
<commit_before># -*- coding: utf-8 -*-
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
SQLALCHEMY_DATABASE_URI = 'sqlite:///accounts.db'
Base = declarative_base()
def db_connect():
"""
Performs database connection using database settings from settings.py.
Returns sqlalchemy engine instance
"""
return create_engine(SQLALCHEMY_DATABASE_URI)
class User(Base):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
username = Column(String(30), unique=True)
password = Column(String(30))
email = Column(String(50))
def __repr__(self):
return '<User %r>' % self.username
engine = db_connect() # Connect to database
Base.metadata.create_all(engine) # Create models
<commit_msg>Increase size of `password` column<commit_after># -*- coding: utf-8 -*-
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
SQLALCHEMY_DATABASE_URI = 'sqlite:///accounts.db'
Base = declarative_base()
def db_connect():
"""
Performs database connection using database settings from settings.py.
Returns sqlalchemy engine instance
"""
return create_engine(SQLALCHEMY_DATABASE_URI)
class User(Base):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
username = Column(String(30), unique=True)
password = Column(String(512))
email = Column(String(50))
def __repr__(self):
return '<User %r>' % self.username
engine = db_connect() # Connect to database
Base.metadata.create_all(engine) # Create models
|
b8a54a3bef04b43356d2472c59929ad15a0b6d4b
|
semantics/common.py
|
semantics/common.py
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import numpy as np
def get_exponent(v):
if isinstance(v, np.float32):
mask, shift, offset = 0x7f800000, 23, 127
else:
raise NotImplementedError('The value v can only be of type np.float32')
return ((v.view('i') & mask) >> shift) - offset
def ulp(v):
if isinstance(v, np.float16):
prec = 11
elif isinstance(v, np.float32):
prec = 24
elif isinstance(v, np.float64):
prec = 53
return 2 ** (get_exponent(v) - prec)
def round(v, m='Nearest'):
pass
if __name__ == '__main__':
v = np.float32('2.5')
print get_exponent(v)
print ulp(v)
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import gmpy2
from gmpy2 import mpq, mpfr
def ulp(v):
    """Return the unit in the last place of *v* as an exact rational 2**exponent."""
    _, exponent = v.as_mantissa_exp()
    return mpq(2) ** exponent
def round(mode):
    """Decorator factory: run a two-argument operation under the given
    gmpy2 rounding *mode* (applied via a temporary local context)."""
    def decorator(func):
        def wrapper(lhs, rhs):
            with gmpy2.local_context(round=mode):
                return func(lhs, rhs)
        return wrapper
    return decorator
if __name__ == '__main__':
gmpy2.set_context(gmpy2.ieee(32))
print float(ulp(mpfr('0.1')))
mult = lambda x, y: x * y
args = [mpfr('0.3'), mpfr('2.6')]
print round(gmpy2.RoundDown)(mult)(*args)
print round(gmpy2.RoundUp)(mult)(*args)
|
Use gmpy2 instead of numpy
|
Use gmpy2 instead of numpy
|
Python
|
mit
|
admk/soap
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import numpy as np
def get_exponent(v):
if isinstance(v, np.float32):
mask, shift, offset = 0x7f800000, 23, 127
else:
raise NotImplementedError('The value v can only be of type np.float32')
return ((v.view('i') & mask) >> shift) - offset
def ulp(v):
if isinstance(v, np.float16):
prec = 11
elif isinstance(v, np.float32):
prec = 24
elif isinstance(v, np.float64):
prec = 53
return 2 ** (get_exponent(v) - prec)
def round(v, m='Nearest'):
pass
if __name__ == '__main__':
v = np.float32('2.5')
print get_exponent(v)
print ulp(v)
Use gmpy2 instead of numpy
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import gmpy2
from gmpy2 import mpq, mpfr
def ulp(v):
return mpq(2) ** v.as_mantissa_exp()[1]
def round(mode):
def decorator(f):
def wrapped(v1, v2):
with gmpy2.local_context(round=mode):
return f(v1, v2)
return wrapped
return decorator
if __name__ == '__main__':
gmpy2.set_context(gmpy2.ieee(32))
print float(ulp(mpfr('0.1')))
mult = lambda x, y: x * y
args = [mpfr('0.3'), mpfr('2.6')]
print round(gmpy2.RoundDown)(mult)(*args)
print round(gmpy2.RoundUp)(mult)(*args)
|
<commit_before>#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import numpy as np
def get_exponent(v):
if isinstance(v, np.float32):
mask, shift, offset = 0x7f800000, 23, 127
else:
raise NotImplementedError('The value v can only be of type np.float32')
return ((v.view('i') & mask) >> shift) - offset
def ulp(v):
if isinstance(v, np.float16):
prec = 11
elif isinstance(v, np.float32):
prec = 24
elif isinstance(v, np.float64):
prec = 53
return 2 ** (get_exponent(v) - prec)
def round(v, m='Nearest'):
pass
if __name__ == '__main__':
v = np.float32('2.5')
print get_exponent(v)
print ulp(v)
<commit_msg>Use gmpy2 instead of numpy<commit_after>
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import gmpy2
from gmpy2 import mpq, mpfr
def ulp(v):
return mpq(2) ** v.as_mantissa_exp()[1]
def round(mode):
def decorator(f):
def wrapped(v1, v2):
with gmpy2.local_context(round=mode):
return f(v1, v2)
return wrapped
return decorator
if __name__ == '__main__':
gmpy2.set_context(gmpy2.ieee(32))
print float(ulp(mpfr('0.1')))
mult = lambda x, y: x * y
args = [mpfr('0.3'), mpfr('2.6')]
print round(gmpy2.RoundDown)(mult)(*args)
print round(gmpy2.RoundUp)(mult)(*args)
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import numpy as np
def get_exponent(v):
if isinstance(v, np.float32):
mask, shift, offset = 0x7f800000, 23, 127
else:
raise NotImplementedError('The value v can only be of type np.float32')
return ((v.view('i') & mask) >> shift) - offset
def ulp(v):
if isinstance(v, np.float16):
prec = 11
elif isinstance(v, np.float32):
prec = 24
elif isinstance(v, np.float64):
prec = 53
return 2 ** (get_exponent(v) - prec)
def round(v, m='Nearest'):
pass
if __name__ == '__main__':
v = np.float32('2.5')
print get_exponent(v)
print ulp(v)
Use gmpy2 instead of numpy
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import gmpy2
from gmpy2 import mpq, mpfr
def ulp(v):
return mpq(2) ** v.as_mantissa_exp()[1]
def round(mode):
def decorator(f):
def wrapped(v1, v2):
with gmpy2.local_context(round=mode):
return f(v1, v2)
return wrapped
return decorator
if __name__ == '__main__':
gmpy2.set_context(gmpy2.ieee(32))
print float(ulp(mpfr('0.1')))
mult = lambda x, y: x * y
args = [mpfr('0.3'), mpfr('2.6')]
print round(gmpy2.RoundDown)(mult)(*args)
print round(gmpy2.RoundUp)(mult)(*args)
|
<commit_before>#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import numpy as np
def get_exponent(v):
if isinstance(v, np.float32):
mask, shift, offset = 0x7f800000, 23, 127
else:
raise NotImplementedError('The value v can only be of type np.float32')
return ((v.view('i') & mask) >> shift) - offset
def ulp(v):
if isinstance(v, np.float16):
prec = 11
elif isinstance(v, np.float32):
prec = 24
elif isinstance(v, np.float64):
prec = 53
return 2 ** (get_exponent(v) - prec)
def round(v, m='Nearest'):
pass
if __name__ == '__main__':
v = np.float32('2.5')
print get_exponent(v)
print ulp(v)
<commit_msg>Use gmpy2 instead of numpy<commit_after>#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import gmpy2
from gmpy2 import mpq, mpfr
def ulp(v):
return mpq(2) ** v.as_mantissa_exp()[1]
def round(mode):
def decorator(f):
def wrapped(v1, v2):
with gmpy2.local_context(round=mode):
return f(v1, v2)
return wrapped
return decorator
if __name__ == '__main__':
gmpy2.set_context(gmpy2.ieee(32))
print float(ulp(mpfr('0.1')))
mult = lambda x, y: x * y
args = [mpfr('0.3'), mpfr('2.6')]
print round(gmpy2.RoundDown)(mult)(*args)
print round(gmpy2.RoundUp)(mult)(*args)
|
84e41e39921b33fc9c84a99fe498587ca7ac30ae
|
settings_example.py
|
settings_example.py
|
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
# If this is set to a valid path, all CSV files extracted from emails will be
# stored in sub-folders within it.
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
Add CSV folder setting comment
|
Add CSV folder setting comment
|
Python
|
mit
|
AustralianAntarcticDataCentre/save_emails_to_files,AustralianAntarcticDataCentre/save_emails_to_files
|
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
Add CSV folder setting comment
|
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
# If this is set to a valid path, all CSV files extracted from emails will be
# stored in sub-folders within it.
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
    """Build a DatabaseServer client from the example connection string."""
    connection = 'my_username/my_password@database.example.com:5432/my_database'
    return DatabaseServer(connection)
def get_email_client():
    """Build an EmailServer client with the example host and credentials."""
    host = 'mail.example.com'
    return EmailServer(host, 'my_username', 'my_password')
|
<commit_before>import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
<commit_msg>Add CSV folder setting comment<commit_after>
|
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
# If this is set to a valid path, all CSV files extracted from emails will be
# stored in sub-folders within it.
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
Add CSV folder setting comment
import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
# If this is set to a valid path, all CSV files extracted from emails will be
# stored in sub-folders within it.
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
<commit_before>import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
<commit_msg>Add CSV folder setting comment<commit_after>import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
# If this is set to a valid path, all CSV files extracted from emails will be
# stored in sub-folders within it.
CSV_FOLDER = os.getcwd()
# Values come from `EMAIL_SUBJECT_RE`.
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
# Values come from `EMAIL_SUBJECT_RE`.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
c2a090772e8aaa3a1f2239c0ca7abc0cb8978c88
|
Tools/compiler/compile.py
|
Tools/compiler/compile.py
|
import sys
import getopt
from compiler import compile, visitor
def main():
VERBOSE = 0
DISPLAY = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqd')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
compile(filename, DISPLAY)
if __name__ == "__main__":
main()
|
import sys
import getopt
from compiler import compile, visitor
##import profile
def main():
VERBOSE = 0
DISPLAY = 0
CONTINUE = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqdc')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if k == '-c':
CONTINUE = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
try:
compile(filename, DISPLAY)
## profile.run('compile(%s, %s)' % (`filename`, `DISPLAY`),
## filename + ".prof")
except SyntaxError, err:
print err
print err.lineno
if not CONTINUE:
sys.exit(-1)
if __name__ == "__main__":
main()
|
Add -c option to continue if one file has a SyntaxError
|
Add -c option to continue if one file has a SyntaxError
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
import sys
import getopt
from compiler import compile, visitor
def main():
VERBOSE = 0
DISPLAY = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqd')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
compile(filename, DISPLAY)
if __name__ == "__main__":
main()
Add -c option to continue if one file has a SyntaxError
|
import sys
import getopt
from compiler import compile, visitor
##import profile
def main():
VERBOSE = 0
DISPLAY = 0
CONTINUE = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqdc')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if k == '-c':
CONTINUE = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
try:
compile(filename, DISPLAY)
## profile.run('compile(%s, %s)' % (`filename`, `DISPLAY`),
## filename + ".prof")
except SyntaxError, err:
print err
print err.lineno
if not CONTINUE:
sys.exit(-1)
if __name__ == "__main__":
main()
|
<commit_before>import sys
import getopt
from compiler import compile, visitor
def main():
VERBOSE = 0
DISPLAY = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqd')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
compile(filename, DISPLAY)
if __name__ == "__main__":
main()
<commit_msg>Add -c option to continue if one file has a SyntaxError<commit_after>
|
import sys
import getopt
from compiler import compile, visitor
##import profile
def main():
VERBOSE = 0
DISPLAY = 0
CONTINUE = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqdc')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if k == '-c':
CONTINUE = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
try:
compile(filename, DISPLAY)
## profile.run('compile(%s, %s)' % (`filename`, `DISPLAY`),
## filename + ".prof")
except SyntaxError, err:
print err
print err.lineno
if not CONTINUE:
sys.exit(-1)
if __name__ == "__main__":
main()
|
import sys
import getopt
from compiler import compile, visitor
def main():
VERBOSE = 0
DISPLAY = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqd')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
compile(filename, DISPLAY)
if __name__ == "__main__":
main()
Add -c option to continue if one file has a SyntaxErrorimport sys
import getopt
from compiler import compile, visitor
##import profile
def main():
VERBOSE = 0
DISPLAY = 0
CONTINUE = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqdc')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if k == '-c':
CONTINUE = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
try:
compile(filename, DISPLAY)
## profile.run('compile(%s, %s)' % (`filename`, `DISPLAY`),
## filename + ".prof")
except SyntaxError, err:
print err
print err.lineno
if not CONTINUE:
sys.exit(-1)
if __name__ == "__main__":
main()
|
<commit_before>import sys
import getopt
from compiler import compile, visitor
def main():
VERBOSE = 0
DISPLAY = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqd')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
compile(filename, DISPLAY)
if __name__ == "__main__":
main()
<commit_msg>Add -c option to continue if one file has a SyntaxError<commit_after>import sys
import getopt
from compiler import compile, visitor
##import profile
def main():
VERBOSE = 0
DISPLAY = 0
CONTINUE = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqdc')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if k == '-c':
CONTINUE = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
try:
compile(filename, DISPLAY)
## profile.run('compile(%s, %s)' % (`filename`, `DISPLAY`),
## filename + ".prof")
except SyntaxError, err:
print err
print err.lineno
if not CONTINUE:
sys.exit(-1)
if __name__ == "__main__":
main()
|
9a0fd1daf7d35ae1b29e3d1dbbc11272fcb13847
|
app/start.py
|
app/start.py
|
from lxml import objectify, etree
import simplejson as json
from converter.nsl.nsl_converter import NslConverter
from converter.nsl.other.nsl_other_converter import NslOtherConverter
import sys
xml = open("SEnsl_ssi.xml")
xml_string = xml.read()
xml_obj = objectify.fromstring(xml_string)
nsl_other_xml_string = open("SEnsl_other.xml").read()
nsl_other_xml_obj = objectify.fromstring(nsl_other_xml_string)
nsl_dict = NslConverter().convert(xml_obj)
nsl_other_dict = NslOtherConverter().convert(nsl_other_xml_obj)
for item in nsl_dict:
filename = ""
for code in item["substance_codes"]:
if code["code_system"] == "SENSLIDSENSL":
filename = code["code"]
break
with open('json/' + filename + ".json", 'w') as outfile:
json.dump(item, outfile)
for item in nsl_other_dict:
with open("json/other/" + item["se_nsl_id"] + ".json", "w") as outfile:
json.dump(item, outfile)
|
from lxml import objectify, etree
import simplejson as json
from converter.nsl.nsl_converter import NslConverter
from converter.nsl.other.nsl_other_converter import NslOtherConverter
import sys
xml = open("SEnsl_ssi.xml")
xml_string = xml.read()
xml_obj = objectify.fromstring(xml_string)
nsl_other_xml_string = open("SEnsl_other.xml").read()
nsl_other_xml_obj = objectify.fromstring(nsl_other_xml_string)
nsl_dict = NslConverter().convert(xml_obj)
nsl_other_dict = NslOtherConverter().convert(nsl_other_xml_obj)
for item in nsl_dict:
filename = ""
for code in item["substance_codes"]:
if code["code_system"] == "SENSLIDSENSL":
filename = code["code"]
break
with open('json/' + filename + ".json", 'w') as outfile:
json.dump(item, outfile, sort_keys=True)
for item in nsl_other_dict:
with open("json/other/" + item["se_nsl_id"] + ".json", "w") as outfile:
json.dump(item, outfile, sort_keys=True)
|
Enable sort keys in json dumps to make sure json is stable
|
Enable sort keys in json dumps to make sure json is stable
|
Python
|
bsd-2-clause
|
c0d3m0nkey/xml-to-json-converter
|
from lxml import objectify, etree
import simplejson as json
from converter.nsl.nsl_converter import NslConverter
from converter.nsl.other.nsl_other_converter import NslOtherConverter
import sys
xml = open("SEnsl_ssi.xml")
xml_string = xml.read()
xml_obj = objectify.fromstring(xml_string)
nsl_other_xml_string = open("SEnsl_other.xml").read()
nsl_other_xml_obj = objectify.fromstring(nsl_other_xml_string)
nsl_dict = NslConverter().convert(xml_obj)
nsl_other_dict = NslOtherConverter().convert(nsl_other_xml_obj)
for item in nsl_dict:
filename = ""
for code in item["substance_codes"]:
if code["code_system"] == "SENSLIDSENSL":
filename = code["code"]
break
with open('json/' + filename + ".json", 'w') as outfile:
json.dump(item, outfile)
for item in nsl_other_dict:
with open("json/other/" + item["se_nsl_id"] + ".json", "w") as outfile:
json.dump(item, outfile)
Enable sort keys in json dumps to make sure json is stable
|
from lxml import objectify, etree
import simplejson as json
from converter.nsl.nsl_converter import NslConverter
from converter.nsl.other.nsl_other_converter import NslOtherConverter
import sys
xml = open("SEnsl_ssi.xml")
xml_string = xml.read()
xml_obj = objectify.fromstring(xml_string)
nsl_other_xml_string = open("SEnsl_other.xml").read()
nsl_other_xml_obj = objectify.fromstring(nsl_other_xml_string)
nsl_dict = NslConverter().convert(xml_obj)
nsl_other_dict = NslOtherConverter().convert(nsl_other_xml_obj)
for item in nsl_dict:
filename = ""
for code in item["substance_codes"]:
if code["code_system"] == "SENSLIDSENSL":
filename = code["code"]
break
with open('json/' + filename + ".json", 'w') as outfile:
json.dump(item, outfile, sort_keys=True)
for item in nsl_other_dict:
with open("json/other/" + item["se_nsl_id"] + ".json", "w") as outfile:
json.dump(item, outfile, sort_keys=True)
|
<commit_before>from lxml import objectify, etree
import simplejson as json
from converter.nsl.nsl_converter import NslConverter
from converter.nsl.other.nsl_other_converter import NslOtherConverter
import sys
xml = open("SEnsl_ssi.xml")
xml_string = xml.read()
xml_obj = objectify.fromstring(xml_string)
nsl_other_xml_string = open("SEnsl_other.xml").read()
nsl_other_xml_obj = objectify.fromstring(nsl_other_xml_string)
nsl_dict = NslConverter().convert(xml_obj)
nsl_other_dict = NslOtherConverter().convert(nsl_other_xml_obj)
for item in nsl_dict:
filename = ""
for code in item["substance_codes"]:
if code["code_system"] == "SENSLIDSENSL":
filename = code["code"]
break
with open('json/' + filename + ".json", 'w') as outfile:
json.dump(item, outfile)
for item in nsl_other_dict:
with open("json/other/" + item["se_nsl_id"] + ".json", "w") as outfile:
json.dump(item, outfile)
<commit_msg>Enable sort keys in json dumps to make sure json is stable<commit_after>
|
from lxml import objectify, etree
import simplejson as json
from converter.nsl.nsl_converter import NslConverter
from converter.nsl.other.nsl_other_converter import NslOtherConverter
import sys
xml = open("SEnsl_ssi.xml")
xml_string = xml.read()
xml_obj = objectify.fromstring(xml_string)
nsl_other_xml_string = open("SEnsl_other.xml").read()
nsl_other_xml_obj = objectify.fromstring(nsl_other_xml_string)
nsl_dict = NslConverter().convert(xml_obj)
nsl_other_dict = NslOtherConverter().convert(nsl_other_xml_obj)
for item in nsl_dict:
filename = ""
for code in item["substance_codes"]:
if code["code_system"] == "SENSLIDSENSL":
filename = code["code"]
break
with open('json/' + filename + ".json", 'w') as outfile:
json.dump(item, outfile, sort_keys=True)
for item in nsl_other_dict:
with open("json/other/" + item["se_nsl_id"] + ".json", "w") as outfile:
json.dump(item, outfile, sort_keys=True)
|
from lxml import objectify, etree
import simplejson as json
from converter.nsl.nsl_converter import NslConverter
from converter.nsl.other.nsl_other_converter import NslOtherConverter
import sys
xml = open("SEnsl_ssi.xml")
xml_string = xml.read()
xml_obj = objectify.fromstring(xml_string)
nsl_other_xml_string = open("SEnsl_other.xml").read()
nsl_other_xml_obj = objectify.fromstring(nsl_other_xml_string)
nsl_dict = NslConverter().convert(xml_obj)
nsl_other_dict = NslOtherConverter().convert(nsl_other_xml_obj)
for item in nsl_dict:
filename = ""
for code in item["substance_codes"]:
if code["code_system"] == "SENSLIDSENSL":
filename = code["code"]
break
with open('json/' + filename + ".json", 'w') as outfile:
json.dump(item, outfile)
for item in nsl_other_dict:
with open("json/other/" + item["se_nsl_id"] + ".json", "w") as outfile:
json.dump(item, outfile)
Enable sort keys in json dumps to make sure json is stablefrom lxml import objectify, etree
import simplejson as json
from converter.nsl.nsl_converter import NslConverter
from converter.nsl.other.nsl_other_converter import NslOtherConverter
import sys
xml = open("SEnsl_ssi.xml")
xml_string = xml.read()
xml_obj = objectify.fromstring(xml_string)
nsl_other_xml_string = open("SEnsl_other.xml").read()
nsl_other_xml_obj = objectify.fromstring(nsl_other_xml_string)
nsl_dict = NslConverter().convert(xml_obj)
nsl_other_dict = NslOtherConverter().convert(nsl_other_xml_obj)
for item in nsl_dict:
filename = ""
for code in item["substance_codes"]:
if code["code_system"] == "SENSLIDSENSL":
filename = code["code"]
break
with open('json/' + filename + ".json", 'w') as outfile:
json.dump(item, outfile, sort_keys=True)
for item in nsl_other_dict:
with open("json/other/" + item["se_nsl_id"] + ".json", "w") as outfile:
json.dump(item, outfile, sort_keys=True)
|
<commit_before>from lxml import objectify, etree
import simplejson as json
from converter.nsl.nsl_converter import NslConverter
from converter.nsl.other.nsl_other_converter import NslOtherConverter
import sys
xml = open("SEnsl_ssi.xml")
xml_string = xml.read()
xml_obj = objectify.fromstring(xml_string)
nsl_other_xml_string = open("SEnsl_other.xml").read()
nsl_other_xml_obj = objectify.fromstring(nsl_other_xml_string)
nsl_dict = NslConverter().convert(xml_obj)
nsl_other_dict = NslOtherConverter().convert(nsl_other_xml_obj)
for item in nsl_dict:
filename = ""
for code in item["substance_codes"]:
if code["code_system"] == "SENSLIDSENSL":
filename = code["code"]
break
with open('json/' + filename + ".json", 'w') as outfile:
json.dump(item, outfile)
for item in nsl_other_dict:
with open("json/other/" + item["se_nsl_id"] + ".json", "w") as outfile:
json.dump(item, outfile)
<commit_msg>Enable sort keys in json dumps to make sure json is stable<commit_after>from lxml import objectify, etree
import simplejson as json
from converter.nsl.nsl_converter import NslConverter
from converter.nsl.other.nsl_other_converter import NslOtherConverter
import sys
xml = open("SEnsl_ssi.xml")
xml_string = xml.read()
xml_obj = objectify.fromstring(xml_string)
nsl_other_xml_string = open("SEnsl_other.xml").read()
nsl_other_xml_obj = objectify.fromstring(nsl_other_xml_string)
nsl_dict = NslConverter().convert(xml_obj)
nsl_other_dict = NslOtherConverter().convert(nsl_other_xml_obj)
for item in nsl_dict:
filename = ""
for code in item["substance_codes"]:
if code["code_system"] == "SENSLIDSENSL":
filename = code["code"]
break
with open('json/' + filename + ".json", 'w') as outfile:
json.dump(item, outfile, sort_keys=True)
for item in nsl_other_dict:
with open("json/other/" + item["se_nsl_id"] + ".json", "w") as outfile:
json.dump(item, outfile, sort_keys=True)
|
8c9414aa3badd31a60ce88f37fed41e98c867d9f
|
windberg_web/__init__.py
|
windberg_web/__init__.py
|
# register a signal do update permissions every migration.
# This is based on app django_extensions update_permissions command
from south.signals import post_migrate
def update_permissions_after_migration(app,**kwargs):
"""
Update app permission just after every migration.
This is based on app django_extensions update_permissions management command.
"""
import settings
from django.db.models import get_app, get_models
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), get_models(), 2 if settings.DEBUG else 0)
post_migrate.connect(update_permissions_after_migration)
|
Update custom permissions after migration
|
Update custom permissions after migration
|
Python
|
bsd-3-clause
|
janLo/Windberg-web,janLo/Windberg-web
|
Update custom permissions after migration
|
# register a signal do update permissions every migration.
# This is based on app django_extensions update_permissions command
from south.signals import post_migrate
def update_permissions_after_migration(app,**kwargs):
"""
Update app permission just after every migration.
This is based on app django_extensions update_permissions management command.
"""
import settings
from django.db.models import get_app, get_models
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), get_models(), 2 if settings.DEBUG else 0)
post_migrate.connect(update_permissions_after_migration)
|
<commit_before><commit_msg>Update custom permissions after migration<commit_after>
|
# register a signal do update permissions every migration.
# This is based on app django_extensions update_permissions command
from south.signals import post_migrate
def update_permissions_after_migration(app,**kwargs):
"""
Update app permission just after every migration.
This is based on app django_extensions update_permissions management command.
"""
import settings
from django.db.models import get_app, get_models
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), get_models(), 2 if settings.DEBUG else 0)
post_migrate.connect(update_permissions_after_migration)
|
Update custom permissions after migration# register a signal do update permissions every migration.
# This is based on app django_extensions update_permissions command
from south.signals import post_migrate
def update_permissions_after_migration(app,**kwargs):
"""
Update app permission just after every migration.
This is based on app django_extensions update_permissions management command.
"""
import settings
from django.db.models import get_app, get_models
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), get_models(), 2 if settings.DEBUG else 0)
post_migrate.connect(update_permissions_after_migration)
|
<commit_before><commit_msg>Update custom permissions after migration<commit_after># register a signal do update permissions every migration.
# This is based on app django_extensions update_permissions command
from south.signals import post_migrate
def update_permissions_after_migration(app,**kwargs):
"""
Update app permission just after every migration.
This is based on app django_extensions update_permissions management command.
"""
import settings
from django.db.models import get_app, get_models
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), get_models(), 2 if settings.DEBUG else 0)
post_migrate.connect(update_permissions_after_migration)
|
|
b32520e0fb2ff72498b16ea75bea53fbbe96854f
|
tests/functional/services/api/images/test_post.py
|
tests/functional/services/api/images/test_post.py
|
class TestOversizedImageReturns400:
# Expectation for this test is that the image with tag is greater than the value defined in config
def test_oversized_image_post(self, make_image_analysis_request):
resp = make_image_analysis_request("anchore/test_images:oversized_image")
details = resp.body["detail"]
msg = resp.body["message"]
assert resp.code == 400
assert (
msg
== "Image size is too large based on max size specified in the configuration"
)
assert (
details["requested_image_compressed_size"]
> details["max_compressed_image_size_mb"]
)
class TestValidImageReturns200:
def test_valid_image_returns_200(self, make_image_analysis_request):
resp = make_image_analysis_request("alpine:latest")
assert resp.code == 200
|
class TestOversizedImageReturns400:
# Expectation for this test is that the image with tag is greater than the value defined in config
def test_oversized_image_post(self, make_image_analysis_request):
resp = make_image_analysis_request("anchore/test_images:oversized_image")
details = resp.body["detail"]
msg = resp.body["message"]
assert resp.code == 400
assert (
msg
== "Image size is too large based on max size specified in the configuration"
)
assert (
details["requested_image_compressed_size_bytes"]
> details["max_compressed_image_size_mb"]
)
class TestValidImageReturns200:
def test_valid_image_returns_200(self, make_image_analysis_request):
resp = make_image_analysis_request("alpine:latest")
assert resp.code == 200
|
Update failure key in test
|
Update failure key in test
Signed-off-by: Zane Burstein <0b53c6e52ca2d19caefaa4da7d81393843bcf79a@anchore.com>
|
Python
|
apache-2.0
|
anchore/anchore-engine,anchore/anchore-engine,anchore/anchore-engine
|
class TestOversizedImageReturns400:
# Expectation for this test is that the image with tag is greater than the value defined in config
def test_oversized_image_post(self, make_image_analysis_request):
resp = make_image_analysis_request("anchore/test_images:oversized_image")
details = resp.body["detail"]
msg = resp.body["message"]
assert resp.code == 400
assert (
msg
== "Image size is too large based on max size specified in the configuration"
)
assert (
details["requested_image_compressed_size"]
> details["max_compressed_image_size_mb"]
)
class TestValidImageReturns200:
def test_valid_image_returns_200(self, make_image_analysis_request):
resp = make_image_analysis_request("alpine:latest")
assert resp.code == 200
Update failure key in test
Signed-off-by: Zane Burstein <0b53c6e52ca2d19caefaa4da7d81393843bcf79a@anchore.com>
|
class TestOversizedImageReturns400:
# Expectation for this test is that the image with tag is greater than the value defined in config
def test_oversized_image_post(self, make_image_analysis_request):
resp = make_image_analysis_request("anchore/test_images:oversized_image")
details = resp.body["detail"]
msg = resp.body["message"]
assert resp.code == 400
assert (
msg
== "Image size is too large based on max size specified in the configuration"
)
assert (
details["requested_image_compressed_size_bytes"]
> details["max_compressed_image_size_mb"]
)
class TestValidImageReturns200:
def test_valid_image_returns_200(self, make_image_analysis_request):
resp = make_image_analysis_request("alpine:latest")
assert resp.code == 200
|
<commit_before>class TestOversizedImageReturns400:
# Expectation for this test is that the image with tag is greater than the value defined in config
def test_oversized_image_post(self, make_image_analysis_request):
resp = make_image_analysis_request("anchore/test_images:oversized_image")
details = resp.body["detail"]
msg = resp.body["message"]
assert resp.code == 400
assert (
msg
== "Image size is too large based on max size specified in the configuration"
)
assert (
details["requested_image_compressed_size"]
> details["max_compressed_image_size_mb"]
)
class TestValidImageReturns200:
def test_valid_image_returns_200(self, make_image_analysis_request):
resp = make_image_analysis_request("alpine:latest")
assert resp.code == 200
<commit_msg>Update failure key in test
Signed-off-by: Zane Burstein <0b53c6e52ca2d19caefaa4da7d81393843bcf79a@anchore.com><commit_after>
|
class TestOversizedImageReturns400:
# Expectation for this test is that the image with tag is greater than the value defined in config
def test_oversized_image_post(self, make_image_analysis_request):
resp = make_image_analysis_request("anchore/test_images:oversized_image")
details = resp.body["detail"]
msg = resp.body["message"]
assert resp.code == 400
assert (
msg
== "Image size is too large based on max size specified in the configuration"
)
assert (
details["requested_image_compressed_size_bytes"]
> details["max_compressed_image_size_mb"]
)
class TestValidImageReturns200:
def test_valid_image_returns_200(self, make_image_analysis_request):
resp = make_image_analysis_request("alpine:latest")
assert resp.code == 200
|
class TestOversizedImageReturns400:
# Expectation for this test is that the image with tag is greater than the value defined in config
def test_oversized_image_post(self, make_image_analysis_request):
resp = make_image_analysis_request("anchore/test_images:oversized_image")
details = resp.body["detail"]
msg = resp.body["message"]
assert resp.code == 400
assert (
msg
== "Image size is too large based on max size specified in the configuration"
)
assert (
details["requested_image_compressed_size"]
> details["max_compressed_image_size_mb"]
)
class TestValidImageReturns200:
def test_valid_image_returns_200(self, make_image_analysis_request):
resp = make_image_analysis_request("alpine:latest")
assert resp.code == 200
Update failure key in test
Signed-off-by: Zane Burstein <0b53c6e52ca2d19caefaa4da7d81393843bcf79a@anchore.com>class TestOversizedImageReturns400:
# Expectation for this test is that the image with tag is greater than the value defined in config
def test_oversized_image_post(self, make_image_analysis_request):
resp = make_image_analysis_request("anchore/test_images:oversized_image")
details = resp.body["detail"]
msg = resp.body["message"]
assert resp.code == 400
assert (
msg
== "Image size is too large based on max size specified in the configuration"
)
assert (
details["requested_image_compressed_size_bytes"]
> details["max_compressed_image_size_mb"]
)
class TestValidImageReturns200:
def test_valid_image_returns_200(self, make_image_analysis_request):
resp = make_image_analysis_request("alpine:latest")
assert resp.code == 200
|
<commit_before>class TestOversizedImageReturns400:
# Expectation for this test is that the image with tag is greater than the value defined in config
def test_oversized_image_post(self, make_image_analysis_request):
resp = make_image_analysis_request("anchore/test_images:oversized_image")
details = resp.body["detail"]
msg = resp.body["message"]
assert resp.code == 400
assert (
msg
== "Image size is too large based on max size specified in the configuration"
)
assert (
details["requested_image_compressed_size"]
> details["max_compressed_image_size_mb"]
)
class TestValidImageReturns200:
def test_valid_image_returns_200(self, make_image_analysis_request):
resp = make_image_analysis_request("alpine:latest")
assert resp.code == 200
<commit_msg>Update failure key in test
Signed-off-by: Zane Burstein <0b53c6e52ca2d19caefaa4da7d81393843bcf79a@anchore.com><commit_after>class TestOversizedImageReturns400:
# Expectation for this test is that the image with tag is greater than the value defined in config
def test_oversized_image_post(self, make_image_analysis_request):
resp = make_image_analysis_request("anchore/test_images:oversized_image")
details = resp.body["detail"]
msg = resp.body["message"]
assert resp.code == 400
assert (
msg
== "Image size is too large based on max size specified in the configuration"
)
assert (
details["requested_image_compressed_size_bytes"]
> details["max_compressed_image_size_mb"]
)
class TestValidImageReturns200:
def test_valid_image_returns_200(self, make_image_analysis_request):
resp = make_image_analysis_request("alpine:latest")
assert resp.code == 200
|
e195ab1f4e83febf7b3b7dff7e1b63b578986167
|
tests.py
|
tests.py
|
from unittest import TestCase
from markdown import Markdown
from mdx_attr_cols import AttrColTreeProcessor
class TestAttrColTreeProcessor(TestCase):
def mk_processor(self, **conf):
md = Markdown()
return AttrColTreeProcessor(md, conf)
def test_config_defaults(self):
p = self.mk_processor()
self.assertEqual(p.columns, 12)
self.assertEqual(p.attr, 'cols')
self.assertEqual(p.tags, set(['section']))
def test_config_overrides(self):
p = self.mk_processor(
columns=16,
attr='columns',
tags=['section', 'div'],
)
self.assertEqual(p.columns, 16)
self.assertEqual(p.attr, 'columns')
self.assertEqual(p.tags, set(['section', 'div']))
|
from unittest import TestCase
import xmltodict
from markdown import Markdown
from markdown.util import etree
from mdx_attr_cols import AttrColTreeProcessor
class XmlTestCaseMixin(object):
def mk_doc(self, s):
return etree.fromstring(
"<div>" + s.strip() + "</div>")
def assertXmlEqual(self, a, b):
self.assertEqual(
xmltodict.parse(etree.tostring(a)),
xmltodict.parse(etree.tostring(b)))
class TestAttrColTreeProcessor(XmlTestCaseMixin, TestCase):
def mk_processor(self, **conf):
md = Markdown()
return AttrColTreeProcessor(md, conf)
def test_config_defaults(self):
p = self.mk_processor()
self.assertEqual(p.columns, 12)
self.assertEqual(p.attr, 'cols')
self.assertEqual(p.tags, set(['section']))
def test_config_overrides(self):
p = self.mk_processor(
columns=16,
attr='columns',
tags=['section', 'div'],
)
self.assertEqual(p.columns, 16)
self.assertEqual(p.attr, 'columns')
self.assertEqual(p.tags, set(['section', 'div']))
def test_simple_rows(self):
root = self.mk_doc("""
<section cols='4'>Foo</section>
<section cols='6'>Bar</section>
<section cols='2'>Beep</section>
""")
p = self.mk_processor()
new_root = p.run(root)
self.assertXmlEqual(new_root, self.mk_doc("""
<div class="row"><div class="col-md-4"><section>Foo</section>
</div><div class="col-md-6"><section>Bar</section>
</div><div class="col-md-2"><section>Beep</section>
</div></div>
"""))
|
Check handling of simple rows.
|
Check handling of simple rows.
|
Python
|
isc
|
CTPUG/mdx_attr_cols
|
from unittest import TestCase
from markdown import Markdown
from mdx_attr_cols import AttrColTreeProcessor
class TestAttrColTreeProcessor(TestCase):
def mk_processor(self, **conf):
md = Markdown()
return AttrColTreeProcessor(md, conf)
def test_config_defaults(self):
p = self.mk_processor()
self.assertEqual(p.columns, 12)
self.assertEqual(p.attr, 'cols')
self.assertEqual(p.tags, set(['section']))
def test_config_overrides(self):
p = self.mk_processor(
columns=16,
attr='columns',
tags=['section', 'div'],
)
self.assertEqual(p.columns, 16)
self.assertEqual(p.attr, 'columns')
self.assertEqual(p.tags, set(['section', 'div']))
Check handling of simple rows.
|
from unittest import TestCase
import xmltodict
from markdown import Markdown
from markdown.util import etree
from mdx_attr_cols import AttrColTreeProcessor
class XmlTestCaseMixin(object):
def mk_doc(self, s):
return etree.fromstring(
"<div>" + s.strip() + "</div>")
def assertXmlEqual(self, a, b):
self.assertEqual(
xmltodict.parse(etree.tostring(a)),
xmltodict.parse(etree.tostring(b)))
class TestAttrColTreeProcessor(XmlTestCaseMixin, TestCase):
def mk_processor(self, **conf):
md = Markdown()
return AttrColTreeProcessor(md, conf)
def test_config_defaults(self):
p = self.mk_processor()
self.assertEqual(p.columns, 12)
self.assertEqual(p.attr, 'cols')
self.assertEqual(p.tags, set(['section']))
def test_config_overrides(self):
p = self.mk_processor(
columns=16,
attr='columns',
tags=['section', 'div'],
)
self.assertEqual(p.columns, 16)
self.assertEqual(p.attr, 'columns')
self.assertEqual(p.tags, set(['section', 'div']))
def test_simple_rows(self):
root = self.mk_doc("""
<section cols='4'>Foo</section>
<section cols='6'>Bar</section>
<section cols='2'>Beep</section>
""")
p = self.mk_processor()
new_root = p.run(root)
self.assertXmlEqual(new_root, self.mk_doc("""
<div class="row"><div class="col-md-4"><section>Foo</section>
</div><div class="col-md-6"><section>Bar</section>
</div><div class="col-md-2"><section>Beep</section>
</div></div>
"""))
|
<commit_before>from unittest import TestCase
from markdown import Markdown
from mdx_attr_cols import AttrColTreeProcessor
class TestAttrColTreeProcessor(TestCase):
def mk_processor(self, **conf):
md = Markdown()
return AttrColTreeProcessor(md, conf)
def test_config_defaults(self):
p = self.mk_processor()
self.assertEqual(p.columns, 12)
self.assertEqual(p.attr, 'cols')
self.assertEqual(p.tags, set(['section']))
def test_config_overrides(self):
p = self.mk_processor(
columns=16,
attr='columns',
tags=['section', 'div'],
)
self.assertEqual(p.columns, 16)
self.assertEqual(p.attr, 'columns')
self.assertEqual(p.tags, set(['section', 'div']))
<commit_msg>Check handling of simple rows.<commit_after>
|
from unittest import TestCase
import xmltodict
from markdown import Markdown
from markdown.util import etree
from mdx_attr_cols import AttrColTreeProcessor
class XmlTestCaseMixin(object):
def mk_doc(self, s):
return etree.fromstring(
"<div>" + s.strip() + "</div>")
def assertXmlEqual(self, a, b):
self.assertEqual(
xmltodict.parse(etree.tostring(a)),
xmltodict.parse(etree.tostring(b)))
class TestAttrColTreeProcessor(XmlTestCaseMixin, TestCase):
def mk_processor(self, **conf):
md = Markdown()
return AttrColTreeProcessor(md, conf)
def test_config_defaults(self):
p = self.mk_processor()
self.assertEqual(p.columns, 12)
self.assertEqual(p.attr, 'cols')
self.assertEqual(p.tags, set(['section']))
def test_config_overrides(self):
p = self.mk_processor(
columns=16,
attr='columns',
tags=['section', 'div'],
)
self.assertEqual(p.columns, 16)
self.assertEqual(p.attr, 'columns')
self.assertEqual(p.tags, set(['section', 'div']))
def test_simple_rows(self):
root = self.mk_doc("""
<section cols='4'>Foo</section>
<section cols='6'>Bar</section>
<section cols='2'>Beep</section>
""")
p = self.mk_processor()
new_root = p.run(root)
self.assertXmlEqual(new_root, self.mk_doc("""
<div class="row"><div class="col-md-4"><section>Foo</section>
</div><div class="col-md-6"><section>Bar</section>
</div><div class="col-md-2"><section>Beep</section>
</div></div>
"""))
|
from unittest import TestCase
from markdown import Markdown
from mdx_attr_cols import AttrColTreeProcessor
class TestAttrColTreeProcessor(TestCase):
def mk_processor(self, **conf):
md = Markdown()
return AttrColTreeProcessor(md, conf)
def test_config_defaults(self):
p = self.mk_processor()
self.assertEqual(p.columns, 12)
self.assertEqual(p.attr, 'cols')
self.assertEqual(p.tags, set(['section']))
def test_config_overrides(self):
p = self.mk_processor(
columns=16,
attr='columns',
tags=['section', 'div'],
)
self.assertEqual(p.columns, 16)
self.assertEqual(p.attr, 'columns')
self.assertEqual(p.tags, set(['section', 'div']))
Check handling of simple rows.from unittest import TestCase
import xmltodict
from markdown import Markdown
from markdown.util import etree
from mdx_attr_cols import AttrColTreeProcessor
class XmlTestCaseMixin(object):
def mk_doc(self, s):
return etree.fromstring(
"<div>" + s.strip() + "</div>")
def assertXmlEqual(self, a, b):
self.assertEqual(
xmltodict.parse(etree.tostring(a)),
xmltodict.parse(etree.tostring(b)))
class TestAttrColTreeProcessor(XmlTestCaseMixin, TestCase):
def mk_processor(self, **conf):
md = Markdown()
return AttrColTreeProcessor(md, conf)
def test_config_defaults(self):
p = self.mk_processor()
self.assertEqual(p.columns, 12)
self.assertEqual(p.attr, 'cols')
self.assertEqual(p.tags, set(['section']))
def test_config_overrides(self):
p = self.mk_processor(
columns=16,
attr='columns',
tags=['section', 'div'],
)
self.assertEqual(p.columns, 16)
self.assertEqual(p.attr, 'columns')
self.assertEqual(p.tags, set(['section', 'div']))
def test_simple_rows(self):
root = self.mk_doc("""
<section cols='4'>Foo</section>
<section cols='6'>Bar</section>
<section cols='2'>Beep</section>
""")
p = self.mk_processor()
new_root = p.run(root)
self.assertXmlEqual(new_root, self.mk_doc("""
<div class="row"><div class="col-md-4"><section>Foo</section>
</div><div class="col-md-6"><section>Bar</section>
</div><div class="col-md-2"><section>Beep</section>
</div></div>
"""))
|
<commit_before>from unittest import TestCase
from markdown import Markdown
from mdx_attr_cols import AttrColTreeProcessor
class TestAttrColTreeProcessor(TestCase):
def mk_processor(self, **conf):
md = Markdown()
return AttrColTreeProcessor(md, conf)
def test_config_defaults(self):
p = self.mk_processor()
self.assertEqual(p.columns, 12)
self.assertEqual(p.attr, 'cols')
self.assertEqual(p.tags, set(['section']))
def test_config_overrides(self):
p = self.mk_processor(
columns=16,
attr='columns',
tags=['section', 'div'],
)
self.assertEqual(p.columns, 16)
self.assertEqual(p.attr, 'columns')
self.assertEqual(p.tags, set(['section', 'div']))
<commit_msg>Check handling of simple rows.<commit_after>from unittest import TestCase
import xmltodict
from markdown import Markdown
from markdown.util import etree
from mdx_attr_cols import AttrColTreeProcessor
class XmlTestCaseMixin(object):
def mk_doc(self, s):
return etree.fromstring(
"<div>" + s.strip() + "</div>")
def assertXmlEqual(self, a, b):
self.assertEqual(
xmltodict.parse(etree.tostring(a)),
xmltodict.parse(etree.tostring(b)))
class TestAttrColTreeProcessor(XmlTestCaseMixin, TestCase):
def mk_processor(self, **conf):
md = Markdown()
return AttrColTreeProcessor(md, conf)
def test_config_defaults(self):
p = self.mk_processor()
self.assertEqual(p.columns, 12)
self.assertEqual(p.attr, 'cols')
self.assertEqual(p.tags, set(['section']))
def test_config_overrides(self):
p = self.mk_processor(
columns=16,
attr='columns',
tags=['section', 'div'],
)
self.assertEqual(p.columns, 16)
self.assertEqual(p.attr, 'columns')
self.assertEqual(p.tags, set(['section', 'div']))
def test_simple_rows(self):
root = self.mk_doc("""
<section cols='4'>Foo</section>
<section cols='6'>Bar</section>
<section cols='2'>Beep</section>
""")
p = self.mk_processor()
new_root = p.run(root)
self.assertXmlEqual(new_root, self.mk_doc("""
<div class="row"><div class="col-md-4"><section>Foo</section>
</div><div class="col-md-6"><section>Bar</section>
</div><div class="col-md-2"><section>Beep</section>
</div></div>
"""))
|
ab73b2132825e9415ff24306a9d89da10294d79e
|
icekit/utils/management/base.py
|
icekit/utils/management/base.py
|
import time
from django import db
from django.core.management.base import BaseCommand
from optparse import make_option
class CronBaseCommand(BaseCommand):
help = ('Long running process (indefinitely) that executes task on a '
'specified interval (default is 1 min). The intent for the '
'management command is to be used with `django-supervisor` or '
'similar.')
option_list = BaseCommand.option_list + (
make_option(
'-i',
'--interval',
dest='interval',
type='int',
help='Number of minutes to wait before executing task.',
default=1
),
)
def handle(self, *args, **options):
while True:
self.task(*args, **options)
self.cleanup()
self.stdout.write('Sleeping for %s min.' % options['interval'])
time.sleep(60 * options['interval'])
def cleanup(self):
"""
Performs clean-up after task is completed before it is executed again
in the next internal.
"""
# Closes connections to all databases to avoid the long running process
# from holding connections indefinitely.
for alias in db.connections.databases:
self.stdout.write('Closing database connection: %s' % alias)
db.connections[alias].close()
def task(self, *args, **options):
"""
The actual logic of the task to execute. Subclasses must implement
this method.
"""
raise NotImplementedError(
'subclasses of CronBaseCommand must provide a task() method')
|
import logging
import time
from django import db
from django.core.management.base import BaseCommand
from optparse import make_option
logger = logging.getLogger(__name__)
class CronBaseCommand(BaseCommand):
help = ('Long running process (indefinitely) that executes task on a '
'specified interval (default is 1 min). The intent for the '
'management command is to be used with `django-supervisor` or '
'similar.')
option_list = BaseCommand.option_list + (
make_option(
'-i',
'--interval',
dest='interval',
type='int',
help='Number of minutes to wait before executing task.',
default=1
),
)
def handle(self, *args, **options):
while True:
self.task(*args, **options)
self.cleanup()
logger.info('Sleeping for %s min.', options['interval'])
time.sleep(60 * options['interval'])
def cleanup(self):
"""
Performs clean-up after task is completed before it is executed again
in the next internal.
"""
# Closes connections to all databases to avoid the long running process
# from holding connections indefinitely.
for alias in db.connections.databases:
logger.info('Closing database connection: %s', alias)
db.connections[alias].close()
def task(self, *args, **options):
"""
The actual logic of the task to execute. Subclasses must implement
this method.
"""
raise NotImplementedError(
'subclasses of CronBaseCommand must provide a task() method')
|
Use `logging` instead of printing to stdout by default.
|
Use `logging` instead of printing to stdout by default.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
import time
from django import db
from django.core.management.base import BaseCommand
from optparse import make_option
class CronBaseCommand(BaseCommand):
help = ('Long running process (indefinitely) that executes task on a '
'specified interval (default is 1 min). The intent for the '
'management command is to be used with `django-supervisor` or '
'similar.')
option_list = BaseCommand.option_list + (
make_option(
'-i',
'--interval',
dest='interval',
type='int',
help='Number of minutes to wait before executing task.',
default=1
),
)
def handle(self, *args, **options):
while True:
self.task(*args, **options)
self.cleanup()
self.stdout.write('Sleeping for %s min.' % options['interval'])
time.sleep(60 * options['interval'])
def cleanup(self):
"""
Performs clean-up after task is completed before it is executed again
in the next internal.
"""
# Closes connections to all databases to avoid the long running process
# from holding connections indefinitely.
for alias in db.connections.databases:
self.stdout.write('Closing database connection: %s' % alias)
db.connections[alias].close()
def task(self, *args, **options):
"""
The actual logic of the task to execute. Subclasses must implement
this method.
"""
raise NotImplementedError(
'subclasses of CronBaseCommand must provide a task() method')
Use `logging` instead of printing to stdout by default.
|
import logging
import time
from django import db
from django.core.management.base import BaseCommand
from optparse import make_option
logger = logging.getLogger(__name__)
class CronBaseCommand(BaseCommand):
help = ('Long running process (indefinitely) that executes task on a '
'specified interval (default is 1 min). The intent for the '
'management command is to be used with `django-supervisor` or '
'similar.')
option_list = BaseCommand.option_list + (
make_option(
'-i',
'--interval',
dest='interval',
type='int',
help='Number of minutes to wait before executing task.',
default=1
),
)
def handle(self, *args, **options):
while True:
self.task(*args, **options)
self.cleanup()
logger.info('Sleeping for %s min.', options['interval'])
time.sleep(60 * options['interval'])
def cleanup(self):
"""
Performs clean-up after task is completed before it is executed again
in the next internal.
"""
# Closes connections to all databases to avoid the long running process
# from holding connections indefinitely.
for alias in db.connections.databases:
logger.info('Closing database connection: %s', alias)
db.connections[alias].close()
def task(self, *args, **options):
"""
The actual logic of the task to execute. Subclasses must implement
this method.
"""
raise NotImplementedError(
'subclasses of CronBaseCommand must provide a task() method')
|
<commit_before>import time
from django import db
from django.core.management.base import BaseCommand
from optparse import make_option
class CronBaseCommand(BaseCommand):
help = ('Long running process (indefinitely) that executes task on a '
'specified interval (default is 1 min). The intent for the '
'management command is to be used with `django-supervisor` or '
'similar.')
option_list = BaseCommand.option_list + (
make_option(
'-i',
'--interval',
dest='interval',
type='int',
help='Number of minutes to wait before executing task.',
default=1
),
)
def handle(self, *args, **options):
while True:
self.task(*args, **options)
self.cleanup()
self.stdout.write('Sleeping for %s min.' % options['interval'])
time.sleep(60 * options['interval'])
def cleanup(self):
"""
Performs clean-up after task is completed before it is executed again
in the next internal.
"""
# Closes connections to all databases to avoid the long running process
# from holding connections indefinitely.
for alias in db.connections.databases:
self.stdout.write('Closing database connection: %s' % alias)
db.connections[alias].close()
def task(self, *args, **options):
"""
The actual logic of the task to execute. Subclasses must implement
this method.
"""
raise NotImplementedError(
'subclasses of CronBaseCommand must provide a task() method')
<commit_msg>Use `logging` instead of printing to stdout by default.<commit_after>
|
import logging
import time
from django import db
from django.core.management.base import BaseCommand
from optparse import make_option
logger = logging.getLogger(__name__)
class CronBaseCommand(BaseCommand):
help = ('Long running process (indefinitely) that executes task on a '
'specified interval (default is 1 min). The intent for the '
'management command is to be used with `django-supervisor` or '
'similar.')
option_list = BaseCommand.option_list + (
make_option(
'-i',
'--interval',
dest='interval',
type='int',
help='Number of minutes to wait before executing task.',
default=1
),
)
def handle(self, *args, **options):
while True:
self.task(*args, **options)
self.cleanup()
logger.info('Sleeping for %s min.', options['interval'])
time.sleep(60 * options['interval'])
def cleanup(self):
"""
Performs clean-up after task is completed before it is executed again
in the next internal.
"""
# Closes connections to all databases to avoid the long running process
# from holding connections indefinitely.
for alias in db.connections.databases:
logger.info('Closing database connection: %s', alias)
db.connections[alias].close()
def task(self, *args, **options):
"""
The actual logic of the task to execute. Subclasses must implement
this method.
"""
raise NotImplementedError(
'subclasses of CronBaseCommand must provide a task() method')
|
import time
from django import db
from django.core.management.base import BaseCommand
from optparse import make_option
class CronBaseCommand(BaseCommand):
help = ('Long running process (indefinitely) that executes task on a '
'specified interval (default is 1 min). The intent for the '
'management command is to be used with `django-supervisor` or '
'similar.')
option_list = BaseCommand.option_list + (
make_option(
'-i',
'--interval',
dest='interval',
type='int',
help='Number of minutes to wait before executing task.',
default=1
),
)
def handle(self, *args, **options):
while True:
self.task(*args, **options)
self.cleanup()
self.stdout.write('Sleeping for %s min.' % options['interval'])
time.sleep(60 * options['interval'])
def cleanup(self):
"""
Performs clean-up after task is completed before it is executed again
in the next internal.
"""
# Closes connections to all databases to avoid the long running process
# from holding connections indefinitely.
for alias in db.connections.databases:
self.stdout.write('Closing database connection: %s' % alias)
db.connections[alias].close()
def task(self, *args, **options):
"""
The actual logic of the task to execute. Subclasses must implement
this method.
"""
raise NotImplementedError(
'subclasses of CronBaseCommand must provide a task() method')
Use `logging` instead of printing to stdout by default.import logging
import time
from django import db
from django.core.management.base import BaseCommand
from optparse import make_option
logger = logging.getLogger(__name__)
class CronBaseCommand(BaseCommand):
help = ('Long running process (indefinitely) that executes task on a '
'specified interval (default is 1 min). The intent for the '
'management command is to be used with `django-supervisor` or '
'similar.')
option_list = BaseCommand.option_list + (
make_option(
'-i',
'--interval',
dest='interval',
type='int',
help='Number of minutes to wait before executing task.',
default=1
),
)
def handle(self, *args, **options):
while True:
self.task(*args, **options)
self.cleanup()
logger.info('Sleeping for %s min.', options['interval'])
time.sleep(60 * options['interval'])
def cleanup(self):
"""
Performs clean-up after task is completed before it is executed again
in the next internal.
"""
# Closes connections to all databases to avoid the long running process
# from holding connections indefinitely.
for alias in db.connections.databases:
logger.info('Closing database connection: %s', alias)
db.connections[alias].close()
def task(self, *args, **options):
"""
The actual logic of the task to execute. Subclasses must implement
this method.
"""
raise NotImplementedError(
'subclasses of CronBaseCommand must provide a task() method')
|
<commit_before>import time
from django import db
from django.core.management.base import BaseCommand
from optparse import make_option
class CronBaseCommand(BaseCommand):
help = ('Long running process (indefinitely) that executes task on a '
'specified interval (default is 1 min). The intent for the '
'management command is to be used with `django-supervisor` or '
'similar.')
option_list = BaseCommand.option_list + (
make_option(
'-i',
'--interval',
dest='interval',
type='int',
help='Number of minutes to wait before executing task.',
default=1
),
)
def handle(self, *args, **options):
while True:
self.task(*args, **options)
self.cleanup()
self.stdout.write('Sleeping for %s min.' % options['interval'])
time.sleep(60 * options['interval'])
def cleanup(self):
"""
Performs clean-up after task is completed before it is executed again
in the next internal.
"""
# Closes connections to all databases to avoid the long running process
# from holding connections indefinitely.
for alias in db.connections.databases:
self.stdout.write('Closing database connection: %s' % alias)
db.connections[alias].close()
def task(self, *args, **options):
"""
The actual logic of the task to execute. Subclasses must implement
this method.
"""
raise NotImplementedError(
'subclasses of CronBaseCommand must provide a task() method')
<commit_msg>Use `logging` instead of printing to stdout by default.<commit_after>import logging
import time
from django import db
from django.core.management.base import BaseCommand
from optparse import make_option
logger = logging.getLogger(__name__)
class CronBaseCommand(BaseCommand):
help = ('Long running process (indefinitely) that executes task on a '
'specified interval (default is 1 min). The intent for the '
'management command is to be used with `django-supervisor` or '
'similar.')
option_list = BaseCommand.option_list + (
make_option(
'-i',
'--interval',
dest='interval',
type='int',
help='Number of minutes to wait before executing task.',
default=1
),
)
def handle(self, *args, **options):
while True:
self.task(*args, **options)
self.cleanup()
logger.info('Sleeping for %s min.', options['interval'])
time.sleep(60 * options['interval'])
def cleanup(self):
"""
Performs clean-up after task is completed before it is executed again
in the next internal.
"""
# Closes connections to all databases to avoid the long running process
# from holding connections indefinitely.
for alias in db.connections.databases:
logger.info('Closing database connection: %s', alias)
db.connections[alias].close()
def task(self, *args, **options):
"""
The actual logic of the task to execute. Subclasses must implement
this method.
"""
raise NotImplementedError(
'subclasses of CronBaseCommand must provide a task() method')
|
4a3f56f895b3ed1c4f0f7ae7b012f9048f939d7f
|
runtests.py
|
runtests.py
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.extensions',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.parsers',
'firmant.parsers.posts',
'firmant.tags',
'firmant.utils',
'firmant.writers',
'firmant.writers.j2'
]
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.extensions',
'firmant.feeds',
'firmant.i18n',
#'firmant.parser',
'firmant.parsers',
'firmant.parsers.posts',
'firmant.tags',
'firmant.utils',
#'firmant.writers',
#'firmant.writers.j2'
]
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
Comment out modules with broken tests.
|
Comment out modules with broken tests.
|
Python
|
bsd-3-clause
|
rescrv/firmant
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.extensions',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.parsers',
'firmant.parsers.posts',
'firmant.tags',
'firmant.utils',
'firmant.writers',
'firmant.writers.j2'
]
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
Comment out modules with broken tests.
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.extensions',
'firmant.feeds',
'firmant.i18n',
#'firmant.parser',
'firmant.parsers',
'firmant.parsers.posts',
'firmant.tags',
'firmant.utils',
#'firmant.writers',
#'firmant.writers.j2'
]
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
<commit_before>#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.extensions',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.parsers',
'firmant.parsers.posts',
'firmant.tags',
'firmant.utils',
'firmant.writers',
'firmant.writers.j2'
]
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
<commit_msg>Comment out modules with broken tests.<commit_after>
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.extensions',
'firmant.feeds',
'firmant.i18n',
#'firmant.parser',
'firmant.parsers',
'firmant.parsers.posts',
'firmant.tags',
'firmant.utils',
#'firmant.writers',
#'firmant.writers.j2'
]
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.extensions',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.parsers',
'firmant.parsers.posts',
'firmant.tags',
'firmant.utils',
'firmant.writers',
'firmant.writers.j2'
]
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
Comment out modules with broken tests.#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.extensions',
'firmant.feeds',
'firmant.i18n',
#'firmant.parser',
'firmant.parsers',
'firmant.parsers.posts',
'firmant.tags',
'firmant.utils',
#'firmant.writers',
#'firmant.writers.j2'
]
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
<commit_before>#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.extensions',
'firmant.feeds',
'firmant.i18n',
'firmant.parser',
'firmant.parsers',
'firmant.parsers.posts',
'firmant.tags',
'firmant.utils',
'firmant.writers',
'firmant.writers.j2'
]
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
<commit_msg>Comment out modules with broken tests.<commit_after>#!/usr/bin/python
import unittest
import doctest
import sys
from optparse import OptionParser
from firmant.utils import get_module
# Import this now to avoid it throwing errors.
import pytz
if __name__ == '__main__':
suite = unittest.TestSuite()
modules = ['firmant.du',
'firmant.extensions',
'firmant.feeds',
'firmant.i18n',
#'firmant.parser',
'firmant.parsers',
'firmant.parsers.posts',
'firmant.tags',
'firmant.utils',
#'firmant.writers',
#'firmant.writers.j2'
]
for module in modules:
mod = get_module(module)
args = {}
for attr in ['module_relative', 'package', 'setUp', 'tearDown', 'globs',
'optionflags', 'parser', 'encoding']:
if hasattr(mod, '_' + attr):
args[attr] = getattr(mod, '_' + attr)
suite.addTest(doctest.DocTestSuite(mod, **args))
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
sys.exit(1)
|
4625a1ed4115b85ce7d96a0d0ba486e589e9fe6c
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from optparse import OptionParser
from os.path import abspath, dirname
from django.test.simple import DjangoTestSuiteRunner
def runtests(*test_args, **kwargs):
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
test_runner = DjangoTestSuiteRunner(
verbosity=kwargs.get('verbosity', 1),
interactive=kwargs.get('interactive', False),
failfast=kwargs.get('failfast')
)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--failfast', action='store_true', default=False, dest='failfast')
(options, args) = parser.parse_args()
runtests(failfast=options.failfast, *args)
|
#!/usr/bin/env python
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.test.utils import get_runner
from django.conf import settings
import django
if django.VERSION >= (1, 7):
django.setup()
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['tests'])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
|
Make test runner only run basis tests
|
Make test runner only run basis tests
and not dependecies tests
|
Python
|
mit
|
frecar/django-basis
|
#!/usr/bin/env python
import sys
from optparse import OptionParser
from os.path import abspath, dirname
from django.test.simple import DjangoTestSuiteRunner
def runtests(*test_args, **kwargs):
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
test_runner = DjangoTestSuiteRunner(
verbosity=kwargs.get('verbosity', 1),
interactive=kwargs.get('interactive', False),
failfast=kwargs.get('failfast')
)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--failfast', action='store_true', default=False, dest='failfast')
(options, args) = parser.parse_args()
runtests(failfast=options.failfast, *args)Make test runner only run basis tests
and not dependecies tests
|
#!/usr/bin/env python
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.test.utils import get_runner
from django.conf import settings
import django
if django.VERSION >= (1, 7):
django.setup()
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['tests'])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
|
<commit_before>#!/usr/bin/env python
import sys
from optparse import OptionParser
from os.path import abspath, dirname
from django.test.simple import DjangoTestSuiteRunner
def runtests(*test_args, **kwargs):
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
test_runner = DjangoTestSuiteRunner(
verbosity=kwargs.get('verbosity', 1),
interactive=kwargs.get('interactive', False),
failfast=kwargs.get('failfast')
)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--failfast', action='store_true', default=False, dest='failfast')
(options, args) = parser.parse_args()
runtests(failfast=options.failfast, *args)<commit_msg>Make test runner only run basis tests
and not dependecies tests<commit_after>
|
#!/usr/bin/env python
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.test.utils import get_runner
from django.conf import settings
import django
if django.VERSION >= (1, 7):
django.setup()
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['tests'])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
import sys
from optparse import OptionParser
from os.path import abspath, dirname
from django.test.simple import DjangoTestSuiteRunner
def runtests(*test_args, **kwargs):
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
test_runner = DjangoTestSuiteRunner(
verbosity=kwargs.get('verbosity', 1),
interactive=kwargs.get('interactive', False),
failfast=kwargs.get('failfast')
)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--failfast', action='store_true', default=False, dest='failfast')
(options, args) = parser.parse_args()
runtests(failfast=options.failfast, *args)Make test runner only run basis tests
and not dependecies tests#!/usr/bin/env python
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.test.utils import get_runner
from django.conf import settings
import django
if django.VERSION >= (1, 7):
django.setup()
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['tests'])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
|
<commit_before>#!/usr/bin/env python
import sys
from optparse import OptionParser
from os.path import abspath, dirname
from django.test.simple import DjangoTestSuiteRunner
def runtests(*test_args, **kwargs):
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
test_runner = DjangoTestSuiteRunner(
verbosity=kwargs.get('verbosity', 1),
interactive=kwargs.get('interactive', False),
failfast=kwargs.get('failfast')
)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--failfast', action='store_true', default=False, dest='failfast')
(options, args) = parser.parse_args()
runtests(failfast=options.failfast, *args)<commit_msg>Make test runner only run basis tests
and not dependecies tests<commit_after>#!/usr/bin/env python
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.test.utils import get_runner
from django.conf import settings
import django
if django.VERSION >= (1, 7):
django.setup()
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['tests'])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
|
1648cec8667611aa7fec4bff12f873f8e6294f82
|
scripts/bodyconf.py
|
scripts/bodyconf.py
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
pixval = {
'hair': 10,
'head': 20,
'upper': 30,
'arms': 40,
'lower': 50,
'legs': 60,
'feet': 70
}
groups = [
[10, 20],
[30, 40],
[50, 60],
[70]
]
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
pixval = {
'hair': 10,
'head': 20,
'upper': 30,
'arms': 40,
'lower': 50,
'legs': 60,
'feet': 70
}
groups = [
[10, 20],
[30, 40],
[50, 60],
[70],
[0,10,20,30,40,50,60,70]
]
|
Add whole image as an input
|
Add whole image as an input
|
Python
|
mit
|
Cysu/Person-Reid,Cysu/Person-Reid,Cysu/Person-Reid,Cysu/Person-Reid,Cysu/Person-Reid
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
pixval = {
'hair': 10,
'head': 20,
'upper': 30,
'arms': 40,
'lower': 50,
'legs': 60,
'feet': 70
}
groups = [
[10, 20],
[30, 40],
[50, 60],
[70]
]
Add whole image as an input
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
pixval = {
'hair': 10,
'head': 20,
'upper': 30,
'arms': 40,
'lower': 50,
'legs': 60,
'feet': 70
}
groups = [
[10, 20],
[30, 40],
[50, 60],
[70],
[0,10,20,30,40,50,60,70]
]
|
<commit_before>#!/usr/bin/python2
# -*- coding: utf-8 -*-
pixval = {
'hair': 10,
'head': 20,
'upper': 30,
'arms': 40,
'lower': 50,
'legs': 60,
'feet': 70
}
groups = [
[10, 20],
[30, 40],
[50, 60],
[70]
]
<commit_msg>Add whole image as an input<commit_after>
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
pixval = {
'hair': 10,
'head': 20,
'upper': 30,
'arms': 40,
'lower': 50,
'legs': 60,
'feet': 70
}
groups = [
[10, 20],
[30, 40],
[50, 60],
[70],
[0,10,20,30,40,50,60,70]
]
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
pixval = {
'hair': 10,
'head': 20,
'upper': 30,
'arms': 40,
'lower': 50,
'legs': 60,
'feet': 70
}
groups = [
[10, 20],
[30, 40],
[50, 60],
[70]
]
Add whole image as an input#!/usr/bin/python2
# -*- coding: utf-8 -*-
pixval = {
'hair': 10,
'head': 20,
'upper': 30,
'arms': 40,
'lower': 50,
'legs': 60,
'feet': 70
}
groups = [
[10, 20],
[30, 40],
[50, 60],
[70],
[0,10,20,30,40,50,60,70]
]
|
<commit_before>#!/usr/bin/python2
# -*- coding: utf-8 -*-
pixval = {
'hair': 10,
'head': 20,
'upper': 30,
'arms': 40,
'lower': 50,
'legs': 60,
'feet': 70
}
groups = [
[10, 20],
[30, 40],
[50, 60],
[70]
]
<commit_msg>Add whole image as an input<commit_after>#!/usr/bin/python2
# -*- coding: utf-8 -*-
pixval = {
'hair': 10,
'head': 20,
'upper': 30,
'arms': 40,
'lower': 50,
'legs': 60,
'feet': 70
}
groups = [
[10, 20],
[30, 40],
[50, 60],
[70],
[0,10,20,30,40,50,60,70]
]
|
00b9bee02f2b7c399da9cd3488790dd53ed7801e
|
jobsboard/jobs/forms.py
|
jobsboard/jobs/forms.py
|
from django import forms
from .models import Job
class JobForm(forms.ModelForm):
# class Meta:
# model = Job
# fields = ('title', 'creator',)
class Meta:
model = Job
exclude = ['created', 'updated',]
|
from django import forms
from .models import Job
class JobForm(forms.ModelForm):
# class Meta:
# model = Job
# fields = ('title', 'creator',)
class Meta:
model = Job
exclude = ['created', 'updated', 'expiry']
|
Hide expiry date field from job post create page
|
Hide expiry date field from job post create page
|
Python
|
mit
|
pythonph/jobs-board,pythonph/jobs-board,pythonph/jobs-board
|
from django import forms
from .models import Job
class JobForm(forms.ModelForm):
# class Meta:
# model = Job
# fields = ('title', 'creator',)
class Meta:
model = Job
exclude = ['created', 'updated',]
Hide expiry date field from job post create page
|
from django import forms
from .models import Job
class JobForm(forms.ModelForm):
# class Meta:
# model = Job
# fields = ('title', 'creator',)
class Meta:
model = Job
exclude = ['created', 'updated', 'expiry']
|
<commit_before>from django import forms
from .models import Job
class JobForm(forms.ModelForm):
# class Meta:
# model = Job
# fields = ('title', 'creator',)
class Meta:
model = Job
exclude = ['created', 'updated',]
<commit_msg>Hide expiry date field from job post create page<commit_after>
|
from django import forms
from .models import Job
class JobForm(forms.ModelForm):
# class Meta:
# model = Job
# fields = ('title', 'creator',)
class Meta:
model = Job
exclude = ['created', 'updated', 'expiry']
|
from django import forms
from .models import Job
class JobForm(forms.ModelForm):
# class Meta:
# model = Job
# fields = ('title', 'creator',)
class Meta:
model = Job
exclude = ['created', 'updated',]
Hide expiry date field from job post create pagefrom django import forms
from .models import Job
class JobForm(forms.ModelForm):
# class Meta:
# model = Job
# fields = ('title', 'creator',)
class Meta:
model = Job
exclude = ['created', 'updated', 'expiry']
|
<commit_before>from django import forms
from .models import Job
class JobForm(forms.ModelForm):
# class Meta:
# model = Job
# fields = ('title', 'creator',)
class Meta:
model = Job
exclude = ['created', 'updated',]
<commit_msg>Hide expiry date field from job post create page<commit_after>from django import forms
from .models import Job
class JobForm(forms.ModelForm):
# class Meta:
# model = Job
# fields = ('title', 'creator',)
class Meta:
model = Job
exclude = ['created', 'updated', 'expiry']
|
c87a71035782da3a9f9b26c9fb6a30ce42855913
|
board.py
|
board.py
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
if y == self.rows - 1: # Reached bottom
self.boardMatrix.itemset((y, column), value)
break
elif self.boardMatrix.item(y + 1, column) == 0: # Next row is also empty
continue
else: # Next row is not empty
self.boardMatrix.itemset((y, column), value)
break
return True
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.matrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def add_piece(self, column, value):
"Check if column is full."
if self.matrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
if y == self.rows - 1: # Reached bottom
self.matrix.itemset((y, column), value)
break
elif self.matrix.item(y + 1, column) == 0: # Next row is also empty
continue
else: # Next row is not empty
self.matrix.itemset((y, column), value)
break
return True
|
Change from camelcase to underscores.
|
Change from camelcase to underscores.
|
Python
|
mit
|
isaacarvestad/four-in-a-row
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
if y == self.rows - 1: # Reached bottom
self.boardMatrix.itemset((y, column), value)
break
elif self.boardMatrix.item(y + 1, column) == 0: # Next row is also empty
continue
else: # Next row is not empty
self.boardMatrix.itemset((y, column), value)
break
return True
Change from camelcase to underscores.
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.matrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def add_piece(self, column, value):
"Check if column is full."
if self.matrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
if y == self.rows - 1: # Reached bottom
self.matrix.itemset((y, column), value)
break
elif self.matrix.item(y + 1, column) == 0: # Next row is also empty
continue
else: # Next row is not empty
self.matrix.itemset((y, column), value)
break
return True
|
<commit_before>import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
if y == self.rows - 1: # Reached bottom
self.boardMatrix.itemset((y, column), value)
break
elif self.boardMatrix.item(y + 1, column) == 0: # Next row is also empty
continue
else: # Next row is not empty
self.boardMatrix.itemset((y, column), value)
break
return True
<commit_msg>Change from camelcase to underscores.<commit_after>
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.matrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def add_piece(self, column, value):
"Check if column is full."
if self.matrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
if y == self.rows - 1: # Reached bottom
self.matrix.itemset((y, column), value)
break
elif self.matrix.item(y + 1, column) == 0: # Next row is also empty
continue
else: # Next row is not empty
self.matrix.itemset((y, column), value)
break
return True
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
if y == self.rows - 1: # Reached bottom
self.boardMatrix.itemset((y, column), value)
break
elif self.boardMatrix.item(y + 1, column) == 0: # Next row is also empty
continue
else: # Next row is not empty
self.boardMatrix.itemset((y, column), value)
break
return True
Change from camelcase to underscores.import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.matrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def add_piece(self, column, value):
"Check if column is full."
if self.matrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
if y == self.rows - 1: # Reached bottom
self.matrix.itemset((y, column), value)
break
elif self.matrix.item(y + 1, column) == 0: # Next row is also empty
continue
else: # Next row is not empty
self.matrix.itemset((y, column), value)
break
return True
|
<commit_before>import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
if y == self.rows - 1: # Reached bottom
self.boardMatrix.itemset((y, column), value)
break
elif self.boardMatrix.item(y + 1, column) == 0: # Next row is also empty
continue
else: # Next row is not empty
self.boardMatrix.itemset((y, column), value)
break
return True
<commit_msg>Change from camelcase to underscores.<commit_after>import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.matrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def add_piece(self, column, value):
"Check if column is full."
if self.matrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
if y == self.rows - 1: # Reached bottom
self.matrix.itemset((y, column), value)
break
elif self.matrix.item(y + 1, column) == 0: # Next row is also empty
continue
else: # Next row is not empty
self.matrix.itemset((y, column), value)
break
return True
|
dfaff0553379f5686efc5da722e2ffac455a2d9f
|
administrator/serializers.py
|
administrator/serializers.py
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
|
Add is_active to category serializer
|
Add is_active to category serializer
|
Python
|
apache-2.0
|
belatrix/BackendAllStars
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
Add is_active to category serializer
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
|
<commit_before>from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
<commit_msg>Add is_active to category serializer<commit_after>
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
Add is_active to category serializerfrom categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
|
<commit_before>from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
<commit_msg>Add is_active to category serializer<commit_after>from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
|
0c79d2fee14d5d2bff51ade9d643df22dde7f301
|
polyaxon/polyaxon/config_settings/scheduler/__init__.py
|
polyaxon/polyaxon/config_settings/scheduler/__init__.py
|
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from .apps import *
|
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from polyaxon.config_settings.registry import *
from .apps import *
|
Add registry settings to scheduler
|
Add registry settings to scheduler
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from .apps import *
Add registry settings to scheduler
|
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from polyaxon.config_settings.registry import *
from .apps import *
|
<commit_before>from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from .apps import *
<commit_msg>Add registry settings to scheduler<commit_after>
|
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from polyaxon.config_settings.registry import *
from .apps import *
|
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from .apps import *
Add registry settings to schedulerfrom polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from polyaxon.config_settings.registry import *
from .apps import *
|
<commit_before>from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from .apps import *
<commit_msg>Add registry settings to scheduler<commit_after>from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from polyaxon.config_settings.registry import *
from .apps import *
|
2d5152e72e1813ee7bf040f4033d369d60a44cc2
|
pipeline/compute_rpp/compute_rpp.py
|
pipeline/compute_rpp/compute_rpp.py
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.restoration import denoise
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Reject the outliers using thresholding method
power_ride = denoise.outliers_rejection(power_ride)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
Update the pipeline to take into account the outlier rejection method to compute the RPP
|
Update the pipeline to take into account the outlier rejection method to compute the RPP
|
Python
|
mit
|
glemaitre/power-profile,glemaitre/power-profile,clemaitre58/power-profile,clemaitre58/power-profile
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
Update the pipeline to take into account the outlier rejection method to compute the RPP
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.restoration import denoise
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Reject the outliers using thresholding method
power_ride = denoise.outliers_rejection(power_ride)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
<commit_before>import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
<commit_msg>Update the pipeline to take into account the outlier rejection method to compute the RPP<commit_after>
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.restoration import denoise
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Reject the outliers using thresholding method
power_ride = denoise.outliers_rejection(power_ride)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
Update the pipeline to take into account the outlier rejection method to compute the RPPimport sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.restoration import denoise
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Reject the outliers using thresholding method
power_ride = denoise.outliers_rejection(power_ride)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
<commit_before>import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
<commit_msg>Update the pipeline to take into account the outlier rejection method to compute the RPP<commit_after>import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.restoration import denoise
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Reject the outliers using thresholding method
power_ride = denoise.outliers_rejection(power_ride)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
76a32bf058583072100246c92970fdbda9a45106
|
locations/pipelines.py
|
locations/pipelines.py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DropItem
from scrapy import signals
class GeoJsonWriterPipeline(object):
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.file = None
def spider_opened(self, spider):
self.file = open('{}.jl'.format(spider.name), 'wb')
def spider_closed(self, spider):
self.file.close()
def process_item(self, item, spider):
line = json.dumps({
"type": "Feature",
"properties": item['properties'],
"geometry": {
"type": "Point",
"coordinates": item['lon_lat']
}
}, separators=(',', ':'))
self.file.write(line)
self.file.write('\n')
return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['properties']['ref'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['properties']['ref'])
return item
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DropItem
from scrapy import signals
class GeoJsonWriterPipeline(object):
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.file = None
def spider_opened(self, spider):
self.file = open('{}.jl'.format(spider.name), 'wb')
def spider_closed(self, spider):
self.file.close()
def process_item(self, item, spider):
feature = {
"type": "Feature",
"properties": item['properties'],
}
if item.get('lon_lat'):
feature['geometry'] = {
"type": "Point",
"coordinates": item['lon_lat']
}
line = json.dumps(feature, separators=(',', ':'))
self.file.write(line)
self.file.write('\n')
return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['properties']['ref'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['properties']['ref'])
return item
|
Handle geojson feature without latlon
|
Handle geojson feature without latlon
|
Python
|
mit
|
iandees/all-the-places,iandees/all-the-places,iandees/all-the-places
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DropItem
from scrapy import signals
class GeoJsonWriterPipeline(object):
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.file = None
def spider_opened(self, spider):
self.file = open('{}.jl'.format(spider.name), 'wb')
def spider_closed(self, spider):
self.file.close()
def process_item(self, item, spider):
line = json.dumps({
"type": "Feature",
"properties": item['properties'],
"geometry": {
"type": "Point",
"coordinates": item['lon_lat']
}
}, separators=(',', ':'))
self.file.write(line)
self.file.write('\n')
return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['properties']['ref'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['properties']['ref'])
return item
Handle geojson feature without latlon
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DropItem
from scrapy import signals
class GeoJsonWriterPipeline(object):
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.file = None
def spider_opened(self, spider):
self.file = open('{}.jl'.format(spider.name), 'wb')
def spider_closed(self, spider):
self.file.close()
def process_item(self, item, spider):
feature = {
"type": "Feature",
"properties": item['properties'],
}
if item.get('lon_lat'):
feature['geometry'] = {
"type": "Point",
"coordinates": item['lon_lat']
}
line = json.dumps(feature, separators=(',', ':'))
self.file.write(line)
self.file.write('\n')
return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['properties']['ref'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['properties']['ref'])
return item
|
<commit_before># -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DropItem
from scrapy import signals
class GeoJsonWriterPipeline(object):
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.file = None
def spider_opened(self, spider):
self.file = open('{}.jl'.format(spider.name), 'wb')
def spider_closed(self, spider):
self.file.close()
def process_item(self, item, spider):
line = json.dumps({
"type": "Feature",
"properties": item['properties'],
"geometry": {
"type": "Point",
"coordinates": item['lon_lat']
}
}, separators=(',', ':'))
self.file.write(line)
self.file.write('\n')
return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['properties']['ref'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['properties']['ref'])
return item
<commit_msg>Handle geojson feature without latlon<commit_after>
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DropItem
from scrapy import signals
class GeoJsonWriterPipeline(object):
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.file = None
def spider_opened(self, spider):
self.file = open('{}.jl'.format(spider.name), 'wb')
def spider_closed(self, spider):
self.file.close()
def process_item(self, item, spider):
feature = {
"type": "Feature",
"properties": item['properties'],
}
if item.get('lon_lat'):
feature['geometry'] = {
"type": "Point",
"coordinates": item['lon_lat']
}
line = json.dumps(feature, separators=(',', ':'))
self.file.write(line)
self.file.write('\n')
return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['properties']['ref'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['properties']['ref'])
return item
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DropItem
from scrapy import signals
class GeoJsonWriterPipeline(object):
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.file = None
def spider_opened(self, spider):
self.file = open('{}.jl'.format(spider.name), 'wb')
def spider_closed(self, spider):
self.file.close()
def process_item(self, item, spider):
line = json.dumps({
"type": "Feature",
"properties": item['properties'],
"geometry": {
"type": "Point",
"coordinates": item['lon_lat']
}
}, separators=(',', ':'))
self.file.write(line)
self.file.write('\n')
return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['properties']['ref'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['properties']['ref'])
return item
Handle geojson feature without latlon# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DropItem
from scrapy import signals
class GeoJsonWriterPipeline(object):
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.file = None
def spider_opened(self, spider):
self.file = open('{}.jl'.format(spider.name), 'wb')
def spider_closed(self, spider):
self.file.close()
def process_item(self, item, spider):
feature = {
"type": "Feature",
"properties": item['properties'],
}
if item.get('lon_lat'):
feature['geometry'] = {
"type": "Point",
"coordinates": item['lon_lat']
}
line = json.dumps(feature, separators=(',', ':'))
self.file.write(line)
self.file.write('\n')
return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['properties']['ref'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['properties']['ref'])
return item
|
<commit_before># -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DropItem
from scrapy import signals
class GeoJsonWriterPipeline(object):
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.file = None
def spider_opened(self, spider):
self.file = open('{}.jl'.format(spider.name), 'wb')
def spider_closed(self, spider):
self.file.close()
def process_item(self, item, spider):
line = json.dumps({
"type": "Feature",
"properties": item['properties'],
"geometry": {
"type": "Point",
"coordinates": item['lon_lat']
}
}, separators=(',', ':'))
self.file.write(line)
self.file.write('\n')
return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['properties']['ref'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['properties']['ref'])
return item
<commit_msg>Handle geojson feature without latlon<commit_after># -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import DropItem
from scrapy import signals
class GeoJsonWriterPipeline(object):
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.file = None
def spider_opened(self, spider):
self.file = open('{}.jl'.format(spider.name), 'wb')
def spider_closed(self, spider):
self.file.close()
def process_item(self, item, spider):
feature = {
"type": "Feature",
"properties": item['properties'],
}
if item.get('lon_lat'):
feature['geometry'] = {
"type": "Point",
"coordinates": item['lon_lat']
}
line = json.dumps(feature, separators=(',', ':'))
self.file.write(line)
self.file.write('\n')
return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['properties']['ref'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['properties']['ref'])
return item
|
69b816868337683a7dd90f24711e03c5eb982416
|
kitchen/lib/__init__.py
|
kitchen/lib/__init__.py
|
import os
import json
from kitchen.settings import KITCHEN_LOCATION
def load_data(data_type):
retval = {}
nodes_dir = os.path.join(KITCHEN_LOCATION, data_type)
if not os.path.isdir(nodes_dir):
raise IOError('Invalid data type or kitchen location. Check your settings.')
for filename in os.listdir(nodes_dir):
if filename.endswith('.json'):
f = open(os.path.join(nodes_dir, filename), 'r')
retval[filename[:-5]] = json.load(f)
f.close()
return retval
|
import os
import json
from kitchen.settings import KITCHEN_LOCATION
def load_data(data_type):
retval = []
nodes_dir = os.path.join(KITCHEN_LOCATION, data_type)
if not os.path.isdir(nodes_dir):
raise IOError('Invalid data type or kitchen location. Check your settings.')
for filename in os.listdir(nodes_dir):
if filename.endswith('.json'):
entry = {'name': filename[:-5]}
f = open(os.path.join(nodes_dir, filename), 'r')
entry['data'] = json.load(f)
f.close()
retval.append(entry)
return retval
|
Use a sortable list instead of a dictionary of values for the return value
|
Use a sortable list instead of a dictionary of values for the return value
|
Python
|
apache-2.0
|
edelight/kitchen,edelight/kitchen,edelight/kitchen,edelight/kitchen
|
import os
import json
from kitchen.settings import KITCHEN_LOCATION
def load_data(data_type):
retval = {}
nodes_dir = os.path.join(KITCHEN_LOCATION, data_type)
if not os.path.isdir(nodes_dir):
raise IOError('Invalid data type or kitchen location. Check your settings.')
for filename in os.listdir(nodes_dir):
if filename.endswith('.json'):
f = open(os.path.join(nodes_dir, filename), 'r')
retval[filename[:-5]] = json.load(f)
f.close()
return retval
Use a sortable list instead of a dictionary of values for the return value
|
import os
import json
from kitchen.settings import KITCHEN_LOCATION
def load_data(data_type):
retval = []
nodes_dir = os.path.join(KITCHEN_LOCATION, data_type)
if not os.path.isdir(nodes_dir):
raise IOError('Invalid data type or kitchen location. Check your settings.')
for filename in os.listdir(nodes_dir):
if filename.endswith('.json'):
entry = {'name': filename[:-5]}
f = open(os.path.join(nodes_dir, filename), 'r')
entry['data'] = json.load(f)
f.close()
retval.append(entry)
return retval
|
<commit_before>import os
import json
from kitchen.settings import KITCHEN_LOCATION
def load_data(data_type):
retval = {}
nodes_dir = os.path.join(KITCHEN_LOCATION, data_type)
if not os.path.isdir(nodes_dir):
raise IOError('Invalid data type or kitchen location. Check your settings.')
for filename in os.listdir(nodes_dir):
if filename.endswith('.json'):
f = open(os.path.join(nodes_dir, filename), 'r')
retval[filename[:-5]] = json.load(f)
f.close()
return retval
<commit_msg>Use a sortable list instead of a dictionary of values for the return value<commit_after>
|
import os
import json
from kitchen.settings import KITCHEN_LOCATION
def load_data(data_type):
retval = []
nodes_dir = os.path.join(KITCHEN_LOCATION, data_type)
if not os.path.isdir(nodes_dir):
raise IOError('Invalid data type or kitchen location. Check your settings.')
for filename in os.listdir(nodes_dir):
if filename.endswith('.json'):
entry = {'name': filename[:-5]}
f = open(os.path.join(nodes_dir, filename), 'r')
entry['data'] = json.load(f)
f.close()
retval.append(entry)
return retval
|
import os
import json
from kitchen.settings import KITCHEN_LOCATION
def load_data(data_type):
retval = {}
nodes_dir = os.path.join(KITCHEN_LOCATION, data_type)
if not os.path.isdir(nodes_dir):
raise IOError('Invalid data type or kitchen location. Check your settings.')
for filename in os.listdir(nodes_dir):
if filename.endswith('.json'):
f = open(os.path.join(nodes_dir, filename), 'r')
retval[filename[:-5]] = json.load(f)
f.close()
return retval
Use a sortable list instead of a dictionary of values for the return valueimport os
import json
from kitchen.settings import KITCHEN_LOCATION
def load_data(data_type):
retval = []
nodes_dir = os.path.join(KITCHEN_LOCATION, data_type)
if not os.path.isdir(nodes_dir):
raise IOError('Invalid data type or kitchen location. Check your settings.')
for filename in os.listdir(nodes_dir):
if filename.endswith('.json'):
entry = {'name': filename[:-5]}
f = open(os.path.join(nodes_dir, filename), 'r')
entry['data'] = json.load(f)
f.close()
retval.append(entry)
return retval
|
<commit_before>import os
import json
from kitchen.settings import KITCHEN_LOCATION
def load_data(data_type):
retval = {}
nodes_dir = os.path.join(KITCHEN_LOCATION, data_type)
if not os.path.isdir(nodes_dir):
raise IOError('Invalid data type or kitchen location. Check your settings.')
for filename in os.listdir(nodes_dir):
if filename.endswith('.json'):
f = open(os.path.join(nodes_dir, filename), 'r')
retval[filename[:-5]] = json.load(f)
f.close()
return retval
<commit_msg>Use a sortable list instead of a dictionary of values for the return value<commit_after>import os
import json
from kitchen.settings import KITCHEN_LOCATION
def load_data(data_type):
retval = []
nodes_dir = os.path.join(KITCHEN_LOCATION, data_type)
if not os.path.isdir(nodes_dir):
raise IOError('Invalid data type or kitchen location. Check your settings.')
for filename in os.listdir(nodes_dir):
if filename.endswith('.json'):
entry = {'name': filename[:-5]}
f = open(os.path.join(nodes_dir, filename), 'r')
entry['data'] = json.load(f)
f.close()
retval.append(entry)
return retval
|
286c8151c174f11df98d6cb421252c0d61337add
|
flake8_coding.py
|
flake8_coding.py
|
# -*- coding: utf-8 -*-
import re
__version__ = '0.1.0'
class CodingChecker(object):
name = 'flake8_coding'
version = __version__
def __init__(self, tree, filename):
self.filename = filename
@classmethod
def add_options(cls, parser):
parser.add_option(
'--accept-encodings', default='latin-1, utf-8', action='store',
help="Acceptable source code encodings for `coding:` magic comment"
)
parser.config_options.append('accpet-encodings')
@classmethod
def parse_options(cls, options):
cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]
def run(self):
with open(self.filename) as f:
# PEP-263 says: a magic comment must be placed into the source
# files either as first or second line in the file
for lineno in range(1, 3):
matched = re.search('coding: (\S+)', f.readline(), re.IGNORECASE)
if matched:
if matched.group(1).lower() not in self.encodings:
yield lineno, 0, "C102 Unknown encoding found in coding magic comment", type(self)
break
else:
yield 0, 0, "C101 Coding magic comment not found", type(self)
|
# -*- coding: utf-8 -*-
import re
__version__ = '0.1.0'
class CodingChecker(object):
name = 'flake8_coding'
version = __version__
def __init__(self, tree, filename):
self.filename = filename
@classmethod
def add_options(cls, parser):
parser.add_option(
'--accept-encodings', default='latin-1, utf-8', action='store',
help="Acceptable source code encodings for `coding:` magic comment"
)
parser.config_options.append('accpet-encodings')
@classmethod
def parse_options(cls, options):
cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]
def run(self):
with open(self.filename) as f:
# PEP-263 says: a magic comment must be placed into the source
# files either as first or second line in the file
for lineno in range(1, 3):
matched = re.search('coding[:=]\s*([-\w.]+)', f.readline(), re.IGNORECASE)
if matched:
if matched.group(1).lower() not in self.encodings:
yield lineno, 0, "C102 Unknown encoding found in coding magic comment", type(self)
break
else:
yield 0, 0, "C101 Coding magic comment not found", type(self)
|
Update regexp to detect magic comment
|
Update regexp to detect magic comment
|
Python
|
apache-2.0
|
tk0miya/flake8-coding
|
# -*- coding: utf-8 -*-
"""Flake8 extension that checks for a PEP 263 ``coding:`` magic comment."""
import re

__version__ = '0.1.0'


class CodingChecker(object):
    """Checker reporting on the source-encoding declaration.

    C101: no ``coding:`` magic comment in the first two lines of the file.
    C102: the declared encoding is not in the accepted list
          (``--accept-encodings``, default ``latin-1, utf-8``).
    """
    name = 'flake8_coding'
    version = __version__

    def __init__(self, tree, filename):
        # ``tree`` is required by the flake8 checker API but unused here:
        # this checker reads the raw file text, not the AST.
        self.filename = filename

    @classmethod
    def add_options(cls, parser):
        parser.add_option(
            '--accept-encodings', default='latin-1, utf-8', action='store',
            help="Acceptable source code encodings for `coding:` magic comment"
        )
        # BUG FIX: was misspelled 'accpet-encodings', so the option could
        # never be picked up from a configuration file.
        parser.config_options.append('accept-encodings')

    @classmethod
    def parse_options(cls, options):
        # Normalise the comma-separated value to a lowercase list so the
        # comparison in run() is case-insensitive.
        cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]

    def run(self):
        with open(self.filename) as f:
            # PEP-263 says: a magic comment must be placed into the source
            # files either as first or second line in the file
            for lineno in range(1, 3):
                # BUG FIX: the old pattern 'coding: (\S+)' missed the
                # PEP 263-valid 'coding=enc' form and over-matched trailing
                # junk; accept ':' or '=' and only encoding-name characters.
                matched = re.search(r'coding[:=]\s*([-\w.]+)', f.readline(), re.IGNORECASE)
                if matched:
                    if matched.group(1).lower() not in self.encodings:
                        yield lineno, 0, "C102 Unknown encoding found in coding magic comment", type(self)
                    break
            else:
                yield 0, 0, "C101 Coding magic comment not found", type(self)
Update regexp to detect magic comment
|
# -*- coding: utf-8 -*-
import re
__version__ = '0.1.0'
class CodingChecker(object):
name = 'flake8_coding'
version = __version__
def __init__(self, tree, filename):
self.filename = filename
@classmethod
def add_options(cls, parser):
parser.add_option(
'--accept-encodings', default='latin-1, utf-8', action='store',
help="Acceptable source code encodings for `coding:` magic comment"
)
parser.config_options.append('accpet-encodings')
@classmethod
def parse_options(cls, options):
cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]
def run(self):
with open(self.filename) as f:
# PEP-263 says: a magic comment must be placed into the source
# files either as first or second line in the file
for lineno in range(1, 3):
matched = re.search('coding[:=]\s*([-\w.]+)', f.readline(), re.IGNORECASE)
if matched:
if matched.group(1).lower() not in self.encodings:
yield lineno, 0, "C102 Unknown encoding found in coding magic comment", type(self)
break
else:
yield 0, 0, "C101 Coding magic comment not found", type(self)
|
<commit_before># -*- coding: utf-8 -*-
import re
__version__ = '0.1.0'
class CodingChecker(object):
name = 'flake8_coding'
version = __version__
def __init__(self, tree, filename):
self.filename = filename
@classmethod
def add_options(cls, parser):
parser.add_option(
'--accept-encodings', default='latin-1, utf-8', action='store',
help="Acceptable source code encodings for `coding:` magic comment"
)
parser.config_options.append('accpet-encodings')
@classmethod
def parse_options(cls, options):
cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]
def run(self):
with open(self.filename) as f:
# PEP-263 says: a magic comment must be placed into the source
# files either as first or second line in the file
for lineno in range(1, 3):
matched = re.search('coding: (\S+)', f.readline(), re.IGNORECASE)
if matched:
if matched.group(1).lower() not in self.encodings:
yield lineno, 0, "C102 Unknown encoding found in coding magic comment", type(self)
break
else:
yield 0, 0, "C101 Coding magic comment not found", type(self)
<commit_msg>Update regexp to detect magic comment<commit_after>
|
# -*- coding: utf-8 -*-
import re
__version__ = '0.1.0'
class CodingChecker(object):
name = 'flake8_coding'
version = __version__
def __init__(self, tree, filename):
self.filename = filename
@classmethod
def add_options(cls, parser):
parser.add_option(
'--accept-encodings', default='latin-1, utf-8', action='store',
help="Acceptable source code encodings for `coding:` magic comment"
)
parser.config_options.append('accpet-encodings')
@classmethod
def parse_options(cls, options):
cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]
def run(self):
with open(self.filename) as f:
# PEP-263 says: a magic comment must be placed into the source
# files either as first or second line in the file
for lineno in range(1, 3):
matched = re.search('coding[:=]\s*([-\w.]+)', f.readline(), re.IGNORECASE)
if matched:
if matched.group(1).lower() not in self.encodings:
yield lineno, 0, "C102 Unknown encoding found in coding magic comment", type(self)
break
else:
yield 0, 0, "C101 Coding magic comment not found", type(self)
|
# -*- coding: utf-8 -*-
import re
__version__ = '0.1.0'
class CodingChecker(object):
name = 'flake8_coding'
version = __version__
def __init__(self, tree, filename):
self.filename = filename
@classmethod
def add_options(cls, parser):
parser.add_option(
'--accept-encodings', default='latin-1, utf-8', action='store',
help="Acceptable source code encodings for `coding:` magic comment"
)
parser.config_options.append('accpet-encodings')
@classmethod
def parse_options(cls, options):
cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]
def run(self):
with open(self.filename) as f:
# PEP-263 says: a magic comment must be placed into the source
# files either as first or second line in the file
for lineno in range(1, 3):
matched = re.search('coding: (\S+)', f.readline(), re.IGNORECASE)
if matched:
if matched.group(1).lower() not in self.encodings:
yield lineno, 0, "C102 Unknown encoding found in coding magic comment", type(self)
break
else:
yield 0, 0, "C101 Coding magic comment not found", type(self)
Update regexp to detect magic comment# -*- coding: utf-8 -*-
import re
__version__ = '0.1.0'
class CodingChecker(object):
name = 'flake8_coding'
version = __version__
def __init__(self, tree, filename):
self.filename = filename
@classmethod
def add_options(cls, parser):
parser.add_option(
'--accept-encodings', default='latin-1, utf-8', action='store',
help="Acceptable source code encodings for `coding:` magic comment"
)
parser.config_options.append('accpet-encodings')
@classmethod
def parse_options(cls, options):
cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]
def run(self):
with open(self.filename) as f:
# PEP-263 says: a magic comment must be placed into the source
# files either as first or second line in the file
for lineno in range(1, 3):
matched = re.search('coding[:=]\s*([-\w.]+)', f.readline(), re.IGNORECASE)
if matched:
if matched.group(1).lower() not in self.encodings:
yield lineno, 0, "C102 Unknown encoding found in coding magic comment", type(self)
break
else:
yield 0, 0, "C101 Coding magic comment not found", type(self)
|
<commit_before># -*- coding: utf-8 -*-
import re
__version__ = '0.1.0'
class CodingChecker(object):
name = 'flake8_coding'
version = __version__
def __init__(self, tree, filename):
self.filename = filename
@classmethod
def add_options(cls, parser):
parser.add_option(
'--accept-encodings', default='latin-1, utf-8', action='store',
help="Acceptable source code encodings for `coding:` magic comment"
)
parser.config_options.append('accpet-encodings')
@classmethod
def parse_options(cls, options):
cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]
def run(self):
with open(self.filename) as f:
# PEP-263 says: a magic comment must be placed into the source
# files either as first or second line in the file
for lineno in range(1, 3):
matched = re.search('coding: (\S+)', f.readline(), re.IGNORECASE)
if matched:
if matched.group(1).lower() not in self.encodings:
yield lineno, 0, "C102 Unknown encoding found in coding magic comment", type(self)
break
else:
yield 0, 0, "C101 Coding magic comment not found", type(self)
<commit_msg>Update regexp to detect magic comment<commit_after># -*- coding: utf-8 -*-
import re
__version__ = '0.1.0'
class CodingChecker(object):
name = 'flake8_coding'
version = __version__
def __init__(self, tree, filename):
self.filename = filename
@classmethod
def add_options(cls, parser):
parser.add_option(
'--accept-encodings', default='latin-1, utf-8', action='store',
help="Acceptable source code encodings for `coding:` magic comment"
)
parser.config_options.append('accpet-encodings')
@classmethod
def parse_options(cls, options):
cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]
def run(self):
with open(self.filename) as f:
# PEP-263 says: a magic comment must be placed into the source
# files either as first or second line in the file
for lineno in range(1, 3):
matched = re.search('coding[:=]\s*([-\w.]+)', f.readline(), re.IGNORECASE)
if matched:
if matched.group(1).lower() not in self.encodings:
yield lineno, 0, "C102 Unknown encoding found in coding magic comment", type(self)
break
else:
yield 0, 0, "C101 Coding magic comment not found", type(self)
|
7cb5a225738bfc1236ef5836aad50e216a7e7355
|
apps/blog/license_urls.py
|
apps/blog/license_urls.py
|
"""
URLCONF for the blog app.
"""
from django.conf.urls import url
from . import views, feeds
# URL patterns configuration
urlpatterns = (
# License index page
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/$', views.license_detail, name='license_detail'),
# Related articles feed
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/flux/$', feeds.LatestArticlesFeed(), name='latest_license_articles_rss'),
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/flux/atom/$', feeds.LatestArticlesAtomFeed(), name='latest_license_articles_atom'),
)
|
"""
URLCONF for the blog app (add-on urls for the license app).
"""
from django.conf.urls import url
from . import views, feeds
# URL patterns configuration
urlpatterns = (
# License index page
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/$', views.license_detail, name='license_articles_detail'),
# Related articles feed
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/flux/$', feeds.LatestArticlesForLicenseFeed(),
name='latest_license_articles_rss'),
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/flux/atom/$', feeds.LatestArticlesForLicenseAtomFeed(),
name='latest_license_articles_atom'),
)
|
Rework blog license add-on urls
|
Rework blog license add-on urls
|
Python
|
agpl-3.0
|
TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker
|
"""
URLCONF for the blog app.
"""
from django.conf.urls import url
from . import views, feeds
# URL patterns configuration
urlpatterns = (
# License index page
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/$', views.license_detail, name='license_detail'),
# Related articles feed
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/flux/$', feeds.LatestArticlesFeed(), name='latest_license_articles_rss'),
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/flux/atom/$', feeds.LatestArticlesAtomFeed(), name='latest_license_articles_atom'),
)
Rework blog license add-on urls
|
"""
URLCONF for the blog app (add-on urls for the license app).
"""
from django.conf.urls import url
from . import views, feeds
# URL patterns configuration
urlpatterns = (
# License index page
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/$', views.license_detail, name='license_articles_detail'),
# Related articles feed
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/flux/$', feeds.LatestArticlesForLicenseFeed(),
name='latest_license_articles_rss'),
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/flux/atom/$', feeds.LatestArticlesForLicenseAtomFeed(),
name='latest_license_articles_atom'),
)
|
<commit_before>"""
URLCONF for the blog app.
"""
from django.conf.urls import url
from . import views, feeds
# URL patterns configuration
urlpatterns = (
# License index page
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/$', views.license_detail, name='license_detail'),
# Related articles feed
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/flux/$', feeds.LatestArticlesFeed(), name='latest_license_articles_rss'),
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/flux/atom/$', feeds.LatestArticlesAtomFeed(), name='latest_license_articles_atom'),
)
<commit_msg>Rework blog license add-on urls<commit_after>
|
"""
URLCONF for the blog app (add-on urls for the license app).
"""
from django.conf.urls import url
from . import views, feeds
# URL patterns configuration
urlpatterns = (
# License index page
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/$', views.license_detail, name='license_articles_detail'),
# Related articles feed
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/flux/$', feeds.LatestArticlesForLicenseFeed(),
name='latest_license_articles_rss'),
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/flux/atom/$', feeds.LatestArticlesForLicenseAtomFeed(),
name='latest_license_articles_atom'),
)
|
"""
URLCONF for the blog app.
"""
from django.conf.urls import url
from . import views, feeds
# URL patterns configuration
urlpatterns = (
# License index page
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/$', views.license_detail, name='license_detail'),
# Related articles feed
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/flux/$', feeds.LatestArticlesFeed(), name='latest_license_articles_rss'),
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/flux/atom/$', feeds.LatestArticlesAtomFeed(), name='latest_license_articles_atom'),
)
Rework blog license add-on urls"""
URLCONF for the blog app (add-on urls for the license app).
"""
from django.conf.urls import url
from . import views, feeds
# URL patterns configuration
urlpatterns = (
# License index page
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/$', views.license_detail, name='license_articles_detail'),
# Related articles feed
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/flux/$', feeds.LatestArticlesForLicenseFeed(),
name='latest_license_articles_rss'),
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/flux/atom/$', feeds.LatestArticlesForLicenseAtomFeed(),
name='latest_license_articles_atom'),
)
|
<commit_before>"""
URLCONF for the blog app.
"""
from django.conf.urls import url
from . import views, feeds
# URL patterns configuration
urlpatterns = (
# License index page
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/$', views.license_detail, name='license_detail'),
# Related articles feed
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/flux/$', feeds.LatestArticlesFeed(), name='latest_license_articles_rss'),
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/flux/atom/$', feeds.LatestArticlesAtomFeed(), name='latest_license_articles_atom'),
)
<commit_msg>Rework blog license add-on urls<commit_after>"""
URLCONF for the blog app (add-on urls for the license app).
"""
from django.conf.urls import url
from . import views, feeds
# URL patterns configuration
urlpatterns = (
# License index page
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/$', views.license_detail, name='license_articles_detail'),
# Related articles feed
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/flux/$', feeds.LatestArticlesForLicenseFeed(),
name='latest_license_articles_rss'),
url(r'^(?P<slug>[-a-zA-Z0-9_]+)/articles/flux/atom/$', feeds.LatestArticlesForLicenseAtomFeed(),
name='latest_license_articles_atom'),
)
|
5dfacded4f0dd8e7b5e7fe212fc6bfe017dcb2b5
|
games.py
|
games.py
|
"""
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"winner": 0,
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"id": 0
},
{
"id": 5
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
available_players = players.items()
player = available_players[0]
games = {}
for game_id in xrange(1, count+1):
playing = (player, random.choice(available_players[1:]))
games[game_id] = {
"id": game_id,
"logURL": "about:blank",
"winner": random.choice(playing)[1],
"updates": [
{
"status": "complete",
"time": str(now + datetime.timedelta(minutes=game_id))
}
],
"players": [{"id": player_id} for (_name, player_id) in playing]
}
return games
|
"""
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"winner": 0,
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"id": 0
},
{
"id": 5
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
available_players = players.items()
player = available_players[0]
games = {}
for game_id in xrange(1, count+1):
playing = (player, random.choice(available_players[1:]))
games[game_id] = {
"id": game_id,
"logURL": "http://about:blank",
"winner": random.choice(playing)[1],
"updates": [
{
"status": "complete",
"time": (now + datetime.timedelta(minutes=game_id)).isoformat()
}
],
"players": [{"id": player_id} for (_name, player_id) in playing]
}
return games
|
Use ISO formatted time stamps
|
Use ISO formatted time stamps
|
Python
|
bsd-3-clause
|
siggame/ng-games,siggame/ng-games,siggame/ng-games
|
"""
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"winner": 0,
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"id": 0
},
{
"id": 5
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
available_players = players.items()
player = available_players[0]
games = {}
for game_id in xrange(1, count+1):
playing = (player, random.choice(available_players[1:]))
games[game_id] = {
"id": game_id,
"logURL": "about:blank",
"winner": random.choice(playing)[1],
"updates": [
{
"status": "complete",
"time": str(now + datetime.timedelta(minutes=game_id))
}
],
"players": [{"id": player_id} for (_name, player_id) in playing]
}
return games
Use ISO formatted time stamps
|
"""
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"winner": 0,
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"id": 0
},
{
"id": 5
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
available_players = players.items()
player = available_players[0]
games = {}
for game_id in xrange(1, count+1):
playing = (player, random.choice(available_players[1:]))
games[game_id] = {
"id": game_id,
"logURL": "http://about:blank",
"winner": random.choice(playing)[1],
"updates": [
{
"status": "complete",
"time": (now + datetime.timedelta(minutes=game_id)).isoformat()
}
],
"players": [{"id": player_id} for (_name, player_id) in playing]
}
return games
|
<commit_before>"""
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"winner": 0,
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"id": 0
},
{
"id": 5
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
available_players = players.items()
player = available_players[0]
games = {}
for game_id in xrange(1, count+1):
playing = (player, random.choice(available_players[1:]))
games[game_id] = {
"id": game_id,
"logURL": "about:blank",
"winner": random.choice(playing)[1],
"updates": [
{
"status": "complete",
"time": str(now + datetime.timedelta(minutes=game_id))
}
],
"players": [{"id": player_id} for (_name, player_id) in playing]
}
return games
<commit_msg>Use ISO formatted time stamps<commit_after>
|
"""
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"winner": 0,
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"id": 0
},
{
"id": 5
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
available_players = players.items()
player = available_players[0]
games = {}
for game_id in xrange(1, count+1):
playing = (player, random.choice(available_players[1:]))
games[game_id] = {
"id": game_id,
"logURL": "http://about:blank",
"winner": random.choice(playing)[1],
"updates": [
{
"status": "complete",
"time": (now + datetime.timedelta(minutes=game_id)).isoformat()
}
],
"players": [{"id": player_id} for (_name, player_id) in playing]
}
return games
|
"""
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"winner": 0,
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"id": 0
},
{
"id": 5
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
available_players = players.items()
player = available_players[0]
games = {}
for game_id in xrange(1, count+1):
playing = (player, random.choice(available_players[1:]))
games[game_id] = {
"id": game_id,
"logURL": "about:blank",
"winner": random.choice(playing)[1],
"updates": [
{
"status": "complete",
"time": str(now + datetime.timedelta(minutes=game_id))
}
],
"players": [{"id": player_id} for (_name, player_id) in playing]
}
return games
Use ISO formatted time stamps"""
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"winner": 0,
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"id": 0
},
{
"id": 5
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
available_players = players.items()
player = available_players[0]
games = {}
for game_id in xrange(1, count+1):
playing = (player, random.choice(available_players[1:]))
games[game_id] = {
"id": game_id,
"logURL": "http://about:blank",
"winner": random.choice(playing)[1],
"updates": [
{
"status": "complete",
"time": (now + datetime.timedelta(minutes=game_id)).isoformat()
}
],
"players": [{"id": player_id} for (_name, player_id) in playing]
}
return games
|
<commit_before>"""
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"winner": 0,
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"id": 0
},
{
"id": 5
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
available_players = players.items()
player = available_players[0]
games = {}
for game_id in xrange(1, count+1):
playing = (player, random.choice(available_players[1:]))
games[game_id] = {
"id": game_id,
"logURL": "about:blank",
"winner": random.choice(playing)[1],
"updates": [
{
"status": "complete",
"time": str(now + datetime.timedelta(minutes=game_id))
}
],
"players": [{"id": player_id} for (_name, player_id) in playing]
}
return games
<commit_msg>Use ISO formatted time stamps<commit_after>"""
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"winner": 0,
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"id": 0
},
{
"id": 5
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
available_players = players.items()
player = available_players[0]
games = {}
for game_id in xrange(1, count+1):
playing = (player, random.choice(available_players[1:]))
games[game_id] = {
"id": game_id,
"logURL": "http://about:blank",
"winner": random.choice(playing)[1],
"updates": [
{
"status": "complete",
"time": (now + datetime.timedelta(minutes=game_id)).isoformat()
}
],
"players": [{"id": player_id} for (_name, player_id) in playing]
}
return games
|
e45d6439d3858e70fde8f1dad1d72d8c291e8979
|
build-single-file-version.py
|
build-single-file-version.py
|
#! /usr/bin/env python
"""Pack the ``xyppy`` package into a single self-running ``xyppy.py`` file.

The output is a shebang line followed by a zip archive containing the
package modules plus a ``__main__.py`` entry point, which Python can
execute directly.
"""
import os
import stat
import zipfile
# BUG FIX (Py2/3 compat): the zip payload is binary, so on Python 3 use
# io.BytesIO; the StringIO module only exists on Python 2.
try:
    from StringIO import StringIO
except ImportError:
    from io import BytesIO as StringIO

package_dir = 'xyppy'
python_directive = '#!/usr/bin/env python'

packed = StringIO()
packed_writer = zipfile.ZipFile(packed, 'w', zipfile.ZIP_DEFLATED)
for fname in os.listdir(package_dir):
    fpath = os.path.join(package_dir, fname)
    if os.path.isfile(fpath):
        packed_writer.write(fpath)
# Entry point run when the resulting zip-app file is executed.
packed_writer.writestr('__main__.py', '''
from xyppy import __main__
if __name__ == '__main__':
    __main__.main()
''')
packed_writer.close()

pyfile = package_dir + '.py'
with open(pyfile, 'wb') as f:
    # BUG FIX: the file is opened in binary mode, so the shebang must be
    # encoded to bytes before writing (writing str fails on Python 3).
    f.write((python_directive + '\n').encode('ascii'))
    f.write(packed.getvalue())
# rwx for the owner, read-only for group and others.
os.chmod(pyfile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
|
#! /usr/bin/env python
"""Bundle the ``xyppy`` package into one self-running ``xyppy.py`` file."""
import os
import stat
import zipfile
try:
    from StringIO import StringIO          # Python 2
except ImportError:
    from io import BytesIO as StringIO     # Python 3: zip data is binary

package_dir = 'xyppy'
python_directive = '#!/usr/bin/env python'

# Build the zip archive of the package in memory.
zip_buffer = StringIO()
archive = zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED)
for entry in os.listdir(package_dir):
    entry_path = os.path.join(package_dir, entry)
    if not os.path.isfile(entry_path):
        continue
    archive.write(entry_path)
# Entry point run when the resulting zip-app file is executed.
archive.writestr('__main__.py', '''
from xyppy import __main__
if __name__ == '__main__':
    __main__.main()
''')
archive.close()

# Emit the shebang (encoded: the file is opened in binary mode) followed
# by the raw zip bytes, then mark the result owner-executable.
output_name = package_dir + '.py'
with open(output_name, 'wb') as out:
    out.write((python_directive + '\n').encode('ascii'))
    out.write(zip_buffer.getvalue())
os.chmod(output_name, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
|
Make the build script P2/3 compatible
|
Make the build script P2/3 compatible
|
Python
|
mit
|
theinternetftw/xyppy
|
#! /usr/bin/env python
import os
import stat
import zipfile
import StringIO
package_dir = 'xyppy'
python_directive = '#!/usr/bin/env python'
packed = StringIO.StringIO()
packed_writer = zipfile.ZipFile(packed, 'w', zipfile.ZIP_DEFLATED)
for fname in os.listdir(package_dir):
fpath = os.path.join(package_dir, fname)
if os.path.isfile(fpath):
packed_writer.write(fpath)
packed_writer.writestr('__main__.py', '''
from xyppy import __main__
if __name__ == '__main__':
__main__.main()
''')
packed_writer.close()
pyfile = package_dir + '.py'
with open(pyfile, 'wb') as f:
f.write(python_directive + '\n')
f.write(packed.getvalue())
os.chmod(pyfile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
Make the build script P2/3 compatible
|
#! /usr/bin/env python
import os
import stat
import zipfile
try:
from StringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
package_dir = 'xyppy'
python_directive = '#!/usr/bin/env python'
packed = StringIO()
packed_writer = zipfile.ZipFile(packed, 'w', zipfile.ZIP_DEFLATED)
for fname in os.listdir(package_dir):
fpath = os.path.join(package_dir, fname)
if os.path.isfile(fpath):
packed_writer.write(fpath)
packed_writer.writestr('__main__.py', '''
from xyppy import __main__
if __name__ == '__main__':
__main__.main()
''')
packed_writer.close()
pyfile = package_dir + '.py'
with open(pyfile, 'wb') as f:
shebang = bytes((python_directive + '\n').encode('ascii'))
f.write(shebang)
f.write(packed.getvalue())
os.chmod(pyfile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
|
<commit_before>#! /usr/bin/env python
import os
import stat
import zipfile
import StringIO
package_dir = 'xyppy'
python_directive = '#!/usr/bin/env python'
packed = StringIO.StringIO()
packed_writer = zipfile.ZipFile(packed, 'w', zipfile.ZIP_DEFLATED)
for fname in os.listdir(package_dir):
fpath = os.path.join(package_dir, fname)
if os.path.isfile(fpath):
packed_writer.write(fpath)
packed_writer.writestr('__main__.py', '''
from xyppy import __main__
if __name__ == '__main__':
__main__.main()
''')
packed_writer.close()
pyfile = package_dir + '.py'
with open(pyfile, 'wb') as f:
f.write(python_directive + '\n')
f.write(packed.getvalue())
os.chmod(pyfile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
<commit_msg>Make the build script P2/3 compatible<commit_after>
|
#! /usr/bin/env python
import os
import stat
import zipfile
try:
from StringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
package_dir = 'xyppy'
python_directive = '#!/usr/bin/env python'
packed = StringIO()
packed_writer = zipfile.ZipFile(packed, 'w', zipfile.ZIP_DEFLATED)
for fname in os.listdir(package_dir):
fpath = os.path.join(package_dir, fname)
if os.path.isfile(fpath):
packed_writer.write(fpath)
packed_writer.writestr('__main__.py', '''
from xyppy import __main__
if __name__ == '__main__':
__main__.main()
''')
packed_writer.close()
pyfile = package_dir + '.py'
with open(pyfile, 'wb') as f:
shebang = bytes((python_directive + '\n').encode('ascii'))
f.write(shebang)
f.write(packed.getvalue())
os.chmod(pyfile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
|
#! /usr/bin/env python
import os
import stat
import zipfile
import StringIO
package_dir = 'xyppy'
python_directive = '#!/usr/bin/env python'
packed = StringIO.StringIO()
packed_writer = zipfile.ZipFile(packed, 'w', zipfile.ZIP_DEFLATED)
for fname in os.listdir(package_dir):
fpath = os.path.join(package_dir, fname)
if os.path.isfile(fpath):
packed_writer.write(fpath)
packed_writer.writestr('__main__.py', '''
from xyppy import __main__
if __name__ == '__main__':
__main__.main()
''')
packed_writer.close()
pyfile = package_dir + '.py'
with open(pyfile, 'wb') as f:
f.write(python_directive + '\n')
f.write(packed.getvalue())
os.chmod(pyfile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
Make the build script P2/3 compatible#! /usr/bin/env python
import os
import stat
import zipfile
try:
from StringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
package_dir = 'xyppy'
python_directive = '#!/usr/bin/env python'
packed = StringIO()
packed_writer = zipfile.ZipFile(packed, 'w', zipfile.ZIP_DEFLATED)
for fname in os.listdir(package_dir):
fpath = os.path.join(package_dir, fname)
if os.path.isfile(fpath):
packed_writer.write(fpath)
packed_writer.writestr('__main__.py', '''
from xyppy import __main__
if __name__ == '__main__':
__main__.main()
''')
packed_writer.close()
pyfile = package_dir + '.py'
with open(pyfile, 'wb') as f:
shebang = bytes((python_directive + '\n').encode('ascii'))
f.write(shebang)
f.write(packed.getvalue())
os.chmod(pyfile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
|
<commit_before>#! /usr/bin/env python
import os
import stat
import zipfile
import StringIO
package_dir = 'xyppy'
python_directive = '#!/usr/bin/env python'
packed = StringIO.StringIO()
packed_writer = zipfile.ZipFile(packed, 'w', zipfile.ZIP_DEFLATED)
for fname in os.listdir(package_dir):
fpath = os.path.join(package_dir, fname)
if os.path.isfile(fpath):
packed_writer.write(fpath)
packed_writer.writestr('__main__.py', '''
from xyppy import __main__
if __name__ == '__main__':
__main__.main()
''')
packed_writer.close()
pyfile = package_dir + '.py'
with open(pyfile, 'wb') as f:
f.write(python_directive + '\n')
f.write(packed.getvalue())
os.chmod(pyfile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
<commit_msg>Make the build script P2/3 compatible<commit_after>#! /usr/bin/env python
import os
import stat
import zipfile
try:
from StringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
package_dir = 'xyppy'
python_directive = '#!/usr/bin/env python'
packed = StringIO()
packed_writer = zipfile.ZipFile(packed, 'w', zipfile.ZIP_DEFLATED)
for fname in os.listdir(package_dir):
fpath = os.path.join(package_dir, fname)
if os.path.isfile(fpath):
packed_writer.write(fpath)
packed_writer.writestr('__main__.py', '''
from xyppy import __main__
if __name__ == '__main__':
__main__.main()
''')
packed_writer.close()
pyfile = package_dir + '.py'
with open(pyfile, 'wb') as f:
shebang = bytes((python_directive + '\n').encode('ascii'))
f.write(shebang)
f.write(packed.getvalue())
os.chmod(pyfile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
|
38964f0f840a7b60f5ce65ca2857789d92b133b5
|
django_base64field/tests.py
|
django_base64field/tests.py
|
from django.db import models
from django.test import TestCase
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
    # Base64 "encoded key" — presumably filled from the primary key on
    # save by Base64Field (confirm against the field implementation).
    ek = Base64Field()
    name = models.CharField(max_length=13)
class Continent(models.Model):
    # Base64 encoded key, same convention as Planet.ek.
    ek = Base64Field()
    name = models.CharField(max_length=13)
    # FK deliberately targets the ``ek`` column rather than the default
    # primary key (to_field='ek').
    planet = models.ForeignKey(Planet, to_field='ek')
class TestBase64Field(TestCase):
    """Tests for the automatic population of the ``ek`` Base64Field."""

    def test_field_is_none_after_creation(self):
        # Right after create() the base64 key is still unset (empty string
        # or None); only the primary key is guaranteed to exist.
        planet = Planet.objects.create(name='Fucking Earth')
        self.assertIn(planet.ek, ['', None])
        self.assertIsNotNone(planet.pk)

    def test_field_not_none_after_saved(self):
        # NOTE(review): Base64Field appears to store base64(pk) in ``ek``
        # on save — confirm against the field implementation.
        planet = Planet.objects.create(name='Little Planet')
        base64_key = base64.encode(planet.pk)
        saved_planet = Planet.objects.get(pk=planet.pk)
        self.assertEqual(saved_planet.ek, base64_key)
|
from django.db import models
from django.test import TestCase
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
    # Base64 "encoded key" — presumably filled from the primary key on
    # save by Base64Field (confirm against the field implementation).
    ek = Base64Field()
    name = models.CharField(
        default='Fucker',
        max_length=103
    )
class Continent(models.Model):
    # Base64 encoded key, same convention as Planet.ek.
    ek = Base64Field()
    name = models.CharField(
        default='Suckers!',
        max_length=13
    )
    # FK deliberately targets the ``ek`` column rather than the default
    # primary key (to_field='ek').
    planet = models.ForeignKey(Planet, to_field='ek')
class TestBase64Field(TestCase):
    """Tests for the automatic population of the ``ek`` Base64Field."""

    def test_field_is_none_after_creation(self):
        # Right after create() the base64 key is still unset (empty string
        # or None); only the primary key is guaranteed to exist.
        planet = Planet.objects.create(name='Fucking Earth')
        self.assertIn(planet.ek, ['', None])
        self.assertIsNotNone(planet.pk)

    def test_field_not_none_after_saved(self):
        # NOTE(review): Base64Field appears to store base64(pk) in ``ek``
        # on save — confirm against the field implementation.
        planet = Planet.objects.create(name='Little Planet')
        base64_key = base64.encode(planet.pk)
        saved_planet = Planet.objects.get(pk=planet.pk)
        self.assertEqual(saved_planet.ek, base64_key)
|
Make fields on model have defaults value
|
Make fields on model have defaults value
Like who cares for their default value
|
Python
|
bsd-3-clause
|
Alir3z4/django-base64field
|
from django.db import models
from django.test import TestCase
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(max_length=13)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(max_length=13)
planet = models.ForeignKey(Planet, to_field='ek')
class TestBase64Field(TestCase):
def test_field_is_none_after_creation(self):
planet = Planet.objects.create(name='Fucking Earth')
self.assertIn(planet.ek, ['', None])
self.assertIsNotNone(planet.pk)
def test_field_not_none_after_saved(self):
planet = Planet.objects.create(name='Little Planet')
base64_key = base64.encode(planet.pk)
saved_planet = Planet.objects.get(pk=planet.pk)
self.assertEqual(saved_planet.ek, base64_key)
Make fields on model have defaults value
Like who cares for their default value
|
from django.db import models
from django.test import TestCase
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class TestBase64Field(TestCase):
def test_field_is_none_after_creation(self):
planet = Planet.objects.create(name='Fucking Earth')
self.assertIn(planet.ek, ['', None])
self.assertIsNotNone(planet.pk)
def test_field_not_none_after_saved(self):
planet = Planet.objects.create(name='Little Planet')
base64_key = base64.encode(planet.pk)
saved_planet = Planet.objects.get(pk=planet.pk)
self.assertEqual(saved_planet.ek, base64_key)
|
<commit_before>from django.db import models
from django.test import TestCase
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(max_length=13)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(max_length=13)
planet = models.ForeignKey(Planet, to_field='ek')
class TestBase64Field(TestCase):
def test_field_is_none_after_creation(self):
planet = Planet.objects.create(name='Fucking Earth')
self.assertIn(planet.ek, ['', None])
self.assertIsNotNone(planet.pk)
def test_field_not_none_after_saved(self):
planet = Planet.objects.create(name='Little Planet')
base64_key = base64.encode(planet.pk)
saved_planet = Planet.objects.get(pk=planet.pk)
self.assertEqual(saved_planet.ek, base64_key)
<commit_msg>Make fields on model have defaults value
Like who cares for their default value<commit_after>
|
from django.db import models
from django.test import TestCase
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class TestBase64Field(TestCase):
def test_field_is_none_after_creation(self):
planet = Planet.objects.create(name='Fucking Earth')
self.assertIn(planet.ek, ['', None])
self.assertIsNotNone(planet.pk)
def test_field_not_none_after_saved(self):
planet = Planet.objects.create(name='Little Planet')
base64_key = base64.encode(planet.pk)
saved_planet = Planet.objects.get(pk=planet.pk)
self.assertEqual(saved_planet.ek, base64_key)
|
from django.db import models
from django.test import TestCase
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(max_length=13)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(max_length=13)
planet = models.ForeignKey(Planet, to_field='ek')
class TestBase64Field(TestCase):
def test_field_is_none_after_creation(self):
planet = Planet.objects.create(name='Fucking Earth')
self.assertIn(planet.ek, ['', None])
self.assertIsNotNone(planet.pk)
def test_field_not_none_after_saved(self):
planet = Planet.objects.create(name='Little Planet')
base64_key = base64.encode(planet.pk)
saved_planet = Planet.objects.get(pk=planet.pk)
self.assertEqual(saved_planet.ek, base64_key)
Make fields on model have defaults value
Like who cares for their default valuefrom django.db import models
from django.test import TestCase
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class TestBase64Field(TestCase):
def test_field_is_none_after_creation(self):
planet = Planet.objects.create(name='Fucking Earth')
self.assertIn(planet.ek, ['', None])
self.assertIsNotNone(planet.pk)
def test_field_not_none_after_saved(self):
planet = Planet.objects.create(name='Little Planet')
base64_key = base64.encode(planet.pk)
saved_planet = Planet.objects.get(pk=planet.pk)
self.assertEqual(saved_planet.ek, base64_key)
|
<commit_before>from django.db import models
from django.test import TestCase
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(max_length=13)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(max_length=13)
planet = models.ForeignKey(Planet, to_field='ek')
class TestBase64Field(TestCase):
def test_field_is_none_after_creation(self):
planet = Planet.objects.create(name='Fucking Earth')
self.assertIn(planet.ek, ['', None])
self.assertIsNotNone(planet.pk)
def test_field_not_none_after_saved(self):
planet = Planet.objects.create(name='Little Planet')
base64_key = base64.encode(planet.pk)
saved_planet = Planet.objects.get(pk=planet.pk)
self.assertEqual(saved_planet.ek, base64_key)
<commit_msg>Make fields on model have defaults value
Like who cares for their default value<commit_after>from django.db import models
from django.test import TestCase
from django.utils.baseconv import base64
from django_base64field.fields import Base64Field
class Planet(models.Model):
ek = Base64Field()
name = models.CharField(
default='Fucker',
max_length=103
)
class Continent(models.Model):
ek = Base64Field()
name = models.CharField(
default='Suckers!',
max_length=13
)
planet = models.ForeignKey(Planet, to_field='ek')
class TestBase64Field(TestCase):
def test_field_is_none_after_creation(self):
planet = Planet.objects.create(name='Fucking Earth')
self.assertIn(planet.ek, ['', None])
self.assertIsNotNone(planet.pk)
def test_field_not_none_after_saved(self):
planet = Planet.objects.create(name='Little Planet')
base64_key = base64.encode(planet.pk)
saved_planet = Planet.objects.get(pk=planet.pk)
self.assertEqual(saved_planet.ek, base64_key)
|
ee5af231a4faff8dd3aab7d6ae6984f95bfe892c
|
search/transforms.py
|
search/transforms.py
|
class Transforms:
convert_rules = {
"committee": {
"id": "id",
"title": "name",
"description": ["info", "about"]
},
"committee-meeting": {
"id": "id",
"title": "title",
"description": ["content", 0, "summary"],
"fulltext": ["content", 0, "body"]
},
"member": {
"id": "id",
"title": "name",
"description": "bio"
},
"bill": {
"id": "id",
"title": "name"
},
"hansard": {
"id": "id",
"title": "title",
"fulltext": "body"
}
}
|
class Transforms:
convert_rules = {
"committee": {
"id": "id",
"title": "name",
"description": ["info", "about"]
},
"committee-meeting": {
"id": "id",
"title": "title",
"description": ["content", 0, "summary"],
"fulltext": ["content", 0, "body"]
},
"member": {
"id": "id",
"title": "name",
"description": "bio"
},
"bill": {
"id": "id",
"title": "title",
"description": "bill_code"
},
"hansard": {
"id": "id",
"title": "title",
"fulltext": "body"
}
}
data_types = ['committee', 'committee-meeting', 'member', 'bill', 'hansard']
|
Move allowed data types out of search and fix for bills
|
Move allowed data types out of search and fix for bills
|
Python
|
apache-2.0
|
Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2
|
class Transforms:
convert_rules = {
"committee": {
"id": "id",
"title": "name",
"description": ["info", "about"]
},
"committee-meeting": {
"id": "id",
"title": "title",
"description": ["content", 0, "summary"],
"fulltext": ["content", 0, "body"]
},
"member": {
"id": "id",
"title": "name",
"description": "bio"
},
"bill": {
"id": "id",
"title": "name"
},
"hansard": {
"id": "id",
"title": "title",
"fulltext": "body"
}
}Move allowed data types out of search and fix for bills
|
class Transforms:
convert_rules = {
"committee": {
"id": "id",
"title": "name",
"description": ["info", "about"]
},
"committee-meeting": {
"id": "id",
"title": "title",
"description": ["content", 0, "summary"],
"fulltext": ["content", 0, "body"]
},
"member": {
"id": "id",
"title": "name",
"description": "bio"
},
"bill": {
"id": "id",
"title": "title",
"description": "bill_code"
},
"hansard": {
"id": "id",
"title": "title",
"fulltext": "body"
}
}
data_types = ['committee', 'committee-meeting', 'member', 'bill', 'hansard']
|
<commit_before>class Transforms:
convert_rules = {
"committee": {
"id": "id",
"title": "name",
"description": ["info", "about"]
},
"committee-meeting": {
"id": "id",
"title": "title",
"description": ["content", 0, "summary"],
"fulltext": ["content", 0, "body"]
},
"member": {
"id": "id",
"title": "name",
"description": "bio"
},
"bill": {
"id": "id",
"title": "name"
},
"hansard": {
"id": "id",
"title": "title",
"fulltext": "body"
}
}<commit_msg>Move allowed data types out of search and fix for bills<commit_after>
|
class Transforms:
convert_rules = {
"committee": {
"id": "id",
"title": "name",
"description": ["info", "about"]
},
"committee-meeting": {
"id": "id",
"title": "title",
"description": ["content", 0, "summary"],
"fulltext": ["content", 0, "body"]
},
"member": {
"id": "id",
"title": "name",
"description": "bio"
},
"bill": {
"id": "id",
"title": "title",
"description": "bill_code"
},
"hansard": {
"id": "id",
"title": "title",
"fulltext": "body"
}
}
data_types = ['committee', 'committee-meeting', 'member', 'bill', 'hansard']
|
class Transforms:
convert_rules = {
"committee": {
"id": "id",
"title": "name",
"description": ["info", "about"]
},
"committee-meeting": {
"id": "id",
"title": "title",
"description": ["content", 0, "summary"],
"fulltext": ["content", 0, "body"]
},
"member": {
"id": "id",
"title": "name",
"description": "bio"
},
"bill": {
"id": "id",
"title": "name"
},
"hansard": {
"id": "id",
"title": "title",
"fulltext": "body"
}
}Move allowed data types out of search and fix for billsclass Transforms:
convert_rules = {
"committee": {
"id": "id",
"title": "name",
"description": ["info", "about"]
},
"committee-meeting": {
"id": "id",
"title": "title",
"description": ["content", 0, "summary"],
"fulltext": ["content", 0, "body"]
},
"member": {
"id": "id",
"title": "name",
"description": "bio"
},
"bill": {
"id": "id",
"title": "title",
"description": "bill_code"
},
"hansard": {
"id": "id",
"title": "title",
"fulltext": "body"
}
}
data_types = ['committee', 'committee-meeting', 'member', 'bill', 'hansard']
|
<commit_before>class Transforms:
convert_rules = {
"committee": {
"id": "id",
"title": "name",
"description": ["info", "about"]
},
"committee-meeting": {
"id": "id",
"title": "title",
"description": ["content", 0, "summary"],
"fulltext": ["content", 0, "body"]
},
"member": {
"id": "id",
"title": "name",
"description": "bio"
},
"bill": {
"id": "id",
"title": "name"
},
"hansard": {
"id": "id",
"title": "title",
"fulltext": "body"
}
}<commit_msg>Move allowed data types out of search and fix for bills<commit_after>class Transforms:
convert_rules = {
"committee": {
"id": "id",
"title": "name",
"description": ["info", "about"]
},
"committee-meeting": {
"id": "id",
"title": "title",
"description": ["content", 0, "summary"],
"fulltext": ["content", 0, "body"]
},
"member": {
"id": "id",
"title": "name",
"description": "bio"
},
"bill": {
"id": "id",
"title": "title",
"description": "bill_code"
},
"hansard": {
"id": "id",
"title": "title",
"fulltext": "body"
}
}
data_types = ['committee', 'committee-meeting', 'member', 'bill', 'hansard']
|
abd3542113baf801d76c740a2435c69fcda86b42
|
src/DecodeTest.py
|
src/DecodeTest.py
|
import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()
|
import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
"""
"""
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
def test_decoder_decode_send(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost", "msg":"hello queue a"})
msg = "SEND\naccept-version:1.2\nhost:localhost\n\nhello queue a\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()
|
Send and Connect frame tests
|
Send and Connect frame tests
|
Python
|
mit
|
phan91/STOMP_agilis
|
import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()Send and Connect frame tests
|
import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
"""
"""
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
def test_decoder_decode_send(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost", "msg":"hello queue a"})
msg = "SEND\naccept-version:1.2\nhost:localhost\n\nhello queue a\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()<commit_msg>Send and Connect frame tests<commit_after>
|
import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
"""
"""
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
def test_decoder_decode_send(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost", "msg":"hello queue a"})
msg = "SEND\naccept-version:1.2\nhost:localhost\n\nhello queue a\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()
|
import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()Send and Connect frame testsimport unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
"""
"""
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
def test_decoder_decode_send(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost", "msg":"hello queue a"})
msg = "SEND\naccept-version:1.2\nhost:localhost\n\nhello queue a\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()<commit_msg>Send and Connect frame tests<commit_after>import unittest
from Decode import Decoder
import Frames
class TestDecoder(unittest.TestCase):
"""
"""
def setUp(self):
self.decoder = Decoder()
def test_decoder_get_frame_class(self):
command = 'SEND'
self.assertEquals(self.decoder.get_frame_class(command), Frames.SEND)
def test_decoder_invalid_frame_class(self):
command = '---'
self.assertRaises(Exception, self.decoder.get_frame_class, command)
def test_decoder_decode_connect(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost"})
msg = "CONNECT\naccept-version:1.2\nhost:localhost\n\n\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
def test_decoder_decode_send(self):
testFrame = Frames.CONNECT(**{"accept-version":"1.2", "host":"localhost", "msg":"hello queue a"})
msg = "SEND\naccept-version:1.2\nhost:localhost\n\nhello queue a\x00"
self.assertEquals(self.decoder.decode(msg).__dict__, testFrame.__dict__)
if __name__ == '__main__':
unittest.main()
|
3a2daedd8bb198f5ec3fd06a0061ae06e6fb139e
|
tests/test_arpreq.py
|
tests/test_arpreq.py
|
import sys
from socket import htonl, inet_ntoa
from struct import pack
import pytest
from arpreq import arpreq
def test_localhost():
assert arpreq('127.0.0.1') == '00:00:00:00:00:00'
def decode_address(value):
return inet_ntoa(pack(">I", htonl(int(value, base=16))))
def decode_flags(value):
return int(value, base=16)
def get_default_gateway():
with open("/proc/net/route") as f:
next(f)
for line in f:
fields = line.strip().split()
destination = decode_address(fields[1])
mask = decode_address(fields[7])
gateway = decode_address(fields[2])
flags = decode_flags(fields[3])
if destination == '0.0.0.0' and mask == '0.0.0.0' and flags & 0x2:
return gateway
return None
def test_default_gateway():
gateway = get_default_gateway()
if not gateway:
pytest.skip("No default gateway present.")
assert arpreq(gateway) is not None
|
import sys
from socket import htonl, inet_ntoa
from struct import pack
import pytest
from arpreq import arpreq
def test_localhost():
assert arpreq('127.0.0.1') == '00:00:00:00:00:00'
def decode_address(value):
return inet_ntoa(pack(">I", htonl(int(value, base=16))))
def decode_flags(value):
return int(value, base=16)
def get_default_gateway():
with open("/proc/net/route") as f:
next(f)
for line in f:
fields = line.strip().split()
destination = decode_address(fields[1])
mask = decode_address(fields[7])
gateway = decode_address(fields[2])
flags = decode_flags(fields[3])
if destination == '0.0.0.0' and mask == '0.0.0.0' and flags & 0x2:
return gateway
return None
def test_default_gateway():
gateway = get_default_gateway()
if not gateway:
pytest.skip("No default gateway present.")
assert arpreq(gateway) is not None
def test_illegal_argument():
with pytest.raises(ValueError):
arpreq("Foobar")
def test_illegal_type():
with pytest.raises(TypeError):
arpreq(42)
|
Add tests for ValueError and TypeError
|
Add tests for ValueError and TypeError
|
Python
|
mit
|
sebschrader/python-arpreq,sebschrader/python-arpreq,sebschrader/python-arpreq,sebschrader/python-arpreq
|
import sys
from socket import htonl, inet_ntoa
from struct import pack
import pytest
from arpreq import arpreq
def test_localhost():
assert arpreq('127.0.0.1') == '00:00:00:00:00:00'
def decode_address(value):
return inet_ntoa(pack(">I", htonl(int(value, base=16))))
def decode_flags(value):
return int(value, base=16)
def get_default_gateway():
with open("/proc/net/route") as f:
next(f)
for line in f:
fields = line.strip().split()
destination = decode_address(fields[1])
mask = decode_address(fields[7])
gateway = decode_address(fields[2])
flags = decode_flags(fields[3])
if destination == '0.0.0.0' and mask == '0.0.0.0' and flags & 0x2:
return gateway
return None
def test_default_gateway():
gateway = get_default_gateway()
if not gateway:
pytest.skip("No default gateway present.")
assert arpreq(gateway) is not None
Add tests for ValueError and TypeError
|
import sys
from socket import htonl, inet_ntoa
from struct import pack
import pytest
from arpreq import arpreq
def test_localhost():
assert arpreq('127.0.0.1') == '00:00:00:00:00:00'
def decode_address(value):
return inet_ntoa(pack(">I", htonl(int(value, base=16))))
def decode_flags(value):
return int(value, base=16)
def get_default_gateway():
with open("/proc/net/route") as f:
next(f)
for line in f:
fields = line.strip().split()
destination = decode_address(fields[1])
mask = decode_address(fields[7])
gateway = decode_address(fields[2])
flags = decode_flags(fields[3])
if destination == '0.0.0.0' and mask == '0.0.0.0' and flags & 0x2:
return gateway
return None
def test_default_gateway():
gateway = get_default_gateway()
if not gateway:
pytest.skip("No default gateway present.")
assert arpreq(gateway) is not None
def test_illegal_argument():
with pytest.raises(ValueError):
arpreq("Foobar")
def test_illegal_type():
with pytest.raises(TypeError):
arpreq(42)
|
<commit_before>import sys
from socket import htonl, inet_ntoa
from struct import pack
import pytest
from arpreq import arpreq
def test_localhost():
assert arpreq('127.0.0.1') == '00:00:00:00:00:00'
def decode_address(value):
return inet_ntoa(pack(">I", htonl(int(value, base=16))))
def decode_flags(value):
return int(value, base=16)
def get_default_gateway():
with open("/proc/net/route") as f:
next(f)
for line in f:
fields = line.strip().split()
destination = decode_address(fields[1])
mask = decode_address(fields[7])
gateway = decode_address(fields[2])
flags = decode_flags(fields[3])
if destination == '0.0.0.0' and mask == '0.0.0.0' and flags & 0x2:
return gateway
return None
def test_default_gateway():
gateway = get_default_gateway()
if not gateway:
pytest.skip("No default gateway present.")
assert arpreq(gateway) is not None
<commit_msg>Add tests for ValueError and TypeError<commit_after>
|
import sys
from socket import htonl, inet_ntoa
from struct import pack
import pytest
from arpreq import arpreq
def test_localhost():
assert arpreq('127.0.0.1') == '00:00:00:00:00:00'
def decode_address(value):
return inet_ntoa(pack(">I", htonl(int(value, base=16))))
def decode_flags(value):
return int(value, base=16)
def get_default_gateway():
with open("/proc/net/route") as f:
next(f)
for line in f:
fields = line.strip().split()
destination = decode_address(fields[1])
mask = decode_address(fields[7])
gateway = decode_address(fields[2])
flags = decode_flags(fields[3])
if destination == '0.0.0.0' and mask == '0.0.0.0' and flags & 0x2:
return gateway
return None
def test_default_gateway():
gateway = get_default_gateway()
if not gateway:
pytest.skip("No default gateway present.")
assert arpreq(gateway) is not None
def test_illegal_argument():
with pytest.raises(ValueError):
arpreq("Foobar")
def test_illegal_type():
with pytest.raises(TypeError):
arpreq(42)
|
import sys
from socket import htonl, inet_ntoa
from struct import pack
import pytest
from arpreq import arpreq
def test_localhost():
assert arpreq('127.0.0.1') == '00:00:00:00:00:00'
def decode_address(value):
return inet_ntoa(pack(">I", htonl(int(value, base=16))))
def decode_flags(value):
return int(value, base=16)
def get_default_gateway():
with open("/proc/net/route") as f:
next(f)
for line in f:
fields = line.strip().split()
destination = decode_address(fields[1])
mask = decode_address(fields[7])
gateway = decode_address(fields[2])
flags = decode_flags(fields[3])
if destination == '0.0.0.0' and mask == '0.0.0.0' and flags & 0x2:
return gateway
return None
def test_default_gateway():
gateway = get_default_gateway()
if not gateway:
pytest.skip("No default gateway present.")
assert arpreq(gateway) is not None
Add tests for ValueError and TypeErrorimport sys
from socket import htonl, inet_ntoa
from struct import pack
import pytest
from arpreq import arpreq
def test_localhost():
assert arpreq('127.0.0.1') == '00:00:00:00:00:00'
def decode_address(value):
return inet_ntoa(pack(">I", htonl(int(value, base=16))))
def decode_flags(value):
return int(value, base=16)
def get_default_gateway():
with open("/proc/net/route") as f:
next(f)
for line in f:
fields = line.strip().split()
destination = decode_address(fields[1])
mask = decode_address(fields[7])
gateway = decode_address(fields[2])
flags = decode_flags(fields[3])
if destination == '0.0.0.0' and mask == '0.0.0.0' and flags & 0x2:
return gateway
return None
def test_default_gateway():
gateway = get_default_gateway()
if not gateway:
pytest.skip("No default gateway present.")
assert arpreq(gateway) is not None
def test_illegal_argument():
with pytest.raises(ValueError):
arpreq("Foobar")
def test_illegal_type():
with pytest.raises(TypeError):
arpreq(42)
|
<commit_before>import sys
from socket import htonl, inet_ntoa
from struct import pack
import pytest
from arpreq import arpreq
def test_localhost():
assert arpreq('127.0.0.1') == '00:00:00:00:00:00'
def decode_address(value):
return inet_ntoa(pack(">I", htonl(int(value, base=16))))
def decode_flags(value):
return int(value, base=16)
def get_default_gateway():
with open("/proc/net/route") as f:
next(f)
for line in f:
fields = line.strip().split()
destination = decode_address(fields[1])
mask = decode_address(fields[7])
gateway = decode_address(fields[2])
flags = decode_flags(fields[3])
if destination == '0.0.0.0' and mask == '0.0.0.0' and flags & 0x2:
return gateway
return None
def test_default_gateway():
gateway = get_default_gateway()
if not gateway:
pytest.skip("No default gateway present.")
assert arpreq(gateway) is not None
<commit_msg>Add tests for ValueError and TypeError<commit_after>import sys
from socket import htonl, inet_ntoa
from struct import pack
import pytest
from arpreq import arpreq
def test_localhost():
assert arpreq('127.0.0.1') == '00:00:00:00:00:00'
def decode_address(value):
return inet_ntoa(pack(">I", htonl(int(value, base=16))))
def decode_flags(value):
return int(value, base=16)
def get_default_gateway():
with open("/proc/net/route") as f:
next(f)
for line in f:
fields = line.strip().split()
destination = decode_address(fields[1])
mask = decode_address(fields[7])
gateway = decode_address(fields[2])
flags = decode_flags(fields[3])
if destination == '0.0.0.0' and mask == '0.0.0.0' and flags & 0x2:
return gateway
return None
def test_default_gateway():
gateway = get_default_gateway()
if not gateway:
pytest.skip("No default gateway present.")
assert arpreq(gateway) is not None
def test_illegal_argument():
with pytest.raises(ValueError):
arpreq("Foobar")
def test_illegal_type():
with pytest.raises(TypeError):
arpreq(42)
|
0fa33bb58d6b042e79c52a6f33454140a7150f64
|
lithium/blog/views.py
|
lithium/blog/views.py
|
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
|
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
kwargs['allow_future'] = True
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
|
Allow users with the permission 'blog.can_read_private' to see posts from the future.
|
Allow users with the permission 'blog.can_read_private' to see posts from the future.
|
Python
|
bsd-2-clause
|
kylef/lithium
|
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
Allow users with the permission 'blog.can_read_private' to see posts from the future.
|
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
kwargs['allow_future'] = True
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
|
<commit_before>from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
<commit_msg>Allow users with the permission 'blog.can_read_private' to see posts from the future.<commit_after>
|
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
kwargs['allow_future'] = True
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
|
from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
Allow users with the permission 'blog.can_read_private' to see posts from the future.from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
kwargs['allow_future'] = True
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
|
<commit_before>from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
<commit_msg>Allow users with the permission 'blog.can_read_private' to see posts from the future.<commit_after>from lithium.blog.models import Post
def decorator(request, view, author=None, tag=None, *args, **kwargs):
"""
A view decotator to change the queryset depending on whether
a user may read private posts.
"""
if request.user.has_perm('blog.can_read_private'):
kwargs['queryset'] = Post.on_site.all(allow_private=True)
kwargs['allow_future'] = True
if author:
kwargs['queryset'] = kwargs['queryset'].filter(author__username=author)
if tag:
kwargs['queryset'] = kwargs['queryset'].filter(category__slug=tag)
return view(request, *args, **kwargs)
|
af88bfaece839d044ccb0781a15c8c538979051e
|
tests/test_object.py
|
tests/test_object.py
|
#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
|
#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1',
'unicode': u'\xe7\x8c\xab'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertIsInstance(obj.unicode, unicode)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
self.assertEqual(obj.unicode, u'\xe7\x8c\xab')
|
Add test for unicode characters
|
Add test for unicode characters
|
Python
|
mit
|
panzarino/mlbgame,zachpanz88/mlbgame
|
#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
Add test for unicode characters
|
#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1',
'unicode': u'\xe7\x8c\xab'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertIsInstance(obj.unicode, unicode)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
self.assertEqual(obj.unicode, u'\xe7\x8c\xab')
|
<commit_before>#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
<commit_msg>Add test for unicode characters<commit_after>
|
#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1',
'unicode': u'\xe7\x8c\xab'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertIsInstance(obj.unicode, unicode)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
self.assertEqual(obj.unicode, u'\xe7\x8c\xab')
|
#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
Add test for unicode characters#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1',
'unicode': u'\xe7\x8c\xab'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertIsInstance(obj.unicode, unicode)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
self.assertEqual(obj.unicode, u'\xe7\x8c\xab')
|
<commit_before>#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
<commit_msg>Add test for unicode characters<commit_after>#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1',
'unicode': u'\xe7\x8c\xab'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertIsInstance(obj.unicode, unicode)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
self.assertEqual(obj.unicode, u'\xe7\x8c\xab')
|
c14f9c661e485243660970d3a76014b8e6b7f1af
|
src-python/setup.py
|
src-python/setup.py
|
from distutils.core import setup
import py2exe
setup(console=['process.py'])
|
from distutils.core import setup
import py2exe, sys
sys.argv.append('py2exe')
setup(
options = {'py2exe': {'bundle_files': 1, 'compressed': True}},
console = [{'script': "process.py"}],
zipfile = None,
)
|
Add options to generate single executable file
|
Add options to generate single executable file
|
Python
|
mit
|
yaa110/Adobe-Air-Registry-Modifier
|
from distutils.core import setup
import py2exe
setup(console=['process.py'])
Add options to generate single executable file
|
from distutils.core import setup
import py2exe, sys
sys.argv.append('py2exe')
setup(
options = {'py2exe': {'bundle_files': 1, 'compressed': True}},
console = [{'script': "process.py"}],
zipfile = None,
)
|
<commit_before>from distutils.core import setup
import py2exe
setup(console=['process.py'])
<commit_msg>Add options to generate single executable file<commit_after>
|
from distutils.core import setup
import py2exe, sys
sys.argv.append('py2exe')
setup(
options = {'py2exe': {'bundle_files': 1, 'compressed': True}},
console = [{'script': "process.py"}],
zipfile = None,
)
|
from distutils.core import setup
import py2exe
setup(console=['process.py'])
Add options to generate single executable filefrom distutils.core import setup
import py2exe, sys
sys.argv.append('py2exe')
setup(
options = {'py2exe': {'bundle_files': 1, 'compressed': True}},
console = [{'script': "process.py"}],
zipfile = None,
)
|
<commit_before>from distutils.core import setup
import py2exe
setup(console=['process.py'])
<commit_msg>Add options to generate single executable file<commit_after>from distutils.core import setup
import py2exe, sys
sys.argv.append('py2exe')
setup(
options = {'py2exe': {'bundle_files': 1, 'compressed': True}},
console = [{'script': "process.py"}],
zipfile = None,
)
|
df8ae0415f9bf10c04472fb3009e91d7c3d7e24f
|
teuthology/sentry.py
|
teuthology/sentry.py
|
from raven import Client
client = None
def get_client(ctx):
if client:
return client
dsn = ctx.teuthology_config.get('sentry_dsn')
if dsn:
client = Client(dsn=dsn)
return client
|
from raven import Client
client = None
def get_client(ctx):
global client
if client:
return client
dsn = ctx.teuthology_config.get('sentry_dsn')
if dsn:
client = Client(dsn=dsn)
return client
|
Make client a global variable
|
Make client a global variable
|
Python
|
mit
|
robbat2/teuthology,ceph/teuthology,tchaikov/teuthology,zhouyuan/teuthology,dmick/teuthology,michaelsevilla/teuthology,dreamhost/teuthology,SUSE/teuthology,t-miyamae/teuthology,caibo2014/teuthology,yghannam/teuthology,SUSE/teuthology,SUSE/teuthology,tchaikov/teuthology,michaelsevilla/teuthology,dmick/teuthology,ktdreyer/teuthology,robbat2/teuthology,ivotron/teuthology,zhouyuan/teuthology,dreamhost/teuthology,t-miyamae/teuthology,ktdreyer/teuthology,ivotron/teuthology,yghannam/teuthology,ceph/teuthology,dmick/teuthology,caibo2014/teuthology
|
from raven import Client
client = None
def get_client(ctx):
if client:
return client
dsn = ctx.teuthology_config.get('sentry_dsn')
if dsn:
client = Client(dsn=dsn)
return client
Make client a global variable
|
from raven import Client
client = None
def get_client(ctx):
global client
if client:
return client
dsn = ctx.teuthology_config.get('sentry_dsn')
if dsn:
client = Client(dsn=dsn)
return client
|
<commit_before>from raven import Client
client = None
def get_client(ctx):
if client:
return client
dsn = ctx.teuthology_config.get('sentry_dsn')
if dsn:
client = Client(dsn=dsn)
return client
<commit_msg>Make client a global variable<commit_after>
|
from raven import Client
client = None
def get_client(ctx):
global client
if client:
return client
dsn = ctx.teuthology_config.get('sentry_dsn')
if dsn:
client = Client(dsn=dsn)
return client
|
from raven import Client
client = None
def get_client(ctx):
if client:
return client
dsn = ctx.teuthology_config.get('sentry_dsn')
if dsn:
client = Client(dsn=dsn)
return client
Make client a global variablefrom raven import Client
client = None
def get_client(ctx):
global client
if client:
return client
dsn = ctx.teuthology_config.get('sentry_dsn')
if dsn:
client = Client(dsn=dsn)
return client
|
<commit_before>from raven import Client
client = None
def get_client(ctx):
if client:
return client
dsn = ctx.teuthology_config.get('sentry_dsn')
if dsn:
client = Client(dsn=dsn)
return client
<commit_msg>Make client a global variable<commit_after>from raven import Client
client = None
def get_client(ctx):
global client
if client:
return client
dsn = ctx.teuthology_config.get('sentry_dsn')
if dsn:
client = Client(dsn=dsn)
return client
|
2e3b38d102c7e15ed121651c1eac26acd9c7f399
|
grapdashboard.py
|
grapdashboard.py
|
from django.utils.translation import ugettext_lazy as _
from grappelli.dashboard import modules, Dashboard
class UQAMDashboard(Dashboard):
def __init__(self, **kwargs):
Dashboard.__init__(self, **kwargs)
self.children.append(modules.AppList(
title=_('Catalogue'),
column=1,
collapsible=False,
models=('cat.models.MuseumObject', 'parties.*', 'location.*',
'loans.models.LoanAgreement', 'condition.*',
'uqamcollections.*'),
exclude=('django.contrib.*', 'djcelery.*', 'reports.*'),
))
self.children.append(modules.AppList(
title='Data dictionary',
column=1,
models=('cat.models.*', 'loans.models.LoanPurpose'),
exclude=('cat.models.MuseumObject',)
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
title=_('Administration'),
column=1,
collapsible=True,
models=('django.contrib.*', 'djcelergy.*', 'reports.*',
'dataimport.*', 'mediaman.*'),
))
# append a recent actions module
self.children.append(modules.RecentActions(
title=_('Recent Actions'),
column=3,
collapsible=False,
limit=5,
))
|
from django.utils.translation import ugettext_lazy as _
from grappelli.dashboard import modules, Dashboard
class UQAMDashboard(Dashboard):
def __init__(self, **kwargs):
Dashboard.__init__(self, **kwargs)
self.children.append(modules.AppList(
title=_('Catalogue'),
column=1,
collapsible=False,
models=('cat.models.MuseumObject', 'parties.*', 'location.*',
'loans.models.LoanAgreement', 'condition.*',
'uqamcollections.*'),
exclude=('django.contrib.*', 'djcelery.*', 'reports.*'),
))
self.children.append(modules.AppList(
title='Data dictionary',
column=1,
models=('cat.models.*', 'loans.models.LoanPurpose'),
exclude=('cat.models.MuseumObject',)
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
title=_('Administration'),
column=1,
collapsible=True,
models=('django.contrib.*', 'djcelergy.*', 'reports.*',
'dataimport.*', 'mediaman.*'),
))
# append a recent actions module
self.children.append(modules.RecentActions(
title=_('Recent Actions'),
column=2,
collapsible=False,
limit=5,
))
self.children.append(modules.LinkList(
layout='inline',
title=_('Admin tools'),
column=2,
children=(
['Upload media', '/mediaman/bulk_upload/'],
['Filter/query items', '/admin/cat/museumobject/search'],
)
))
|
Add links to admin dashboard
|
Add links to admin dashboard
|
Python
|
bsd-3-clause
|
uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam
|
from django.utils.translation import ugettext_lazy as _
from grappelli.dashboard import modules, Dashboard
class UQAMDashboard(Dashboard):
def __init__(self, **kwargs):
Dashboard.__init__(self, **kwargs)
self.children.append(modules.AppList(
title=_('Catalogue'),
column=1,
collapsible=False,
models=('cat.models.MuseumObject', 'parties.*', 'location.*',
'loans.models.LoanAgreement', 'condition.*',
'uqamcollections.*'),
exclude=('django.contrib.*', 'djcelery.*', 'reports.*'),
))
self.children.append(modules.AppList(
title='Data dictionary',
column=1,
models=('cat.models.*', 'loans.models.LoanPurpose'),
exclude=('cat.models.MuseumObject',)
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
title=_('Administration'),
column=1,
collapsible=True,
models=('django.contrib.*', 'djcelergy.*', 'reports.*',
'dataimport.*', 'mediaman.*'),
))
# append a recent actions module
self.children.append(modules.RecentActions(
title=_('Recent Actions'),
column=3,
collapsible=False,
limit=5,
))
Add links to admin dashboard
|
from django.utils.translation import ugettext_lazy as _
from grappelli.dashboard import modules, Dashboard
class UQAMDashboard(Dashboard):
def __init__(self, **kwargs):
Dashboard.__init__(self, **kwargs)
self.children.append(modules.AppList(
title=_('Catalogue'),
column=1,
collapsible=False,
models=('cat.models.MuseumObject', 'parties.*', 'location.*',
'loans.models.LoanAgreement', 'condition.*',
'uqamcollections.*'),
exclude=('django.contrib.*', 'djcelery.*', 'reports.*'),
))
self.children.append(modules.AppList(
title='Data dictionary',
column=1,
models=('cat.models.*', 'loans.models.LoanPurpose'),
exclude=('cat.models.MuseumObject',)
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
title=_('Administration'),
column=1,
collapsible=True,
models=('django.contrib.*', 'djcelergy.*', 'reports.*',
'dataimport.*', 'mediaman.*'),
))
# append a recent actions module
self.children.append(modules.RecentActions(
title=_('Recent Actions'),
column=2,
collapsible=False,
limit=5,
))
self.children.append(modules.LinkList(
layout='inline',
title=_('Admin tools'),
column=2,
children=(
['Upload media', '/mediaman/bulk_upload/'],
['Filter/query items', '/admin/cat/museumobject/search'],
)
))
|
<commit_before>from django.utils.translation import ugettext_lazy as _
from grappelli.dashboard import modules, Dashboard
class UQAMDashboard(Dashboard):
def __init__(self, **kwargs):
Dashboard.__init__(self, **kwargs)
self.children.append(modules.AppList(
title=_('Catalogue'),
column=1,
collapsible=False,
models=('cat.models.MuseumObject', 'parties.*', 'location.*',
'loans.models.LoanAgreement', 'condition.*',
'uqamcollections.*'),
exclude=('django.contrib.*', 'djcelery.*', 'reports.*'),
))
self.children.append(modules.AppList(
title='Data dictionary',
column=1,
models=('cat.models.*', 'loans.models.LoanPurpose'),
exclude=('cat.models.MuseumObject',)
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
title=_('Administration'),
column=1,
collapsible=True,
models=('django.contrib.*', 'djcelergy.*', 'reports.*',
'dataimport.*', 'mediaman.*'),
))
# append a recent actions module
self.children.append(modules.RecentActions(
title=_('Recent Actions'),
column=3,
collapsible=False,
limit=5,
))
<commit_msg>Add links to admin dashboard<commit_after>
|
from django.utils.translation import ugettext_lazy as _
from grappelli.dashboard import modules, Dashboard
class UQAMDashboard(Dashboard):
def __init__(self, **kwargs):
Dashboard.__init__(self, **kwargs)
self.children.append(modules.AppList(
title=_('Catalogue'),
column=1,
collapsible=False,
models=('cat.models.MuseumObject', 'parties.*', 'location.*',
'loans.models.LoanAgreement', 'condition.*',
'uqamcollections.*'),
exclude=('django.contrib.*', 'djcelery.*', 'reports.*'),
))
self.children.append(modules.AppList(
title='Data dictionary',
column=1,
models=('cat.models.*', 'loans.models.LoanPurpose'),
exclude=('cat.models.MuseumObject',)
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
title=_('Administration'),
column=1,
collapsible=True,
models=('django.contrib.*', 'djcelergy.*', 'reports.*',
'dataimport.*', 'mediaman.*'),
))
# append a recent actions module
self.children.append(modules.RecentActions(
title=_('Recent Actions'),
column=2,
collapsible=False,
limit=5,
))
self.children.append(modules.LinkList(
layout='inline',
title=_('Admin tools'),
column=2,
children=(
['Upload media', '/mediaman/bulk_upload/'],
['Filter/query items', '/admin/cat/museumobject/search'],
)
))
|
from django.utils.translation import ugettext_lazy as _
from grappelli.dashboard import modules, Dashboard
class UQAMDashboard(Dashboard):
def __init__(self, **kwargs):
Dashboard.__init__(self, **kwargs)
self.children.append(modules.AppList(
title=_('Catalogue'),
column=1,
collapsible=False,
models=('cat.models.MuseumObject', 'parties.*', 'location.*',
'loans.models.LoanAgreement', 'condition.*',
'uqamcollections.*'),
exclude=('django.contrib.*', 'djcelery.*', 'reports.*'),
))
self.children.append(modules.AppList(
title='Data dictionary',
column=1,
models=('cat.models.*', 'loans.models.LoanPurpose'),
exclude=('cat.models.MuseumObject',)
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
title=_('Administration'),
column=1,
collapsible=True,
models=('django.contrib.*', 'djcelergy.*', 'reports.*',
'dataimport.*', 'mediaman.*'),
))
# append a recent actions module
self.children.append(modules.RecentActions(
title=_('Recent Actions'),
column=3,
collapsible=False,
limit=5,
))
Add links to admin dashboardfrom django.utils.translation import ugettext_lazy as _
from grappelli.dashboard import modules, Dashboard
class UQAMDashboard(Dashboard):
def __init__(self, **kwargs):
Dashboard.__init__(self, **kwargs)
self.children.append(modules.AppList(
title=_('Catalogue'),
column=1,
collapsible=False,
models=('cat.models.MuseumObject', 'parties.*', 'location.*',
'loans.models.LoanAgreement', 'condition.*',
'uqamcollections.*'),
exclude=('django.contrib.*', 'djcelery.*', 'reports.*'),
))
self.children.append(modules.AppList(
title='Data dictionary',
column=1,
models=('cat.models.*', 'loans.models.LoanPurpose'),
exclude=('cat.models.MuseumObject',)
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
title=_('Administration'),
column=1,
collapsible=True,
models=('django.contrib.*', 'djcelergy.*', 'reports.*',
'dataimport.*', 'mediaman.*'),
))
# append a recent actions module
self.children.append(modules.RecentActions(
title=_('Recent Actions'),
column=2,
collapsible=False,
limit=5,
))
self.children.append(modules.LinkList(
layout='inline',
title=_('Admin tools'),
column=2,
children=(
['Upload media', '/mediaman/bulk_upload/'],
['Filter/query items', '/admin/cat/museumobject/search'],
)
))
|
<commit_before>from django.utils.translation import ugettext_lazy as _
from grappelli.dashboard import modules, Dashboard
class UQAMDashboard(Dashboard):
def __init__(self, **kwargs):
Dashboard.__init__(self, **kwargs)
self.children.append(modules.AppList(
title=_('Catalogue'),
column=1,
collapsible=False,
models=('cat.models.MuseumObject', 'parties.*', 'location.*',
'loans.models.LoanAgreement', 'condition.*',
'uqamcollections.*'),
exclude=('django.contrib.*', 'djcelery.*', 'reports.*'),
))
self.children.append(modules.AppList(
title='Data dictionary',
column=1,
models=('cat.models.*', 'loans.models.LoanPurpose'),
exclude=('cat.models.MuseumObject',)
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
title=_('Administration'),
column=1,
collapsible=True,
models=('django.contrib.*', 'djcelergy.*', 'reports.*',
'dataimport.*', 'mediaman.*'),
))
# append a recent actions module
self.children.append(modules.RecentActions(
title=_('Recent Actions'),
column=3,
collapsible=False,
limit=5,
))
<commit_msg>Add links to admin dashboard<commit_after>from django.utils.translation import ugettext_lazy as _
from grappelli.dashboard import modules, Dashboard
class UQAMDashboard(Dashboard):
def __init__(self, **kwargs):
Dashboard.__init__(self, **kwargs)
self.children.append(modules.AppList(
title=_('Catalogue'),
column=1,
collapsible=False,
models=('cat.models.MuseumObject', 'parties.*', 'location.*',
'loans.models.LoanAgreement', 'condition.*',
'uqamcollections.*'),
exclude=('django.contrib.*', 'djcelery.*', 'reports.*'),
))
self.children.append(modules.AppList(
title='Data dictionary',
column=1,
models=('cat.models.*', 'loans.models.LoanPurpose'),
exclude=('cat.models.MuseumObject',)
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
title=_('Administration'),
column=1,
collapsible=True,
models=('django.contrib.*', 'djcelergy.*', 'reports.*',
'dataimport.*', 'mediaman.*'),
))
# append a recent actions module
self.children.append(modules.RecentActions(
title=_('Recent Actions'),
column=2,
collapsible=False,
limit=5,
))
self.children.append(modules.LinkList(
layout='inline',
title=_('Admin tools'),
column=2,
children=(
['Upload media', '/mediaman/bulk_upload/'],
['Filter/query items', '/admin/cat/museumobject/search'],
)
))
|
bd68b6e44ec65ba8f1f0afeea3a1dce08f579690
|
src/bots/chuck.py
|
src/bots/chuck.py
|
import re
import requests
import logging
from base import BaseBot
logger = logging.getLogger(__name__)
CHUCK_API_URL = 'http://api.icndb.com'
CHUCK_REGEX = re.compile(r'^!chuck')
def random_chuck_fact():
try:
fact = requests.get('%s/jokes/random' % CHUCK_API_URL.rstrip('/')).json()
return fact['value']['joke']
except Exception as e:
logger.info('Error while retrieving Chuck Norris facts: %s' % e)
return None
class ChuckBot(BaseBot):
def __init__(self, connection):
super(ChuckBot, self).__init__(connection)
def handle(self, message):
if re.match(CHUCK_REGEX, message.text):
fact = random_chuck_fact()
if not fact:
response = "Can't find any facts :feelbad:"
else:
response = fact
self.connection.send_message(response, message.channel)
return True
return False
|
import re
import requests
import logging
from base import BaseBot
from HTMLParser import HTMLParser
logger = logging.getLogger(__name__)
html_parser = HTMLParser()
CHUCK_API_URL = 'http://api.icndb.com'
CHUCK_REGEX = re.compile(r'^!chuck')
def random_chuck_fact():
try:
fact = requests.get('%s/jokes/random' % CHUCK_API_URL.rstrip('/')).json()
return html_parser.unescape(fact['value']['joke'])
except Exception as e:
logger.info('Error while retrieving Chuck Norris facts: %s' % e)
return None
class ChuckBot(BaseBot):
def __init__(self, connection):
super(ChuckBot, self).__init__(connection)
def handle(self, message):
if re.match(CHUCK_REGEX, message.text):
fact = random_chuck_fact()
if not fact:
response = "Can't find any facts :feelbad:"
else:
response = fact
self.connection.send_message(response, message.channel)
return True
return False
|
Fix for html escaped characters
|
Fix for html escaped characters
|
Python
|
mit
|
orangeblock/slack-bot
|
import re
import requests
import logging
from base import BaseBot
logger = logging.getLogger(__name__)
CHUCK_API_URL = 'http://api.icndb.com'
CHUCK_REGEX = re.compile(r'^!chuck')
def random_chuck_fact():
try:
fact = requests.get('%s/jokes/random' % CHUCK_API_URL.rstrip('/')).json()
return fact['value']['joke']
except Exception as e:
logger.info('Error while retrieving Chuck Norris facts: %s' % e)
return None
class ChuckBot(BaseBot):
def __init__(self, connection):
super(ChuckBot, self).__init__(connection)
def handle(self, message):
if re.match(CHUCK_REGEX, message.text):
fact = random_chuck_fact()
if not fact:
response = "Can't find any facts :feelbad:"
else:
response = fact
self.connection.send_message(response, message.channel)
return True
return False
Fix for html escaped characters
|
import re
import requests
import logging
from base import BaseBot
from HTMLParser import HTMLParser
logger = logging.getLogger(__name__)
html_parser = HTMLParser()
CHUCK_API_URL = 'http://api.icndb.com'
CHUCK_REGEX = re.compile(r'^!chuck')
def random_chuck_fact():
try:
fact = requests.get('%s/jokes/random' % CHUCK_API_URL.rstrip('/')).json()
return html_parser.unescape(fact['value']['joke'])
except Exception as e:
logger.info('Error while retrieving Chuck Norris facts: %s' % e)
return None
class ChuckBot(BaseBot):
def __init__(self, connection):
super(ChuckBot, self).__init__(connection)
def handle(self, message):
if re.match(CHUCK_REGEX, message.text):
fact = random_chuck_fact()
if not fact:
response = "Can't find any facts :feelbad:"
else:
response = fact
self.connection.send_message(response, message.channel)
return True
return False
|
<commit_before>import re
import requests
import logging
from base import BaseBot
logger = logging.getLogger(__name__)
CHUCK_API_URL = 'http://api.icndb.com'
CHUCK_REGEX = re.compile(r'^!chuck')
def random_chuck_fact():
try:
fact = requests.get('%s/jokes/random' % CHUCK_API_URL.rstrip('/')).json()
return fact['value']['joke']
except Exception as e:
logger.info('Error while retrieving Chuck Norris facts: %s' % e)
return None
class ChuckBot(BaseBot):
def __init__(self, connection):
super(ChuckBot, self).__init__(connection)
def handle(self, message):
if re.match(CHUCK_REGEX, message.text):
fact = random_chuck_fact()
if not fact:
response = "Can't find any facts :feelbad:"
else:
response = fact
self.connection.send_message(response, message.channel)
return True
return False
<commit_msg>Fix for html escaped characters<commit_after>
|
import re
import requests
import logging
from base import BaseBot
from HTMLParser import HTMLParser
logger = logging.getLogger(__name__)
html_parser = HTMLParser()
CHUCK_API_URL = 'http://api.icndb.com'
CHUCK_REGEX = re.compile(r'^!chuck')
def random_chuck_fact():
try:
fact = requests.get('%s/jokes/random' % CHUCK_API_URL.rstrip('/')).json()
return html_parser.unescape(fact['value']['joke'])
except Exception as e:
logger.info('Error while retrieving Chuck Norris facts: %s' % e)
return None
class ChuckBot(BaseBot):
def __init__(self, connection):
super(ChuckBot, self).__init__(connection)
def handle(self, message):
if re.match(CHUCK_REGEX, message.text):
fact = random_chuck_fact()
if not fact:
response = "Can't find any facts :feelbad:"
else:
response = fact
self.connection.send_message(response, message.channel)
return True
return False
|
import re
import requests
import logging
from base import BaseBot
logger = logging.getLogger(__name__)
CHUCK_API_URL = 'http://api.icndb.com'
CHUCK_REGEX = re.compile(r'^!chuck')
def random_chuck_fact():
try:
fact = requests.get('%s/jokes/random' % CHUCK_API_URL.rstrip('/')).json()
return fact['value']['joke']
except Exception as e:
logger.info('Error while retrieving Chuck Norris facts: %s' % e)
return None
class ChuckBot(BaseBot):
def __init__(self, connection):
super(ChuckBot, self).__init__(connection)
def handle(self, message):
if re.match(CHUCK_REGEX, message.text):
fact = random_chuck_fact()
if not fact:
response = "Can't find any facts :feelbad:"
else:
response = fact
self.connection.send_message(response, message.channel)
return True
return False
Fix for html escaped charactersimport re
import requests
import logging
from base import BaseBot
from HTMLParser import HTMLParser
logger = logging.getLogger(__name__)
html_parser = HTMLParser()
CHUCK_API_URL = 'http://api.icndb.com'
CHUCK_REGEX = re.compile(r'^!chuck')
def random_chuck_fact():
try:
fact = requests.get('%s/jokes/random' % CHUCK_API_URL.rstrip('/')).json()
return html_parser.unescape(fact['value']['joke'])
except Exception as e:
logger.info('Error while retrieving Chuck Norris facts: %s' % e)
return None
class ChuckBot(BaseBot):
def __init__(self, connection):
super(ChuckBot, self).__init__(connection)
def handle(self, message):
if re.match(CHUCK_REGEX, message.text):
fact = random_chuck_fact()
if not fact:
response = "Can't find any facts :feelbad:"
else:
response = fact
self.connection.send_message(response, message.channel)
return True
return False
|
<commit_before>import re
import requests
import logging
from base import BaseBot
logger = logging.getLogger(__name__)
CHUCK_API_URL = 'http://api.icndb.com'
CHUCK_REGEX = re.compile(r'^!chuck')
def random_chuck_fact():
try:
fact = requests.get('%s/jokes/random' % CHUCK_API_URL.rstrip('/')).json()
return fact['value']['joke']
except Exception as e:
logger.info('Error while retrieving Chuck Norris facts: %s' % e)
return None
class ChuckBot(BaseBot):
def __init__(self, connection):
super(ChuckBot, self).__init__(connection)
def handle(self, message):
if re.match(CHUCK_REGEX, message.text):
fact = random_chuck_fact()
if not fact:
response = "Can't find any facts :feelbad:"
else:
response = fact
self.connection.send_message(response, message.channel)
return True
return False
<commit_msg>Fix for html escaped characters<commit_after>import re
import requests
import logging
from base import BaseBot
from HTMLParser import HTMLParser
logger = logging.getLogger(__name__)
html_parser = HTMLParser()
CHUCK_API_URL = 'http://api.icndb.com'
CHUCK_REGEX = re.compile(r'^!chuck')
def random_chuck_fact():
try:
fact = requests.get('%s/jokes/random' % CHUCK_API_URL.rstrip('/')).json()
return html_parser.unescape(fact['value']['joke'])
except Exception as e:
logger.info('Error while retrieving Chuck Norris facts: %s' % e)
return None
class ChuckBot(BaseBot):
def __init__(self, connection):
super(ChuckBot, self).__init__(connection)
def handle(self, message):
if re.match(CHUCK_REGEX, message.text):
fact = random_chuck_fact()
if not fact:
response = "Can't find any facts :feelbad:"
else:
response = fact
self.connection.send_message(response, message.channel)
return True
return False
|
532d9ea686793ebef8b6412a038ab58b54ca0ca6
|
lib/disco/schemes/scheme_discodb.py
|
lib/disco/schemes/scheme_discodb.py
|
import __builtin__
from disco import util
from discodb import DiscoDB, Q
def open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(__builtin__.open(util.localize(path,
disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
return method(*filter(None, arg))
return discodb
def input_stream(fd, size, url, params):
return open(url, task=globals().get('Task')), size, url
|
from disco import util
from discodb import DiscoDB, Q
def open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(__builtins__.open(util.localize(path,
disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
return method(*filter(None, arg))
return discodb
def input_stream(fd, size, url, params):
return open(url, task=globals().get('Task')), size, url
|
Use __builtins__ directly instead of import __builtin__.
|
Use __builtins__ directly instead of import __builtin__.
|
Python
|
bsd-3-clause
|
simudream/disco,seabirdzh/disco,ErikDubbelboer/disco,simudream/disco,oldmantaiter/disco,ktkt2009/disco,beni55/disco,mwilliams3/disco,discoproject/disco,mozilla/disco,mwilliams3/disco,pooya/disco,pooya/disco,oldmantaiter/disco,discoproject/disco,pombredanne/disco,seabirdzh/disco,discoproject/disco,ErikDubbelboer/disco,oldmantaiter/disco,ktkt2009/disco,beni55/disco,mozilla/disco,pooya/disco,ErikDubbelboer/disco,ktkt2009/disco,mwilliams3/disco,ErikDubbelboer/disco,mwilliams3/disco,pombredanne/disco,beni55/disco,beni55/disco,ktkt2009/disco,oldmantaiter/disco,seabirdzh/disco,oldmantaiter/disco,mozilla/disco,ErikDubbelboer/disco,simudream/disco,ktkt2009/disco,seabirdzh/disco,discoproject/disco,mwilliams3/disco,simudream/disco,mozilla/disco,discoproject/disco,pombredanne/disco,pombredanne/disco,seabirdzh/disco,beni55/disco,pooya/disco,pombredanne/disco,simudream/disco
|
import __builtin__
from disco import util
from discodb import DiscoDB, Q
def open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(__builtin__.open(util.localize(path,
disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
return method(*filter(None, arg))
return discodb
def input_stream(fd, size, url, params):
return open(url, task=globals().get('Task')), size, url
Use __builtins__ directly instead of import __builtin__.
|
from disco import util
from discodb import DiscoDB, Q
def open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(__builtins__.open(util.localize(path,
disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
return method(*filter(None, arg))
return discodb
def input_stream(fd, size, url, params):
return open(url, task=globals().get('Task')), size, url
|
<commit_before>import __builtin__
from disco import util
from discodb import DiscoDB, Q
def open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(__builtin__.open(util.localize(path,
disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
return method(*filter(None, arg))
return discodb
def input_stream(fd, size, url, params):
return open(url, task=globals().get('Task')), size, url
<commit_msg>Use __builtins__ directly instead of import __builtin__.<commit_after>
|
from disco import util
from discodb import DiscoDB, Q
def open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(__builtins__.open(util.localize(path,
disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
return method(*filter(None, arg))
return discodb
def input_stream(fd, size, url, params):
return open(url, task=globals().get('Task')), size, url
|
import __builtin__
from disco import util
from discodb import DiscoDB, Q
def open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(__builtin__.open(util.localize(path,
disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
return method(*filter(None, arg))
return discodb
def input_stream(fd, size, url, params):
return open(url, task=globals().get('Task')), size, url
Use __builtins__ directly instead of import __builtin__.from disco import util
from discodb import DiscoDB, Q
def open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(__builtins__.open(util.localize(path,
disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
return method(*filter(None, arg))
return discodb
def input_stream(fd, size, url, params):
return open(url, task=globals().get('Task')), size, url
|
<commit_before>import __builtin__
from disco import util
from discodb import DiscoDB, Q
def open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(__builtin__.open(util.localize(path,
disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
return method(*filter(None, arg))
return discodb
def input_stream(fd, size, url, params):
return open(url, task=globals().get('Task')), size, url
<commit_msg>Use __builtins__ directly instead of import __builtin__.<commit_after>from disco import util
from discodb import DiscoDB, Q
def open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(__builtins__.open(util.localize(path,
disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
return method(*filter(None, arg))
return discodb
def input_stream(fd, size, url, params):
return open(url, task=globals().get('Task')), size, url
|
cfdae6dcd3cc3f12e2c98fc3c6a51f146f185e98
|
rollbar/contrib/starlette/middleware.py
|
rollbar/contrib/starlette/middleware.py
|
import sys
from starlette.requests import Request
from starlette.types import Receive, Scope, Send
import rollbar
from .requests import store_current_request
from rollbar.contrib.asgi import ReporterMiddleware as ASGIReporterMiddleware
from rollbar.lib._async import RollbarAsyncError, try_report
class ReporterMiddleware(ASGIReporterMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
try:
store_current_request(scope, receive)
await self.app(scope, receive, send)
except Exception:
if scope['type'] == 'http':
request = Request(scope, receive, send)
# Consuming the request body in Starlette middleware is problematic.
# See: https://github.com/encode/starlette/issues/495#issuecomment-494008175
# Uncomment the line below if you know the risks.
#
# await request.body()
exc_info = sys.exc_info()
try:
await try_report(exc_info, request)
except RollbarAsyncError:
rollbar.report_exc_info(exc_info, request)
raise
|
import sys
from starlette.requests import Request
from starlette.types import Receive, Scope, Send
import rollbar
from .requests import store_current_request
from rollbar.contrib.asgi import ReporterMiddleware as ASGIReporterMiddleware
from rollbar.lib._async import RollbarAsyncError, try_report
class ReporterMiddleware(ASGIReporterMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
try:
store_current_request(scope, receive)
await self.app(scope, receive, send)
except Exception:
if scope['type'] == 'http':
request = Request(scope, receive, send)
# Consuming the request body in Starlette middleware is problematic.
# See: https://github.com/encode/starlette/issues/495#issuecomment-494008175
# Uncomment lines below if you know the risks.
#
# await request.body()
# await request.form()
exc_info = sys.exc_info()
try:
await try_report(exc_info, request)
except RollbarAsyncError:
rollbar.report_exc_info(exc_info, request)
raise
|
Update comment and optional instructions
|
Update comment and optional instructions
|
Python
|
mit
|
rollbar/pyrollbar
|
import sys
from starlette.requests import Request
from starlette.types import Receive, Scope, Send
import rollbar
from .requests import store_current_request
from rollbar.contrib.asgi import ReporterMiddleware as ASGIReporterMiddleware
from rollbar.lib._async import RollbarAsyncError, try_report
class ReporterMiddleware(ASGIReporterMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
try:
store_current_request(scope, receive)
await self.app(scope, receive, send)
except Exception:
if scope['type'] == 'http':
request = Request(scope, receive, send)
# Consuming the request body in Starlette middleware is problematic.
# See: https://github.com/encode/starlette/issues/495#issuecomment-494008175
# Uncomment the line below if you know the risks.
#
# await request.body()
exc_info = sys.exc_info()
try:
await try_report(exc_info, request)
except RollbarAsyncError:
rollbar.report_exc_info(exc_info, request)
raise
Update comment and optional instructions
|
import sys
from starlette.requests import Request
from starlette.types import Receive, Scope, Send
import rollbar
from .requests import store_current_request
from rollbar.contrib.asgi import ReporterMiddleware as ASGIReporterMiddleware
from rollbar.lib._async import RollbarAsyncError, try_report
class ReporterMiddleware(ASGIReporterMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
try:
store_current_request(scope, receive)
await self.app(scope, receive, send)
except Exception:
if scope['type'] == 'http':
request = Request(scope, receive, send)
# Consuming the request body in Starlette middleware is problematic.
# See: https://github.com/encode/starlette/issues/495#issuecomment-494008175
# Uncomment lines below if you know the risks.
#
# await request.body()
# await request.form()
exc_info = sys.exc_info()
try:
await try_report(exc_info, request)
except RollbarAsyncError:
rollbar.report_exc_info(exc_info, request)
raise
|
<commit_before>import sys
from starlette.requests import Request
from starlette.types import Receive, Scope, Send
import rollbar
from .requests import store_current_request
from rollbar.contrib.asgi import ReporterMiddleware as ASGIReporterMiddleware
from rollbar.lib._async import RollbarAsyncError, try_report
class ReporterMiddleware(ASGIReporterMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
try:
store_current_request(scope, receive)
await self.app(scope, receive, send)
except Exception:
if scope['type'] == 'http':
request = Request(scope, receive, send)
# Consuming the request body in Starlette middleware is problematic.
# See: https://github.com/encode/starlette/issues/495#issuecomment-494008175
# Uncomment the line below if you know the risks.
#
# await request.body()
exc_info = sys.exc_info()
try:
await try_report(exc_info, request)
except RollbarAsyncError:
rollbar.report_exc_info(exc_info, request)
raise
<commit_msg>Update comment and optional instructions<commit_after>
|
import sys
from starlette.requests import Request
from starlette.types import Receive, Scope, Send
import rollbar
from .requests import store_current_request
from rollbar.contrib.asgi import ReporterMiddleware as ASGIReporterMiddleware
from rollbar.lib._async import RollbarAsyncError, try_report
class ReporterMiddleware(ASGIReporterMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
try:
store_current_request(scope, receive)
await self.app(scope, receive, send)
except Exception:
if scope['type'] == 'http':
request = Request(scope, receive, send)
# Consuming the request body in Starlette middleware is problematic.
# See: https://github.com/encode/starlette/issues/495#issuecomment-494008175
# Uncomment lines below if you know the risks.
#
# await request.body()
# await request.form()
exc_info = sys.exc_info()
try:
await try_report(exc_info, request)
except RollbarAsyncError:
rollbar.report_exc_info(exc_info, request)
raise
|
import sys
from starlette.requests import Request
from starlette.types import Receive, Scope, Send
import rollbar
from .requests import store_current_request
from rollbar.contrib.asgi import ReporterMiddleware as ASGIReporterMiddleware
from rollbar.lib._async import RollbarAsyncError, try_report
class ReporterMiddleware(ASGIReporterMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
try:
store_current_request(scope, receive)
await self.app(scope, receive, send)
except Exception:
if scope['type'] == 'http':
request = Request(scope, receive, send)
# Consuming the request body in Starlette middleware is problematic.
# See: https://github.com/encode/starlette/issues/495#issuecomment-494008175
# Uncomment the line below if you know the risks.
#
# await request.body()
exc_info = sys.exc_info()
try:
await try_report(exc_info, request)
except RollbarAsyncError:
rollbar.report_exc_info(exc_info, request)
raise
Update comment and optional instructionsimport sys
from starlette.requests import Request
from starlette.types import Receive, Scope, Send
import rollbar
from .requests import store_current_request
from rollbar.contrib.asgi import ReporterMiddleware as ASGIReporterMiddleware
from rollbar.lib._async import RollbarAsyncError, try_report
class ReporterMiddleware(ASGIReporterMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
try:
store_current_request(scope, receive)
await self.app(scope, receive, send)
except Exception:
if scope['type'] == 'http':
request = Request(scope, receive, send)
# Consuming the request body in Starlette middleware is problematic.
# See: https://github.com/encode/starlette/issues/495#issuecomment-494008175
# Uncomment lines below if you know the risks.
#
# await request.body()
# await request.form()
exc_info = sys.exc_info()
try:
await try_report(exc_info, request)
except RollbarAsyncError:
rollbar.report_exc_info(exc_info, request)
raise
|
<commit_before>import sys
from starlette.requests import Request
from starlette.types import Receive, Scope, Send
import rollbar
from .requests import store_current_request
from rollbar.contrib.asgi import ReporterMiddleware as ASGIReporterMiddleware
from rollbar.lib._async import RollbarAsyncError, try_report
class ReporterMiddleware(ASGIReporterMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
try:
store_current_request(scope, receive)
await self.app(scope, receive, send)
except Exception:
if scope['type'] == 'http':
request = Request(scope, receive, send)
# Consuming the request body in Starlette middleware is problematic.
# See: https://github.com/encode/starlette/issues/495#issuecomment-494008175
# Uncomment the line below if you know the risks.
#
# await request.body()
exc_info = sys.exc_info()
try:
await try_report(exc_info, request)
except RollbarAsyncError:
rollbar.report_exc_info(exc_info, request)
raise
<commit_msg>Update comment and optional instructions<commit_after>import sys
from starlette.requests import Request
from starlette.types import Receive, Scope, Send
import rollbar
from .requests import store_current_request
from rollbar.contrib.asgi import ReporterMiddleware as ASGIReporterMiddleware
from rollbar.lib._async import RollbarAsyncError, try_report
class ReporterMiddleware(ASGIReporterMiddleware):
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
try:
store_current_request(scope, receive)
await self.app(scope, receive, send)
except Exception:
if scope['type'] == 'http':
request = Request(scope, receive, send)
# Consuming the request body in Starlette middleware is problematic.
# See: https://github.com/encode/starlette/issues/495#issuecomment-494008175
# Uncomment lines below if you know the risks.
#
# await request.body()
# await request.form()
exc_info = sys.exc_info()
try:
await try_report(exc_info, request)
except RollbarAsyncError:
rollbar.report_exc_info(exc_info, request)
raise
|
60e10d1e25a68f63a232b5c3fe1c23284baef63e
|
rnacentral/portal/utils/go_terms.py
|
rnacentral/portal/utils/go_terms.py
|
MAPPING = {
"RNase_MRP_RNA": None,
"RNase_P_RNA": None,
"SRP_RNA": None,
"Y_RNA": None,
"antisense_RNA": None,
"autocatalytically_spliced_intron": None,
"guide_RNA": None,
"hammerhead_ribozyme": None,
"lncRNA": None,
"miRNA": None,
"misc_RNA": None,
"ncRNA": None,
"other": None,
"piRNA": None,
"precursor_RNA": None,
"rasiRNA": None,
"ribozyme": None,
"scRNA": None,
"siRNA": None,
"snRNA": None,
"snoRNA": None,
"telomerase_RNA": None,
"tmRNA": None,
"vault_RNA": None,
'rRNA': 'GO:0005840',
'tRNA': None,
}
|
MAPPING = {
"RNase_MRP_RNA": None,
"RNase_P_RNA": None,
"SRP_RNA": None,
"Y_RNA": None,
"antisense_RNA": None,
"autocatalytically_spliced_intron": None,
"guide_RNA": None,
"hammerhead_ribozyme": None,
"lncRNA": None,
"miRNA": None,
"misc_RNA": None,
"ncRNA": None,
"other": None,
"piRNA": None,
"precursor_RNA": None,
"rasiRNA": None,
"ribozyme": None,
"scRNA": None,
"siRNA": None,
"snRNA": None,
"snoRNA": None,
"telomerase_RNA": None,
"tmRNA": None,
"vault_RNA": None,
'rRNA': 'GO:0005840',
'tRNA': 'CHEBI:17843',
}
|
Add a guess about tRNA
|
Add a guess about tRNA
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
|
MAPPING = {
"RNase_MRP_RNA": None,
"RNase_P_RNA": None,
"SRP_RNA": None,
"Y_RNA": None,
"antisense_RNA": None,
"autocatalytically_spliced_intron": None,
"guide_RNA": None,
"hammerhead_ribozyme": None,
"lncRNA": None,
"miRNA": None,
"misc_RNA": None,
"ncRNA": None,
"other": None,
"piRNA": None,
"precursor_RNA": None,
"rasiRNA": None,
"ribozyme": None,
"scRNA": None,
"siRNA": None,
"snRNA": None,
"snoRNA": None,
"telomerase_RNA": None,
"tmRNA": None,
"vault_RNA": None,
'rRNA': 'GO:0005840',
'tRNA': None,
}
Add a guess about tRNA
|
MAPPING = {
"RNase_MRP_RNA": None,
"RNase_P_RNA": None,
"SRP_RNA": None,
"Y_RNA": None,
"antisense_RNA": None,
"autocatalytically_spliced_intron": None,
"guide_RNA": None,
"hammerhead_ribozyme": None,
"lncRNA": None,
"miRNA": None,
"misc_RNA": None,
"ncRNA": None,
"other": None,
"piRNA": None,
"precursor_RNA": None,
"rasiRNA": None,
"ribozyme": None,
"scRNA": None,
"siRNA": None,
"snRNA": None,
"snoRNA": None,
"telomerase_RNA": None,
"tmRNA": None,
"vault_RNA": None,
'rRNA': 'GO:0005840',
'tRNA': 'CHEBI:17843',
}
|
<commit_before>MAPPING = {
"RNase_MRP_RNA": None,
"RNase_P_RNA": None,
"SRP_RNA": None,
"Y_RNA": None,
"antisense_RNA": None,
"autocatalytically_spliced_intron": None,
"guide_RNA": None,
"hammerhead_ribozyme": None,
"lncRNA": None,
"miRNA": None,
"misc_RNA": None,
"ncRNA": None,
"other": None,
"piRNA": None,
"precursor_RNA": None,
"rasiRNA": None,
"ribozyme": None,
"scRNA": None,
"siRNA": None,
"snRNA": None,
"snoRNA": None,
"telomerase_RNA": None,
"tmRNA": None,
"vault_RNA": None,
'rRNA': 'GO:0005840',
'tRNA': None,
}
<commit_msg>Add a guess about tRNA<commit_after>
|
MAPPING = {
"RNase_MRP_RNA": None,
"RNase_P_RNA": None,
"SRP_RNA": None,
"Y_RNA": None,
"antisense_RNA": None,
"autocatalytically_spliced_intron": None,
"guide_RNA": None,
"hammerhead_ribozyme": None,
"lncRNA": None,
"miRNA": None,
"misc_RNA": None,
"ncRNA": None,
"other": None,
"piRNA": None,
"precursor_RNA": None,
"rasiRNA": None,
"ribozyme": None,
"scRNA": None,
"siRNA": None,
"snRNA": None,
"snoRNA": None,
"telomerase_RNA": None,
"tmRNA": None,
"vault_RNA": None,
'rRNA': 'GO:0005840',
'tRNA': 'CHEBI:17843',
}
|
MAPPING = {
"RNase_MRP_RNA": None,
"RNase_P_RNA": None,
"SRP_RNA": None,
"Y_RNA": None,
"antisense_RNA": None,
"autocatalytically_spliced_intron": None,
"guide_RNA": None,
"hammerhead_ribozyme": None,
"lncRNA": None,
"miRNA": None,
"misc_RNA": None,
"ncRNA": None,
"other": None,
"piRNA": None,
"precursor_RNA": None,
"rasiRNA": None,
"ribozyme": None,
"scRNA": None,
"siRNA": None,
"snRNA": None,
"snoRNA": None,
"telomerase_RNA": None,
"tmRNA": None,
"vault_RNA": None,
'rRNA': 'GO:0005840',
'tRNA': None,
}
Add a guess about tRNAMAPPING = {
"RNase_MRP_RNA": None,
"RNase_P_RNA": None,
"SRP_RNA": None,
"Y_RNA": None,
"antisense_RNA": None,
"autocatalytically_spliced_intron": None,
"guide_RNA": None,
"hammerhead_ribozyme": None,
"lncRNA": None,
"miRNA": None,
"misc_RNA": None,
"ncRNA": None,
"other": None,
"piRNA": None,
"precursor_RNA": None,
"rasiRNA": None,
"ribozyme": None,
"scRNA": None,
"siRNA": None,
"snRNA": None,
"snoRNA": None,
"telomerase_RNA": None,
"tmRNA": None,
"vault_RNA": None,
'rRNA': 'GO:0005840',
'tRNA': 'CHEBI:17843',
}
|
<commit_before>MAPPING = {
"RNase_MRP_RNA": None,
"RNase_P_RNA": None,
"SRP_RNA": None,
"Y_RNA": None,
"antisense_RNA": None,
"autocatalytically_spliced_intron": None,
"guide_RNA": None,
"hammerhead_ribozyme": None,
"lncRNA": None,
"miRNA": None,
"misc_RNA": None,
"ncRNA": None,
"other": None,
"piRNA": None,
"precursor_RNA": None,
"rasiRNA": None,
"ribozyme": None,
"scRNA": None,
"siRNA": None,
"snRNA": None,
"snoRNA": None,
"telomerase_RNA": None,
"tmRNA": None,
"vault_RNA": None,
'rRNA': 'GO:0005840',
'tRNA': None,
}
<commit_msg>Add a guess about tRNA<commit_after>MAPPING = {
"RNase_MRP_RNA": None,
"RNase_P_RNA": None,
"SRP_RNA": None,
"Y_RNA": None,
"antisense_RNA": None,
"autocatalytically_spliced_intron": None,
"guide_RNA": None,
"hammerhead_ribozyme": None,
"lncRNA": None,
"miRNA": None,
"misc_RNA": None,
"ncRNA": None,
"other": None,
"piRNA": None,
"precursor_RNA": None,
"rasiRNA": None,
"ribozyme": None,
"scRNA": None,
"siRNA": None,
"snRNA": None,
"snoRNA": None,
"telomerase_RNA": None,
"tmRNA": None,
"vault_RNA": None,
'rRNA': 'GO:0005840',
'tRNA': 'CHEBI:17843',
}
|
10db23db0026c7e0987fb2481f1abebf5509b43c
|
tests/_test_selenium_marionette.py
|
tests/_test_selenium_marionette.py
|
import capybara
from capybara.tests.suite import DriverSuite
@capybara.register_driver("selenium_marionette")
def init_selenium_marionette_driver(app):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.options import Options
from capybara.selenium.driver import Driver
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities["marionette"] = True
options = Options()
# Allow the driver to attach files.
options.set_preference("dom.file.createInChild", True)
return Driver(app, browser="firefox", desired_capabilities=capabilities, firefox_options=options)
SeleniumMarionetteDriverSuite = DriverSuite("selenium_marionette")
|
import capybara
from capybara.tests.suite import DriverSuite
@capybara.register_driver("selenium_marionette")
def init_selenium_marionette_driver(app):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.options import Options
from capybara.selenium.driver import Driver
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities["marionette"] = True
firefox_options = Options()
# Allow the driver to attach files.
firefox_options.set_preference("dom.file.createInChild", True)
return Driver(
app, browser="firefox", desired_capabilities=capabilities, firefox_options=firefox_options)
SeleniumMarionetteDriverSuite = DriverSuite("selenium_marionette")
|
Rename local Firefox options variable
|
Rename local Firefox options variable
|
Python
|
mit
|
elliterate/capybara.py,elliterate/capybara.py,elliterate/capybara.py
|
import capybara
from capybara.tests.suite import DriverSuite
@capybara.register_driver("selenium_marionette")
def init_selenium_marionette_driver(app):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.options import Options
from capybara.selenium.driver import Driver
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities["marionette"] = True
options = Options()
# Allow the driver to attach files.
options.set_preference("dom.file.createInChild", True)
return Driver(app, browser="firefox", desired_capabilities=capabilities, firefox_options=options)
SeleniumMarionetteDriverSuite = DriverSuite("selenium_marionette")
Rename local Firefox options variable
|
import capybara
from capybara.tests.suite import DriverSuite
@capybara.register_driver("selenium_marionette")
def init_selenium_marionette_driver(app):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.options import Options
from capybara.selenium.driver import Driver
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities["marionette"] = True
firefox_options = Options()
# Allow the driver to attach files.
firefox_options.set_preference("dom.file.createInChild", True)
return Driver(
app, browser="firefox", desired_capabilities=capabilities, firefox_options=firefox_options)
SeleniumMarionetteDriverSuite = DriverSuite("selenium_marionette")
|
<commit_before>import capybara
from capybara.tests.suite import DriverSuite
@capybara.register_driver("selenium_marionette")
def init_selenium_marionette_driver(app):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.options import Options
from capybara.selenium.driver import Driver
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities["marionette"] = True
options = Options()
# Allow the driver to attach files.
options.set_preference("dom.file.createInChild", True)
return Driver(app, browser="firefox", desired_capabilities=capabilities, firefox_options=options)
SeleniumMarionetteDriverSuite = DriverSuite("selenium_marionette")
<commit_msg>Rename local Firefox options variable<commit_after>
|
import capybara
from capybara.tests.suite import DriverSuite
@capybara.register_driver("selenium_marionette")
def init_selenium_marionette_driver(app):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.options import Options
from capybara.selenium.driver import Driver
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities["marionette"] = True
firefox_options = Options()
# Allow the driver to attach files.
firefox_options.set_preference("dom.file.createInChild", True)
return Driver(
app, browser="firefox", desired_capabilities=capabilities, firefox_options=firefox_options)
SeleniumMarionetteDriverSuite = DriverSuite("selenium_marionette")
|
import capybara
from capybara.tests.suite import DriverSuite
@capybara.register_driver("selenium_marionette")
def init_selenium_marionette_driver(app):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.options import Options
from capybara.selenium.driver import Driver
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities["marionette"] = True
options = Options()
# Allow the driver to attach files.
options.set_preference("dom.file.createInChild", True)
return Driver(app, browser="firefox", desired_capabilities=capabilities, firefox_options=options)
SeleniumMarionetteDriverSuite = DriverSuite("selenium_marionette")
Rename local Firefox options variableimport capybara
from capybara.tests.suite import DriverSuite
@capybara.register_driver("selenium_marionette")
def init_selenium_marionette_driver(app):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.options import Options
from capybara.selenium.driver import Driver
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities["marionette"] = True
firefox_options = Options()
# Allow the driver to attach files.
firefox_options.set_preference("dom.file.createInChild", True)
return Driver(
app, browser="firefox", desired_capabilities=capabilities, firefox_options=firefox_options)
SeleniumMarionetteDriverSuite = DriverSuite("selenium_marionette")
|
<commit_before>import capybara
from capybara.tests.suite import DriverSuite
@capybara.register_driver("selenium_marionette")
def init_selenium_marionette_driver(app):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.options import Options
from capybara.selenium.driver import Driver
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities["marionette"] = True
options = Options()
# Allow the driver to attach files.
options.set_preference("dom.file.createInChild", True)
return Driver(app, browser="firefox", desired_capabilities=capabilities, firefox_options=options)
SeleniumMarionetteDriverSuite = DriverSuite("selenium_marionette")
<commit_msg>Rename local Firefox options variable<commit_after>import capybara
from capybara.tests.suite import DriverSuite
@capybara.register_driver("selenium_marionette")
def init_selenium_marionette_driver(app):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.options import Options
from capybara.selenium.driver import Driver
capabilities = DesiredCapabilities.FIREFOX.copy()
capabilities["marionette"] = True
firefox_options = Options()
# Allow the driver to attach files.
firefox_options.set_preference("dom.file.createInChild", True)
return Driver(
app, browser="firefox", desired_capabilities=capabilities, firefox_options=firefox_options)
SeleniumMarionetteDriverSuite = DriverSuite("selenium_marionette")
|
1ceff95a8d11fea8881f26e41594bb35f2aafd5d
|
pipeline/middleware.py
|
pipeline/middleware.py
|
from __future__ import unicode_literals
from django.core.exceptions import MiddlewareNotUsed
from django.utils.encoding import DjangoUnicodeDecodeError
from django.utils.html import strip_spaces_between_tags as minify_html
from pipeline.conf import settings
from django.utils.deprecation import MiddlewareMixin
class MinifyHTMLMiddleware(MiddlewareMixin):
def __init__(self, *args, **kwargs):
super(MinifyHTMLMiddleware, self).__init__(*args, **kwargs)
if not settings.PIPELINE_ENABLED:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.has_header('Content-Type') and 'text/html' in response['Content-Type']:
try:
response.content = minify_html(response.content.strip())
response['Content-Length'] = str(len(response.content))
except DjangoUnicodeDecodeError:
pass
return response
|
from __future__ import unicode_literals
from django.core.exceptions import MiddlewareNotUsed
from django.utils.encoding import DjangoUnicodeDecodeError
from django.utils.html import strip_spaces_between_tags as minify_html
from pipeline.conf import settings
from django.utils.deprecation import MiddlewareMixin
class MinifyHTMLMiddleware(MiddlewareMixin):
def __init__(self, *args, **kwargs):
super(MinifyHTMLMiddleware, self).__init__(*args, **kwargs)
if not settings.PIPELINE_ENABLED:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.has_header('Content-Type') and 'text/html' in response['Content-Type']:
try:
response.content = minify_html(response.content.decode().strip())
response['Content-Length'] = str(len(response.content))
except DjangoUnicodeDecodeError:
pass
return response
|
Fix 691 - HTML not getting decoded properly by Middleware in Django 2.2
|
Fix 691 - HTML not getting decoded properly by Middleware in Django 2.2
|
Python
|
mit
|
cyberdelia/django-pipeline,beedesk/django-pipeline,d9pouces/django-pipeline,jazzband/django-pipeline,cyberdelia/django-pipeline,beedesk/django-pipeline,jazzband/django-pipeline,d9pouces/django-pipeline,d9pouces/django-pipeline,jazzband/django-pipeline,beedesk/django-pipeline,cyberdelia/django-pipeline
|
from __future__ import unicode_literals
from django.core.exceptions import MiddlewareNotUsed
from django.utils.encoding import DjangoUnicodeDecodeError
from django.utils.html import strip_spaces_between_tags as minify_html
from pipeline.conf import settings
from django.utils.deprecation import MiddlewareMixin
class MinifyHTMLMiddleware(MiddlewareMixin):
def __init__(self, *args, **kwargs):
super(MinifyHTMLMiddleware, self).__init__(*args, **kwargs)
if not settings.PIPELINE_ENABLED:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.has_header('Content-Type') and 'text/html' in response['Content-Type']:
try:
response.content = minify_html(response.content.strip())
response['Content-Length'] = str(len(response.content))
except DjangoUnicodeDecodeError:
pass
return response
Fix 691 - HTML not getting decoded properly by Middleware in Django 2.2
|
from __future__ import unicode_literals
from django.core.exceptions import MiddlewareNotUsed
from django.utils.encoding import DjangoUnicodeDecodeError
from django.utils.html import strip_spaces_between_tags as minify_html
from pipeline.conf import settings
from django.utils.deprecation import MiddlewareMixin
class MinifyHTMLMiddleware(MiddlewareMixin):
def __init__(self, *args, **kwargs):
super(MinifyHTMLMiddleware, self).__init__(*args, **kwargs)
if not settings.PIPELINE_ENABLED:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.has_header('Content-Type') and 'text/html' in response['Content-Type']:
try:
response.content = minify_html(response.content.decode().strip())
response['Content-Length'] = str(len(response.content))
except DjangoUnicodeDecodeError:
pass
return response
|
<commit_before>from __future__ import unicode_literals
from django.core.exceptions import MiddlewareNotUsed
from django.utils.encoding import DjangoUnicodeDecodeError
from django.utils.html import strip_spaces_between_tags as minify_html
from pipeline.conf import settings
from django.utils.deprecation import MiddlewareMixin
class MinifyHTMLMiddleware(MiddlewareMixin):
def __init__(self, *args, **kwargs):
super(MinifyHTMLMiddleware, self).__init__(*args, **kwargs)
if not settings.PIPELINE_ENABLED:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.has_header('Content-Type') and 'text/html' in response['Content-Type']:
try:
response.content = minify_html(response.content.strip())
response['Content-Length'] = str(len(response.content))
except DjangoUnicodeDecodeError:
pass
return response
<commit_msg>Fix 691 - HTML not getting decoded properly by Middleware in Django 2.2<commit_after>
|
from __future__ import unicode_literals
from django.core.exceptions import MiddlewareNotUsed
from django.utils.encoding import DjangoUnicodeDecodeError
from django.utils.html import strip_spaces_between_tags as minify_html
from pipeline.conf import settings
from django.utils.deprecation import MiddlewareMixin
class MinifyHTMLMiddleware(MiddlewareMixin):
def __init__(self, *args, **kwargs):
super(MinifyHTMLMiddleware, self).__init__(*args, **kwargs)
if not settings.PIPELINE_ENABLED:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.has_header('Content-Type') and 'text/html' in response['Content-Type']:
try:
response.content = minify_html(response.content.decode().strip())
response['Content-Length'] = str(len(response.content))
except DjangoUnicodeDecodeError:
pass
return response
|
from __future__ import unicode_literals
from django.core.exceptions import MiddlewareNotUsed
from django.utils.encoding import DjangoUnicodeDecodeError
from django.utils.html import strip_spaces_between_tags as minify_html
from pipeline.conf import settings
from django.utils.deprecation import MiddlewareMixin
class MinifyHTMLMiddleware(MiddlewareMixin):
def __init__(self, *args, **kwargs):
super(MinifyHTMLMiddleware, self).__init__(*args, **kwargs)
if not settings.PIPELINE_ENABLED:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.has_header('Content-Type') and 'text/html' in response['Content-Type']:
try:
response.content = minify_html(response.content.strip())
response['Content-Length'] = str(len(response.content))
except DjangoUnicodeDecodeError:
pass
return response
Fix 691 - HTML not getting decoded properly by Middleware in Django 2.2from __future__ import unicode_literals
from django.core.exceptions import MiddlewareNotUsed
from django.utils.encoding import DjangoUnicodeDecodeError
from django.utils.html import strip_spaces_between_tags as minify_html
from pipeline.conf import settings
from django.utils.deprecation import MiddlewareMixin
class MinifyHTMLMiddleware(MiddlewareMixin):
def __init__(self, *args, **kwargs):
super(MinifyHTMLMiddleware, self).__init__(*args, **kwargs)
if not settings.PIPELINE_ENABLED:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.has_header('Content-Type') and 'text/html' in response['Content-Type']:
try:
response.content = minify_html(response.content.decode().strip())
response['Content-Length'] = str(len(response.content))
except DjangoUnicodeDecodeError:
pass
return response
|
<commit_before>from __future__ import unicode_literals
from django.core.exceptions import MiddlewareNotUsed
from django.utils.encoding import DjangoUnicodeDecodeError
from django.utils.html import strip_spaces_between_tags as minify_html
from pipeline.conf import settings
from django.utils.deprecation import MiddlewareMixin
class MinifyHTMLMiddleware(MiddlewareMixin):
def __init__(self, *args, **kwargs):
super(MinifyHTMLMiddleware, self).__init__(*args, **kwargs)
if not settings.PIPELINE_ENABLED:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.has_header('Content-Type') and 'text/html' in response['Content-Type']:
try:
response.content = minify_html(response.content.strip())
response['Content-Length'] = str(len(response.content))
except DjangoUnicodeDecodeError:
pass
return response
<commit_msg>Fix 691 - HTML not getting decoded properly by Middleware in Django 2.2<commit_after>from __future__ import unicode_literals
from django.core.exceptions import MiddlewareNotUsed
from django.utils.encoding import DjangoUnicodeDecodeError
from django.utils.html import strip_spaces_between_tags as minify_html
from pipeline.conf import settings
from django.utils.deprecation import MiddlewareMixin
class MinifyHTMLMiddleware(MiddlewareMixin):
def __init__(self, *args, **kwargs):
super(MinifyHTMLMiddleware, self).__init__(*args, **kwargs)
if not settings.PIPELINE_ENABLED:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.has_header('Content-Type') and 'text/html' in response['Content-Type']:
try:
response.content = minify_html(response.content.decode().strip())
response['Content-Length'] = str(len(response.content))
except DjangoUnicodeDecodeError:
pass
return response
|
bf254d3f3cff2f3f3f9de1f8a904143813b01240
|
passphrase/passphrase.py
|
passphrase/passphrase.py
|
#!/usr/bin/python
import argparse
from glob import glob
import random
import os.path
from contextlib import contextmanager
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
_dir = os.path.dirname(os.path.abspath(__file__))
def available_languages():
with cd(_dir + "/dictionaries/"):
langs = list(n.replace(".txt", "") for n in glob("*.txt"))
return sorted(langs)
def generate(lang, num):
return [x.strip() for x in sorted(random.sample(list(open('%s/dictionaries/%s.txt' % (_dir, lang))), num))]
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--language', default = "en", help = "Show results from which language")
parser.add_argument('-L', '--list', action = "store_true", help = "Show available languages")
parser.add_argument('-n', '--number', type = int, default = 12, help = "Number of results from which to choose")
args = parser.parse_args()
if args.list:
print " ".join(available_languages())
else:
print " ".join(generate(args.language, args.number))
if __name__ == "__main__":
main()
|
#!/usr/bin/python
import argparse
from glob import glob
import random
import os.path
from contextlib import contextmanager
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
_dir = os.path.dirname(os.path.abspath(__file__))
def available_languages():
with cd(_dir + "/dictionaries/"):
langs = list(n.replace(".txt", "") for n in glob("*.txt"))
return sorted(langs)
def generate(lang, num):
try:
return [x.strip() for x in sorted(random.sample(list(open('%s/dictionaries/%s.txt' % (_dir, lang))), num))]
except:
return ["Dictionary ", lang, " not found. Use the '-L' flag to list available dictionaries."]
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--language', default = "en", help = "Show results from which language")
parser.add_argument('-L', '--list', action = "store_true", help = "Show available languages")
parser.add_argument('-n', '--number', type = int, default = 12, help = "Number of results from which to choose")
args = parser.parse_args()
if args.list:
print " ".join(available_languages())
else:
print " ".join(generate(args.language, args.number))
if __name__ == "__main__":
main()
|
Handle a missing dictionary exception
|
Handle a missing dictionary exception
|
Python
|
bsd-3-clause
|
Version2beta/passphrase
|
#!/usr/bin/python
import argparse
from glob import glob
import random
import os.path
from contextlib import contextmanager
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
_dir = os.path.dirname(os.path.abspath(__file__))
def available_languages():
with cd(_dir + "/dictionaries/"):
langs = list(n.replace(".txt", "") for n in glob("*.txt"))
return sorted(langs)
def generate(lang, num):
return [x.strip() for x in sorted(random.sample(list(open('%s/dictionaries/%s.txt' % (_dir, lang))), num))]
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--language', default = "en", help = "Show results from which language")
parser.add_argument('-L', '--list', action = "store_true", help = "Show available languages")
parser.add_argument('-n', '--number', type = int, default = 12, help = "Number of results from which to choose")
args = parser.parse_args()
if args.list:
print " ".join(available_languages())
else:
print " ".join(generate(args.language, args.number))
if __name__ == "__main__":
main()
Handle a missing dictionary exception
|
#!/usr/bin/python
import argparse
from glob import glob
import random
import os.path
from contextlib import contextmanager
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
_dir = os.path.dirname(os.path.abspath(__file__))
def available_languages():
with cd(_dir + "/dictionaries/"):
langs = list(n.replace(".txt", "") for n in glob("*.txt"))
return sorted(langs)
def generate(lang, num):
try:
return [x.strip() for x in sorted(random.sample(list(open('%s/dictionaries/%s.txt' % (_dir, lang))), num))]
except:
return ["Dictionary ", lang, " not found. Use the '-L' flag to list available dictionaries."]
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--language', default = "en", help = "Show results from which language")
parser.add_argument('-L', '--list', action = "store_true", help = "Show available languages")
parser.add_argument('-n', '--number', type = int, default = 12, help = "Number of results from which to choose")
args = parser.parse_args()
if args.list:
print " ".join(available_languages())
else:
print " ".join(generate(args.language, args.number))
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python
import argparse
from glob import glob
import random
import os.path
from contextlib import contextmanager
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
_dir = os.path.dirname(os.path.abspath(__file__))
def available_languages():
with cd(_dir + "/dictionaries/"):
langs = list(n.replace(".txt", "") for n in glob("*.txt"))
return sorted(langs)
def generate(lang, num):
return [x.strip() for x in sorted(random.sample(list(open('%s/dictionaries/%s.txt' % (_dir, lang))), num))]
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--language', default = "en", help = "Show results from which language")
parser.add_argument('-L', '--list', action = "store_true", help = "Show available languages")
parser.add_argument('-n', '--number', type = int, default = 12, help = "Number of results from which to choose")
args = parser.parse_args()
if args.list:
print " ".join(available_languages())
else:
print " ".join(generate(args.language, args.number))
if __name__ == "__main__":
main()
<commit_msg>Handle a missing dictionary exception<commit_after>
|
#!/usr/bin/python
import argparse
from glob import glob
import random
import os.path
from contextlib import contextmanager
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
_dir = os.path.dirname(os.path.abspath(__file__))
def available_languages():
with cd(_dir + "/dictionaries/"):
langs = list(n.replace(".txt", "") for n in glob("*.txt"))
return sorted(langs)
def generate(lang, num):
try:
return [x.strip() for x in sorted(random.sample(list(open('%s/dictionaries/%s.txt' % (_dir, lang))), num))]
except:
return ["Dictionary ", lang, " not found. Use the '-L' flag to list available dictionaries."]
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--language', default = "en", help = "Show results from which language")
parser.add_argument('-L', '--list', action = "store_true", help = "Show available languages")
parser.add_argument('-n', '--number', type = int, default = 12, help = "Number of results from which to choose")
args = parser.parse_args()
if args.list:
print " ".join(available_languages())
else:
print " ".join(generate(args.language, args.number))
if __name__ == "__main__":
main()
|
#!/usr/bin/python
import argparse
from glob import glob
import random
import os.path
from contextlib import contextmanager
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
_dir = os.path.dirname(os.path.abspath(__file__))
def available_languages():
with cd(_dir + "/dictionaries/"):
langs = list(n.replace(".txt", "") for n in glob("*.txt"))
return sorted(langs)
def generate(lang, num):
return [x.strip() for x in sorted(random.sample(list(open('%s/dictionaries/%s.txt' % (_dir, lang))), num))]
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--language', default = "en", help = "Show results from which language")
parser.add_argument('-L', '--list', action = "store_true", help = "Show available languages")
parser.add_argument('-n', '--number', type = int, default = 12, help = "Number of results from which to choose")
args = parser.parse_args()
if args.list:
print " ".join(available_languages())
else:
print " ".join(generate(args.language, args.number))
if __name__ == "__main__":
main()
Handle a missing dictionary exception#!/usr/bin/python
import argparse
from glob import glob
import random
import os.path
from contextlib import contextmanager
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
_dir = os.path.dirname(os.path.abspath(__file__))
def available_languages():
with cd(_dir + "/dictionaries/"):
langs = list(n.replace(".txt", "") for n in glob("*.txt"))
return sorted(langs)
def generate(lang, num):
try:
return [x.strip() for x in sorted(random.sample(list(open('%s/dictionaries/%s.txt' % (_dir, lang))), num))]
except:
return ["Dictionary ", lang, " not found. Use the '-L' flag to list available dictionaries."]
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--language', default = "en", help = "Show results from which language")
parser.add_argument('-L', '--list', action = "store_true", help = "Show available languages")
parser.add_argument('-n', '--number', type = int, default = 12, help = "Number of results from which to choose")
args = parser.parse_args()
if args.list:
print " ".join(available_languages())
else:
print " ".join(generate(args.language, args.number))
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python
import argparse
from glob import glob
import random
import os.path
from contextlib import contextmanager
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
_dir = os.path.dirname(os.path.abspath(__file__))
def available_languages():
with cd(_dir + "/dictionaries/"):
langs = list(n.replace(".txt", "") for n in glob("*.txt"))
return sorted(langs)
def generate(lang, num):
return [x.strip() for x in sorted(random.sample(list(open('%s/dictionaries/%s.txt' % (_dir, lang))), num))]
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--language', default = "en", help = "Show results from which language")
parser.add_argument('-L', '--list', action = "store_true", help = "Show available languages")
parser.add_argument('-n', '--number', type = int, default = 12, help = "Number of results from which to choose")
args = parser.parse_args()
if args.list:
print " ".join(available_languages())
else:
print " ".join(generate(args.language, args.number))
if __name__ == "__main__":
main()
<commit_msg>Handle a missing dictionary exception<commit_after>#!/usr/bin/python
import argparse
from glob import glob
import random
import os.path
from contextlib import contextmanager
@contextmanager
def cd(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
_dir = os.path.dirname(os.path.abspath(__file__))
def available_languages():
with cd(_dir + "/dictionaries/"):
langs = list(n.replace(".txt", "") for n in glob("*.txt"))
return sorted(langs)
def generate(lang, num):
try:
return [x.strip() for x in sorted(random.sample(list(open('%s/dictionaries/%s.txt' % (_dir, lang))), num))]
except:
return ["Dictionary ", lang, " not found. Use the '-L' flag to list available dictionaries."]
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--language', default = "en", help = "Show results from which language")
parser.add_argument('-L', '--list', action = "store_true", help = "Show available languages")
parser.add_argument('-n', '--number', type = int, default = 12, help = "Number of results from which to choose")
args = parser.parse_args()
if args.list:
print " ".join(available_languages())
else:
print " ".join(generate(args.language, args.number))
if __name__ == "__main__":
main()
|
86377a2a0618957d9707441049cad24f0de684ca
|
scripts/round2_submit.py
|
scripts/round2_submit.py
|
#!/usr/bin/env python
import opensim as osim
from osim.redis.client import Client
from osim.env import *
import numpy as np
import argparse
import os
"""
Please ensure that `visualize=False`, else there might be unexpected errors in your submission
"""
env = RunEnv(visualize=False)
client = Client()
# Create environment
observation = client.env_create()
"""
The grader runs N simulations of at most 1000 steps each. We stop after the last one
A new simulation start when `clinet.env_step` returns `done==True`
and all the simulatiosn end when the subsequent `client.env_reset()` returns a False
"""
while True:
_action = env.action_space.sample().tolist()
[observation, reward, done, info] = client.env_step(_action)
print(observation)
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
|
#!/usr/bin/env python
import opensim as osim
from osim.redis.client import Client
from osim.env import *
import numpy as np
import argparse
import os
"""
NOTE: For testing your submission scripts, you first need to ensure that redis-server is running in the background
and you can locally run the grading service by running this script : https://github.com/crowdAI/osim-rl/blob/master/osim/redis/service.py
The client and the grading service communicate with each other by pointing to the same redis server.
"""
"""
Please ensure that `visualize=False`, else there might be unexpected errors in your submission
"""
env = RunEnv(visualize=False)
client = Client()
# Create environment
observation = client.env_create()
"""
The grader runs N simulations of at most 1000 steps each. We stop after the last one
A new simulation start when `clinet.env_step` returns `done==True`
and all the simulatiosn end when the subsequent `client.env_reset()` returns a False
"""
while True:
_action = env.action_space.sample().tolist()
[observation, reward, done, info] = client.env_step(_action)
print(observation)
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
|
Add a little bit more documentation for round2submission script
|
Add a little bit more documentation for round2submission script
|
Python
|
mit
|
stanfordnmbl/osim-rl,vzhuang/osim-rl
|
#!/usr/bin/env python
import opensim as osim
from osim.redis.client import Client
from osim.env import *
import numpy as np
import argparse
import os
"""
Please ensure that `visualize=False`, else there might be unexpected errors in your submission
"""
env = RunEnv(visualize=False)
client = Client()
# Create environment
observation = client.env_create()
"""
The grader runs N simulations of at most 1000 steps each. We stop after the last one
A new simulation start when `clinet.env_step` returns `done==True`
and all the simulatiosn end when the subsequent `client.env_reset()` returns a False
"""
while True:
_action = env.action_space.sample().tolist()
[observation, reward, done, info] = client.env_step(_action)
print(observation)
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
Add a little bit more documentation for round2submission script
|
#!/usr/bin/env python
import opensim as osim
from osim.redis.client import Client
from osim.env import *
import numpy as np
import argparse
import os
"""
NOTE: For testing your submission scripts, you first need to ensure that redis-server is running in the background
and you can locally run the grading service by running this script : https://github.com/crowdAI/osim-rl/blob/master/osim/redis/service.py
The client and the grading service communicate with each other by pointing to the same redis server.
"""
"""
Please ensure that `visualize=False`, else there might be unexpected errors in your submission
"""
env = RunEnv(visualize=False)
client = Client()
# Create environment
observation = client.env_create()
"""
The grader runs N simulations of at most 1000 steps each. We stop after the last one
A new simulation start when `clinet.env_step` returns `done==True`
and all the simulatiosn end when the subsequent `client.env_reset()` returns a False
"""
while True:
_action = env.action_space.sample().tolist()
[observation, reward, done, info] = client.env_step(_action)
print(observation)
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
|
<commit_before>#!/usr/bin/env python
import opensim as osim
from osim.redis.client import Client
from osim.env import *
import numpy as np
import argparse
import os
"""
Please ensure that `visualize=False`, else there might be unexpected errors in your submission
"""
env = RunEnv(visualize=False)
client = Client()
# Create environment
observation = client.env_create()
"""
The grader runs N simulations of at most 1000 steps each. We stop after the last one
A new simulation start when `clinet.env_step` returns `done==True`
and all the simulatiosn end when the subsequent `client.env_reset()` returns a False
"""
while True:
_action = env.action_space.sample().tolist()
[observation, reward, done, info] = client.env_step(_action)
print(observation)
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
<commit_msg>Add a little bit more documentation for round2submission script<commit_after>
|
#!/usr/bin/env python
import opensim as osim
from osim.redis.client import Client
from osim.env import *
import numpy as np
import argparse
import os
"""
NOTE: For testing your submission scripts, you first need to ensure that redis-server is running in the background
and you can locally run the grading service by running this script : https://github.com/crowdAI/osim-rl/blob/master/osim/redis/service.py
The client and the grading service communicate with each other by pointing to the same redis server.
"""
"""
Please ensure that `visualize=False`, else there might be unexpected errors in your submission
"""
env = RunEnv(visualize=False)
client = Client()
# Create environment
observation = client.env_create()
"""
The grader runs N simulations of at most 1000 steps each. We stop after the last one
A new simulation start when `clinet.env_step` returns `done==True`
and all the simulatiosn end when the subsequent `client.env_reset()` returns a False
"""
while True:
_action = env.action_space.sample().tolist()
[observation, reward, done, info] = client.env_step(_action)
print(observation)
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
|
#!/usr/bin/env python
import opensim as osim
from osim.redis.client import Client
from osim.env import *
import numpy as np
import argparse
import os
"""
Please ensure that `visualize=False`, else there might be unexpected errors in your submission
"""
env = RunEnv(visualize=False)
client = Client()
# Create environment
observation = client.env_create()
"""
The grader runs N simulations of at most 1000 steps each. We stop after the last one
A new simulation start when `clinet.env_step` returns `done==True`
and all the simulatiosn end when the subsequent `client.env_reset()` returns a False
"""
while True:
_action = env.action_space.sample().tolist()
[observation, reward, done, info] = client.env_step(_action)
print(observation)
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
Add a little bit more documentation for round2submission script#!/usr/bin/env python
import opensim as osim
from osim.redis.client import Client
from osim.env import *
import numpy as np
import argparse
import os
"""
NOTE: For testing your submission scripts, you first need to ensure that redis-server is running in the background
and you can locally run the grading service by running this script : https://github.com/crowdAI/osim-rl/blob/master/osim/redis/service.py
The client and the grading service communicate with each other by pointing to the same redis server.
"""
"""
Please ensure that `visualize=False`, else there might be unexpected errors in your submission
"""
env = RunEnv(visualize=False)
client = Client()
# Create environment
observation = client.env_create()
"""
The grader runs N simulations of at most 1000 steps each. We stop after the last one
A new simulation start when `clinet.env_step` returns `done==True`
and all the simulatiosn end when the subsequent `client.env_reset()` returns a False
"""
while True:
_action = env.action_space.sample().tolist()
[observation, reward, done, info] = client.env_step(_action)
print(observation)
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
|
<commit_before>#!/usr/bin/env python
import opensim as osim
from osim.redis.client import Client
from osim.env import *
import numpy as np
import argparse
import os
"""
Please ensure that `visualize=False`, else there might be unexpected errors in your submission
"""
env = RunEnv(visualize=False)
client = Client()
# Create environment
observation = client.env_create()
"""
The grader runs N simulations of at most 1000 steps each. We stop after the last one
A new simulation start when `clinet.env_step` returns `done==True`
and all the simulatiosn end when the subsequent `client.env_reset()` returns a False
"""
while True:
_action = env.action_space.sample().tolist()
[observation, reward, done, info] = client.env_step(_action)
print(observation)
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
<commit_msg>Add a little bit more documentation for round2submission script<commit_after>#!/usr/bin/env python
import opensim as osim
from osim.redis.client import Client
from osim.env import *
import numpy as np
import argparse
import os
"""
NOTE: For testing your submission scripts, you first need to ensure that redis-server is running in the background
and you can locally run the grading service by running this script : https://github.com/crowdAI/osim-rl/blob/master/osim/redis/service.py
The client and the grading service communicate with each other by pointing to the same redis server.
"""
"""
Please ensure that `visualize=False`, else there might be unexpected errors in your submission
"""
env = RunEnv(visualize=False)
client = Client()
# Create environment
observation = client.env_create()
"""
The grader runs N simulations of at most 1000 steps each. We stop after the last one
A new simulation start when `clinet.env_step` returns `done==True`
and all the simulatiosn end when the subsequent `client.env_reset()` returns a False
"""
while True:
_action = env.action_space.sample().tolist()
[observation, reward, done, info] = client.env_step(_action)
print(observation)
if done:
observation = client.env_reset()
if not observation:
break
client.submit()
|
ceb5dfe96df6dc98d580b95296924f9c0ff50c5f
|
mrbelvedereci/trigger/models.py
|
mrbelvedereci/trigger/models.py
|
from __future__ import unicode_literals
from django.db import models
TRIGGER_TYPES = (
('manual', 'Manual'),
('commit', 'Commit'),
('tag', 'Tag'),
('pr', 'Pull Request'),
)
class Trigger(models.Model):
name = models.CharField(max_length=255)
repo = models.ForeignKey('github.Repository')
type = models.CharField(max_length=8, choices=TRIGGER_TYPES)
regex = models.CharField(max_length=255, null=True, blank=True)
build_pr_commits = models.BooleanField(default=False)
flows = models.CharField(max_length=255)
org = models.CharField(max_length=255)
context = models.CharField(max_length=255)
def __unicode__(self):
return unicode(self.name)
def check_push(self, push):
return True
def check_pull_request(self, pull_request):
return True
|
from __future__ import unicode_literals
from django.db import models
TRIGGER_TYPES = (
('manual', 'Manual'),
('commit', 'Commit'),
('tag', 'Tag'),
('pr', 'Pull Request'),
)
class Trigger(models.Model):
name = models.CharField(max_length=255)
repo = models.ForeignKey('github.Repository', related_name="triggers")
type = models.CharField(max_length=8, choices=TRIGGER_TYPES)
regex = models.CharField(max_length=255, null=True, blank=True)
build_pr_commits = models.BooleanField(default=False)
flows = models.CharField(max_length=255)
org = models.CharField(max_length=255)
context = models.CharField(max_length=255)
def __unicode__(self):
return unicode(self.name)
def check_push(self, push):
return True
def check_pull_request(self, pull_request):
return True
|
Add Repository.triggers backref to look up Triggers
|
Add Repository.triggers backref to look up Triggers
|
Python
|
bsd-3-clause
|
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
|
from __future__ import unicode_literals
from django.db import models
TRIGGER_TYPES = (
('manual', 'Manual'),
('commit', 'Commit'),
('tag', 'Tag'),
('pr', 'Pull Request'),
)
class Trigger(models.Model):
name = models.CharField(max_length=255)
repo = models.ForeignKey('github.Repository')
type = models.CharField(max_length=8, choices=TRIGGER_TYPES)
regex = models.CharField(max_length=255, null=True, blank=True)
build_pr_commits = models.BooleanField(default=False)
flows = models.CharField(max_length=255)
org = models.CharField(max_length=255)
context = models.CharField(max_length=255)
def __unicode__(self):
return unicode(self.name)
def check_push(self, push):
return True
def check_pull_request(self, pull_request):
return True
Add Repository.triggers backref to look up Triggers
|
from __future__ import unicode_literals
from django.db import models
TRIGGER_TYPES = (
('manual', 'Manual'),
('commit', 'Commit'),
('tag', 'Tag'),
('pr', 'Pull Request'),
)
class Trigger(models.Model):
name = models.CharField(max_length=255)
repo = models.ForeignKey('github.Repository', related_name="triggers")
type = models.CharField(max_length=8, choices=TRIGGER_TYPES)
regex = models.CharField(max_length=255, null=True, blank=True)
build_pr_commits = models.BooleanField(default=False)
flows = models.CharField(max_length=255)
org = models.CharField(max_length=255)
context = models.CharField(max_length=255)
def __unicode__(self):
return unicode(self.name)
def check_push(self, push):
return True
def check_pull_request(self, pull_request):
return True
|
<commit_before>from __future__ import unicode_literals
from django.db import models
TRIGGER_TYPES = (
('manual', 'Manual'),
('commit', 'Commit'),
('tag', 'Tag'),
('pr', 'Pull Request'),
)
class Trigger(models.Model):
name = models.CharField(max_length=255)
repo = models.ForeignKey('github.Repository')
type = models.CharField(max_length=8, choices=TRIGGER_TYPES)
regex = models.CharField(max_length=255, null=True, blank=True)
build_pr_commits = models.BooleanField(default=False)
flows = models.CharField(max_length=255)
org = models.CharField(max_length=255)
context = models.CharField(max_length=255)
def __unicode__(self):
return unicode(self.name)
def check_push(self, push):
return True
def check_pull_request(self, pull_request):
return True
<commit_msg>Add Repository.triggers backref to look up Triggers<commit_after>
|
from __future__ import unicode_literals
from django.db import models
TRIGGER_TYPES = (
('manual', 'Manual'),
('commit', 'Commit'),
('tag', 'Tag'),
('pr', 'Pull Request'),
)
class Trigger(models.Model):
name = models.CharField(max_length=255)
repo = models.ForeignKey('github.Repository', related_name="triggers")
type = models.CharField(max_length=8, choices=TRIGGER_TYPES)
regex = models.CharField(max_length=255, null=True, blank=True)
build_pr_commits = models.BooleanField(default=False)
flows = models.CharField(max_length=255)
org = models.CharField(max_length=255)
context = models.CharField(max_length=255)
def __unicode__(self):
return unicode(self.name)
def check_push(self, push):
return True
def check_pull_request(self, pull_request):
return True
|
from __future__ import unicode_literals
from django.db import models
TRIGGER_TYPES = (
('manual', 'Manual'),
('commit', 'Commit'),
('tag', 'Tag'),
('pr', 'Pull Request'),
)
class Trigger(models.Model):
name = models.CharField(max_length=255)
repo = models.ForeignKey('github.Repository')
type = models.CharField(max_length=8, choices=TRIGGER_TYPES)
regex = models.CharField(max_length=255, null=True, blank=True)
build_pr_commits = models.BooleanField(default=False)
flows = models.CharField(max_length=255)
org = models.CharField(max_length=255)
context = models.CharField(max_length=255)
def __unicode__(self):
return unicode(self.name)
def check_push(self, push):
return True
def check_pull_request(self, pull_request):
return True
Add Repository.triggers backref to look up Triggersfrom __future__ import unicode_literals
from django.db import models
TRIGGER_TYPES = (
('manual', 'Manual'),
('commit', 'Commit'),
('tag', 'Tag'),
('pr', 'Pull Request'),
)
class Trigger(models.Model):
name = models.CharField(max_length=255)
repo = models.ForeignKey('github.Repository', related_name="triggers")
type = models.CharField(max_length=8, choices=TRIGGER_TYPES)
regex = models.CharField(max_length=255, null=True, blank=True)
build_pr_commits = models.BooleanField(default=False)
flows = models.CharField(max_length=255)
org = models.CharField(max_length=255)
context = models.CharField(max_length=255)
def __unicode__(self):
return unicode(self.name)
def check_push(self, push):
return True
def check_pull_request(self, pull_request):
return True
|
<commit_before>from __future__ import unicode_literals
from django.db import models
TRIGGER_TYPES = (
('manual', 'Manual'),
('commit', 'Commit'),
('tag', 'Tag'),
('pr', 'Pull Request'),
)
class Trigger(models.Model):
name = models.CharField(max_length=255)
repo = models.ForeignKey('github.Repository')
type = models.CharField(max_length=8, choices=TRIGGER_TYPES)
regex = models.CharField(max_length=255, null=True, blank=True)
build_pr_commits = models.BooleanField(default=False)
flows = models.CharField(max_length=255)
org = models.CharField(max_length=255)
context = models.CharField(max_length=255)
def __unicode__(self):
return unicode(self.name)
def check_push(self, push):
return True
def check_pull_request(self, pull_request):
return True
<commit_msg>Add Repository.triggers backref to look up Triggers<commit_after>from __future__ import unicode_literals
from django.db import models
TRIGGER_TYPES = (
('manual', 'Manual'),
('commit', 'Commit'),
('tag', 'Tag'),
('pr', 'Pull Request'),
)
class Trigger(models.Model):
name = models.CharField(max_length=255)
repo = models.ForeignKey('github.Repository', related_name="triggers")
type = models.CharField(max_length=8, choices=TRIGGER_TYPES)
regex = models.CharField(max_length=255, null=True, blank=True)
build_pr_commits = models.BooleanField(default=False)
flows = models.CharField(max_length=255)
org = models.CharField(max_length=255)
context = models.CharField(max_length=255)
def __unicode__(self):
return unicode(self.name)
def check_push(self, push):
return True
def check_pull_request(self, pull_request):
return True
|
25e4fa7ade76e120afdfa1b737f8d34a6ec744b5
|
constants.py
|
constants.py
|
'''Constants used by TRender.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
LINE_IF = 1
LINE_ELSE = 2
LINE_ELIF = 4
LINE_END = 8
LINE_MACRO = 16
LINE_COMMENT = 32
LINE_BLOCK = 64
LINE_FOR = 128
LINE_PASTE = 256
LINE_TEXT = 512
LINE_INCLUDE = 1024
LINE_EXTEND = 2048
LINE_EMPTY = 4096
EOF_TEXT = 8192
ALWAYS_ALLOWED = (
LINE_IF |
LINE_MACRO |
LINE_PASTE |
LINE_TEXT |
LINE_COMMENT |
LINE_FOR |
LINE_BLOCK |
LINE_INCLUDE |
LINE_EXTEND |
LINE_EMPTY)
MAP_LINE_TYPE = {
True: LINE_TEXT,
None: LINE_EMPTY,
'if': LINE_IF,
'else': LINE_ELSE,
'elif': LINE_ELIF,
'end': LINE_END,
'for': LINE_FOR,
'macro': LINE_MACRO,
'block': LINE_BLOCK,
'include': LINE_INCLUDE,
'extend': LINE_EXTEND,
'': LINE_COMMENT
}
VAR = 'a-zA-Z0-9_'
VAR_DOTS = VAR + '\.'
FILENAME = 'a-zA-Z0-9_\./'
|
'''Constants used by TRender.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
LINE_IF = 1
LINE_ELSE = 2
LINE_ELIF = 4
LINE_END = 8
LINE_MACRO = 16
LINE_COMMENT = 32
LINE_BLOCK = 64
LINE_FOR = 128
LINE_PASTE = 256
LINE_TEXT = 512
LINE_INCLUDE = 1024
LINE_EXTEND = 2048
LINE_EMPTY = 4096
EOF_TEXT = 8192
ALWAYS_ALLOWED = (
LINE_IF |
LINE_MACRO |
LINE_PASTE |
LINE_TEXT |
LINE_COMMENT |
LINE_FOR |
LINE_BLOCK |
LINE_INCLUDE |
LINE_EXTEND |
LINE_EMPTY)
MAP_LINE_TYPE = {
True: LINE_TEXT,
None: LINE_EMPTY,
'if': LINE_IF,
'else': LINE_ELSE,
'elif': LINE_ELIF,
'end': LINE_END,
'for': LINE_FOR,
'macro': LINE_MACRO,
'block': LINE_BLOCK,
'include': LINE_INCLUDE,
'extend': LINE_EXTEND,
'': LINE_COMMENT
}
VAR = 'a-zA-Z0-9_'
VAR_DOTS = VAR + '\.'
FILENAME = 'a-zA-Z0-9_\-\./'
|
Allow "-" as a char in a filename
|
Allow "-" as a char in a filename
|
Python
|
mit
|
transceptor-technology/trender
|
'''Constants used by TRender.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
LINE_IF = 1
LINE_ELSE = 2
LINE_ELIF = 4
LINE_END = 8
LINE_MACRO = 16
LINE_COMMENT = 32
LINE_BLOCK = 64
LINE_FOR = 128
LINE_PASTE = 256
LINE_TEXT = 512
LINE_INCLUDE = 1024
LINE_EXTEND = 2048
LINE_EMPTY = 4096
EOF_TEXT = 8192
ALWAYS_ALLOWED = (
LINE_IF |
LINE_MACRO |
LINE_PASTE |
LINE_TEXT |
LINE_COMMENT |
LINE_FOR |
LINE_BLOCK |
LINE_INCLUDE |
LINE_EXTEND |
LINE_EMPTY)
MAP_LINE_TYPE = {
True: LINE_TEXT,
None: LINE_EMPTY,
'if': LINE_IF,
'else': LINE_ELSE,
'elif': LINE_ELIF,
'end': LINE_END,
'for': LINE_FOR,
'macro': LINE_MACRO,
'block': LINE_BLOCK,
'include': LINE_INCLUDE,
'extend': LINE_EXTEND,
'': LINE_COMMENT
}
VAR = 'a-zA-Z0-9_'
VAR_DOTS = VAR + '\.'
FILENAME = 'a-zA-Z0-9_\./'
Allow "-" as a char in a filename
|
'''Constants used by TRender.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
LINE_IF = 1
LINE_ELSE = 2
LINE_ELIF = 4
LINE_END = 8
LINE_MACRO = 16
LINE_COMMENT = 32
LINE_BLOCK = 64
LINE_FOR = 128
LINE_PASTE = 256
LINE_TEXT = 512
LINE_INCLUDE = 1024
LINE_EXTEND = 2048
LINE_EMPTY = 4096
EOF_TEXT = 8192
ALWAYS_ALLOWED = (
LINE_IF |
LINE_MACRO |
LINE_PASTE |
LINE_TEXT |
LINE_COMMENT |
LINE_FOR |
LINE_BLOCK |
LINE_INCLUDE |
LINE_EXTEND |
LINE_EMPTY)
MAP_LINE_TYPE = {
True: LINE_TEXT,
None: LINE_EMPTY,
'if': LINE_IF,
'else': LINE_ELSE,
'elif': LINE_ELIF,
'end': LINE_END,
'for': LINE_FOR,
'macro': LINE_MACRO,
'block': LINE_BLOCK,
'include': LINE_INCLUDE,
'extend': LINE_EXTEND,
'': LINE_COMMENT
}
VAR = 'a-zA-Z0-9_'
VAR_DOTS = VAR + '\.'
FILENAME = 'a-zA-Z0-9_\-\./'
|
<commit_before>'''Constants used by TRender.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
LINE_IF = 1
LINE_ELSE = 2
LINE_ELIF = 4
LINE_END = 8
LINE_MACRO = 16
LINE_COMMENT = 32
LINE_BLOCK = 64
LINE_FOR = 128
LINE_PASTE = 256
LINE_TEXT = 512
LINE_INCLUDE = 1024
LINE_EXTEND = 2048
LINE_EMPTY = 4096
EOF_TEXT = 8192
ALWAYS_ALLOWED = (
LINE_IF |
LINE_MACRO |
LINE_PASTE |
LINE_TEXT |
LINE_COMMENT |
LINE_FOR |
LINE_BLOCK |
LINE_INCLUDE |
LINE_EXTEND |
LINE_EMPTY)
MAP_LINE_TYPE = {
True: LINE_TEXT,
None: LINE_EMPTY,
'if': LINE_IF,
'else': LINE_ELSE,
'elif': LINE_ELIF,
'end': LINE_END,
'for': LINE_FOR,
'macro': LINE_MACRO,
'block': LINE_BLOCK,
'include': LINE_INCLUDE,
'extend': LINE_EXTEND,
'': LINE_COMMENT
}
VAR = 'a-zA-Z0-9_'
VAR_DOTS = VAR + '\.'
FILENAME = 'a-zA-Z0-9_\./'
<commit_msg>Allow "-" as a char in a filename<commit_after>
|
'''Constants used by TRender.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
LINE_IF = 1
LINE_ELSE = 2
LINE_ELIF = 4
LINE_END = 8
LINE_MACRO = 16
LINE_COMMENT = 32
LINE_BLOCK = 64
LINE_FOR = 128
LINE_PASTE = 256
LINE_TEXT = 512
LINE_INCLUDE = 1024
LINE_EXTEND = 2048
LINE_EMPTY = 4096
EOF_TEXT = 8192
ALWAYS_ALLOWED = (
LINE_IF |
LINE_MACRO |
LINE_PASTE |
LINE_TEXT |
LINE_COMMENT |
LINE_FOR |
LINE_BLOCK |
LINE_INCLUDE |
LINE_EXTEND |
LINE_EMPTY)
MAP_LINE_TYPE = {
True: LINE_TEXT,
None: LINE_EMPTY,
'if': LINE_IF,
'else': LINE_ELSE,
'elif': LINE_ELIF,
'end': LINE_END,
'for': LINE_FOR,
'macro': LINE_MACRO,
'block': LINE_BLOCK,
'include': LINE_INCLUDE,
'extend': LINE_EXTEND,
'': LINE_COMMENT
}
VAR = 'a-zA-Z0-9_'
VAR_DOTS = VAR + '\.'
FILENAME = 'a-zA-Z0-9_\-\./'
|
'''Constants used by TRender.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
LINE_IF = 1
LINE_ELSE = 2
LINE_ELIF = 4
LINE_END = 8
LINE_MACRO = 16
LINE_COMMENT = 32
LINE_BLOCK = 64
LINE_FOR = 128
LINE_PASTE = 256
LINE_TEXT = 512
LINE_INCLUDE = 1024
LINE_EXTEND = 2048
LINE_EMPTY = 4096
EOF_TEXT = 8192
ALWAYS_ALLOWED = (
LINE_IF |
LINE_MACRO |
LINE_PASTE |
LINE_TEXT |
LINE_COMMENT |
LINE_FOR |
LINE_BLOCK |
LINE_INCLUDE |
LINE_EXTEND |
LINE_EMPTY)
MAP_LINE_TYPE = {
True: LINE_TEXT,
None: LINE_EMPTY,
'if': LINE_IF,
'else': LINE_ELSE,
'elif': LINE_ELIF,
'end': LINE_END,
'for': LINE_FOR,
'macro': LINE_MACRO,
'block': LINE_BLOCK,
'include': LINE_INCLUDE,
'extend': LINE_EXTEND,
'': LINE_COMMENT
}
VAR = 'a-zA-Z0-9_'
VAR_DOTS = VAR + '\.'
FILENAME = 'a-zA-Z0-9_\./'
Allow "-" as a char in a filename'''Constants used by TRender.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
LINE_IF = 1
LINE_ELSE = 2
LINE_ELIF = 4
LINE_END = 8
LINE_MACRO = 16
LINE_COMMENT = 32
LINE_BLOCK = 64
LINE_FOR = 128
LINE_PASTE = 256
LINE_TEXT = 512
LINE_INCLUDE = 1024
LINE_EXTEND = 2048
LINE_EMPTY = 4096
EOF_TEXT = 8192
ALWAYS_ALLOWED = (
LINE_IF |
LINE_MACRO |
LINE_PASTE |
LINE_TEXT |
LINE_COMMENT |
LINE_FOR |
LINE_BLOCK |
LINE_INCLUDE |
LINE_EXTEND |
LINE_EMPTY)
MAP_LINE_TYPE = {
True: LINE_TEXT,
None: LINE_EMPTY,
'if': LINE_IF,
'else': LINE_ELSE,
'elif': LINE_ELIF,
'end': LINE_END,
'for': LINE_FOR,
'macro': LINE_MACRO,
'block': LINE_BLOCK,
'include': LINE_INCLUDE,
'extend': LINE_EXTEND,
'': LINE_COMMENT
}
VAR = 'a-zA-Z0-9_'
VAR_DOTS = VAR + '\.'
FILENAME = 'a-zA-Z0-9_\-\./'
|
<commit_before>'''Constants used by TRender.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
LINE_IF = 1
LINE_ELSE = 2
LINE_ELIF = 4
LINE_END = 8
LINE_MACRO = 16
LINE_COMMENT = 32
LINE_BLOCK = 64
LINE_FOR = 128
LINE_PASTE = 256
LINE_TEXT = 512
LINE_INCLUDE = 1024
LINE_EXTEND = 2048
LINE_EMPTY = 4096
EOF_TEXT = 8192
ALWAYS_ALLOWED = (
LINE_IF |
LINE_MACRO |
LINE_PASTE |
LINE_TEXT |
LINE_COMMENT |
LINE_FOR |
LINE_BLOCK |
LINE_INCLUDE |
LINE_EXTEND |
LINE_EMPTY)
MAP_LINE_TYPE = {
True: LINE_TEXT,
None: LINE_EMPTY,
'if': LINE_IF,
'else': LINE_ELSE,
'elif': LINE_ELIF,
'end': LINE_END,
'for': LINE_FOR,
'macro': LINE_MACRO,
'block': LINE_BLOCK,
'include': LINE_INCLUDE,
'extend': LINE_EXTEND,
'': LINE_COMMENT
}
VAR = 'a-zA-Z0-9_'
VAR_DOTS = VAR + '\.'
FILENAME = 'a-zA-Z0-9_\./'
<commit_msg>Allow "-" as a char in a filename<commit_after>'''Constants used by TRender.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
LINE_IF = 1
LINE_ELSE = 2
LINE_ELIF = 4
LINE_END = 8
LINE_MACRO = 16
LINE_COMMENT = 32
LINE_BLOCK = 64
LINE_FOR = 128
LINE_PASTE = 256
LINE_TEXT = 512
LINE_INCLUDE = 1024
LINE_EXTEND = 2048
LINE_EMPTY = 4096
EOF_TEXT = 8192
ALWAYS_ALLOWED = (
LINE_IF |
LINE_MACRO |
LINE_PASTE |
LINE_TEXT |
LINE_COMMENT |
LINE_FOR |
LINE_BLOCK |
LINE_INCLUDE |
LINE_EXTEND |
LINE_EMPTY)
MAP_LINE_TYPE = {
True: LINE_TEXT,
None: LINE_EMPTY,
'if': LINE_IF,
'else': LINE_ELSE,
'elif': LINE_ELIF,
'end': LINE_END,
'for': LINE_FOR,
'macro': LINE_MACRO,
'block': LINE_BLOCK,
'include': LINE_INCLUDE,
'extend': LINE_EXTEND,
'': LINE_COMMENT
}
VAR = 'a-zA-Z0-9_'
VAR_DOTS = VAR + '\.'
FILENAME = 'a-zA-Z0-9_\-\./'
|
3c1747f52c7d0d150803ba938398e9fd3172efc0
|
Orange/canvas/report/__init__.py
|
Orange/canvas/report/__init__.py
|
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000):
return clip_string(", ".join(s), limit, ", ")
|
import itertools
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000, less_lookups=False):
if less_lookups:
s = ", ".join(itertools.islice(s, (limit + 2) // 3))
else:
s = ", ".join(s)
return clip_string(s, limit, ", ")
|
Add option to limit the number of lookups
|
report.clipped_list: Add option to limit the number of lookups
|
Python
|
bsd-2-clause
|
cheral/orange3,marinkaz/orange3,qPCR4vir/orange3,kwikadi/orange3,marinkaz/orange3,marinkaz/orange3,qPCR4vir/orange3,cheral/orange3,qPCR4vir/orange3,qPCR4vir/orange3,kwikadi/orange3,kwikadi/orange3,cheral/orange3,cheral/orange3,marinkaz/orange3,marinkaz/orange3,kwikadi/orange3,kwikadi/orange3,marinkaz/orange3,kwikadi/orange3,qPCR4vir/orange3,qPCR4vir/orange3,cheral/orange3,cheral/orange3
|
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000):
return clip_string(", ".join(s), limit, ", ")
report.clipped_list: Add option to limit the number of lookups
|
import itertools
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000, less_lookups=False):
if less_lookups:
s = ", ".join(itertools.islice(s, (limit + 2) // 3))
else:
s = ", ".join(s)
return clip_string(s, limit, ", ")
|
<commit_before>def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000):
return clip_string(", ".join(s), limit, ", ")
<commit_msg>report.clipped_list: Add option to limit the number of lookups<commit_after>
|
import itertools
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000, less_lookups=False):
if less_lookups:
s = ", ".join(itertools.islice(s, (limit + 2) // 3))
else:
s = ", ".join(s)
return clip_string(s, limit, ", ")
|
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000):
return clip_string(", ".join(s), limit, ", ")
report.clipped_list: Add option to limit the number of lookupsimport itertools
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000, less_lookups=False):
if less_lookups:
s = ", ".join(itertools.islice(s, (limit + 2) // 3))
else:
s = ", ".join(s)
return clip_string(s, limit, ", ")
|
<commit_before>def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000):
return clip_string(", ".join(s), limit, ", ")
<commit_msg>report.clipped_list: Add option to limit the number of lookups<commit_after>import itertools
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000, less_lookups=False):
if less_lookups:
s = ", ".join(itertools.islice(s, (limit + 2) // 3))
else:
s = ", ".join(s)
return clip_string(s, limit, ", ")
|
bf81f681bd15ccfd009a901a652f5fde6a885d9b
|
Orange/canvas/report/__init__.py
|
Orange/canvas/report/__init__.py
|
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000):
return clip_string(", ".join(s), limit, ", ")
|
import itertools
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000, less_lookups=False):
if less_lookups:
s = ", ".join(itertools.islice(s, (limit + 2) // 3))
else:
s = ", ".join(s)
return clip_string(s, limit, ", ")
|
Add option to limit the number of lookups
|
report.clipped_list: Add option to limit the number of lookups
|
Python
|
bsd-2-clause
|
marinkaz/orange3,marinkaz/orange3,cheral/orange3,cheral/orange3,marinkaz/orange3,kwikadi/orange3,kwikadi/orange3,marinkaz/orange3,qPCR4vir/orange3,cheral/orange3,qPCR4vir/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,qPCR4vir/orange3,marinkaz/orange3,marinkaz/orange3,cheral/orange3,kwikadi/orange3,qPCR4vir/orange3,kwikadi/orange3,kwikadi/orange3,qPCR4vir/orange3,cheral/orange3
|
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000):
return clip_string(", ".join(s), limit, ", ")
report.clipped_list: Add option to limit the number of lookups
|
import itertools
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000, less_lookups=False):
if less_lookups:
s = ", ".join(itertools.islice(s, (limit + 2) // 3))
else:
s = ", ".join(s)
return clip_string(s, limit, ", ")
|
<commit_before>def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000):
return clip_string(", ".join(s), limit, ", ")
<commit_msg>report.clipped_list: Add option to limit the number of lookups<commit_after>
|
import itertools
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000, less_lookups=False):
if less_lookups:
s = ", ".join(itertools.islice(s, (limit + 2) // 3))
else:
s = ", ".join(s)
return clip_string(s, limit, ", ")
|
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000):
return clip_string(", ".join(s), limit, ", ")
report.clipped_list: Add option to limit the number of lookupsimport itertools
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000, less_lookups=False):
if less_lookups:
s = ", ".join(itertools.islice(s, (limit + 2) // 3))
else:
s = ", ".join(s)
return clip_string(s, limit, ", ")
|
<commit_before>def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000):
return clip_string(", ".join(s), limit, ", ")
<commit_msg>report.clipped_list: Add option to limit the number of lookups<commit_after>import itertools
def plural(s, number):
return s.format(number=number, s="s" if number % 100 != 1 else "")
def plural_w(s, number, capitalize=False):
numbers = ("zero", "one", "two", "three", "four", "five", "six", "seven",
"nine", "ten")
number_str = numbers[number] if number < len(numbers) else str(number)
if capitalize:
number_str = number_str.capitalize()
return s.format(number=number_str, s="s" if number % 100 != 1 else "")
def clip_string(s, limit=1000, sep=None):
if len(s) < limit:
return s
s = s[:limit - 3]
if sep is None:
return s
sep_pos = s.rfind(sep)
if sep_pos == -1:
return s
return s[:sep_pos + len(sep)] + "..."
def clipped_list(s, limit=1000, less_lookups=False):
if less_lookups:
s = ", ".join(itertools.islice(s, (limit + 2) // 3))
else:
s = ", ".join(s)
return clip_string(s, limit, ", ")
|
ee7a4cbe1e6745d627e58142e4a1c1bb7b972e3a
|
services/dns/__init__.py
|
services/dns/__init__.py
|
import os
from fabric.api import put, task
from fablib import authbind, requires_root
from fablib.twisted import service
@task
@requires_root
def install():
# TODO:
# - Setup zone files (incl. PYTHONPATH in script if needed)
# - Rename dns to t-names or whatever (locations, scripts,...)
# Bootstrap a new service environment
service.bootstrap('dns')
# Setup authbind
authbind.install()
authbind.allow('dns', 53)
initscript = os.path.join(os.path.dirname(__file__), 'initscript.sh')
put(initscript, '/srv/dns/etc/init.d/dns', use_sudo=True, mode=0755)
@task
def update():
# TODO
pass
@task
def start():
service.start('dns')
@task
def stop():
service.stop('dns')
@task
def restart():
service.restart('dns')
|
import os
from fabric.api import put, task, sudo
from fablib import authbind, requires_root
from fablib.twisted import service
@task
@requires_root
def install():
# TODO:
# - Setup zone files (incl. PYTHONPATH in script if needed)
# - Rename dns to t-names or whatever (locations, scripts,...)
# Bootstrap a new service environment
service.bootstrap('dns')
# Setup authbind
authbind.install()
authbind.allow('dns', 53)
initscript = os.path.join(os.path.dirname(__file__), 'initscript.sh')
put(initscript, '/srv/dns/etc/init.d/dns', use_sudo=True, mode=0755)
sudo('ln -fs /srv/dns/etc/init.d/dns /etc/init.d/dns')
sudo('update-rc.d dns defaults')
@task
def update():
# TODO
pass
@task
def start():
service.start('dns')
@task
def stop():
service.stop('dns')
@task
def restart():
service.restart('dns')
|
Install initscript link into /etc/init.d. Allows for system integration while the original file can still be modified
|
Install initscript link into /etc/init.d. Allows for system integration while the original file can still be modified
|
Python
|
mit
|
alex/braid,alex/braid
|
import os
from fabric.api import put, task
from fablib import authbind, requires_root
from fablib.twisted import service
@task
@requires_root
def install():
# TODO:
# - Setup zone files (incl. PYTHONPATH in script if needed)
# - Rename dns to t-names or whatever (locations, scripts,...)
# Bootstrap a new service environment
service.bootstrap('dns')
# Setup authbind
authbind.install()
authbind.allow('dns', 53)
initscript = os.path.join(os.path.dirname(__file__), 'initscript.sh')
put(initscript, '/srv/dns/etc/init.d/dns', use_sudo=True, mode=0755)
@task
def update():
# TODO
pass
@task
def start():
service.start('dns')
@task
def stop():
service.stop('dns')
@task
def restart():
service.restart('dns')
Install initscript link into /etc/init.d. Allows for system integration while the original file can still be modified
|
import os
from fabric.api import put, task, sudo
from fablib import authbind, requires_root
from fablib.twisted import service
@task
@requires_root
def install():
# TODO:
# - Setup zone files (incl. PYTHONPATH in script if needed)
# - Rename dns to t-names or whatever (locations, scripts,...)
# Bootstrap a new service environment
service.bootstrap('dns')
# Setup authbind
authbind.install()
authbind.allow('dns', 53)
initscript = os.path.join(os.path.dirname(__file__), 'initscript.sh')
put(initscript, '/srv/dns/etc/init.d/dns', use_sudo=True, mode=0755)
sudo('ln -fs /srv/dns/etc/init.d/dns /etc/init.d/dns')
sudo('update-rc.d dns defaults')
@task
def update():
# TODO
pass
@task
def start():
service.start('dns')
@task
def stop():
service.stop('dns')
@task
def restart():
service.restart('dns')
|
<commit_before>import os
from fabric.api import put, task
from fablib import authbind, requires_root
from fablib.twisted import service
@task
@requires_root
def install():
# TODO:
# - Setup zone files (incl. PYTHONPATH in script if needed)
# - Rename dns to t-names or whatever (locations, scripts,...)
# Bootstrap a new service environment
service.bootstrap('dns')
# Setup authbind
authbind.install()
authbind.allow('dns', 53)
initscript = os.path.join(os.path.dirname(__file__), 'initscript.sh')
put(initscript, '/srv/dns/etc/init.d/dns', use_sudo=True, mode=0755)
@task
def update():
# TODO
pass
@task
def start():
service.start('dns')
@task
def stop():
service.stop('dns')
@task
def restart():
service.restart('dns')
<commit_msg>Install initscript link into /etc/init.d. Allows for system integration while the original file can still be modified<commit_after>
|
import os
from fabric.api import put, task, sudo
from fablib import authbind, requires_root
from fablib.twisted import service
@task
@requires_root
def install():
# TODO:
# - Setup zone files (incl. PYTHONPATH in script if needed)
# - Rename dns to t-names or whatever (locations, scripts,...)
# Bootstrap a new service environment
service.bootstrap('dns')
# Setup authbind
authbind.install()
authbind.allow('dns', 53)
initscript = os.path.join(os.path.dirname(__file__), 'initscript.sh')
put(initscript, '/srv/dns/etc/init.d/dns', use_sudo=True, mode=0755)
sudo('ln -fs /srv/dns/etc/init.d/dns /etc/init.d/dns')
sudo('update-rc.d dns defaults')
@task
def update():
# TODO
pass
@task
def start():
service.start('dns')
@task
def stop():
service.stop('dns')
@task
def restart():
service.restart('dns')
|
import os
from fabric.api import put, task
from fablib import authbind, requires_root
from fablib.twisted import service
@task
@requires_root
def install():
# TODO:
# - Setup zone files (incl. PYTHONPATH in script if needed)
# - Rename dns to t-names or whatever (locations, scripts,...)
# Bootstrap a new service environment
service.bootstrap('dns')
# Setup authbind
authbind.install()
authbind.allow('dns', 53)
initscript = os.path.join(os.path.dirname(__file__), 'initscript.sh')
put(initscript, '/srv/dns/etc/init.d/dns', use_sudo=True, mode=0755)
@task
def update():
# TODO
pass
@task
def start():
service.start('dns')
@task
def stop():
service.stop('dns')
@task
def restart():
service.restart('dns')
Install initscript link into /etc/init.d. Allows for system integration while the original file can still be modifiedimport os
from fabric.api import put, task, sudo
from fablib import authbind, requires_root
from fablib.twisted import service
@task
@requires_root
def install():
# TODO:
# - Setup zone files (incl. PYTHONPATH in script if needed)
# - Rename dns to t-names or whatever (locations, scripts,...)
# Bootstrap a new service environment
service.bootstrap('dns')
# Setup authbind
authbind.install()
authbind.allow('dns', 53)
initscript = os.path.join(os.path.dirname(__file__), 'initscript.sh')
put(initscript, '/srv/dns/etc/init.d/dns', use_sudo=True, mode=0755)
sudo('ln -fs /srv/dns/etc/init.d/dns /etc/init.d/dns')
sudo('update-rc.d dns defaults')
@task
def update():
# TODO
pass
@task
def start():
service.start('dns')
@task
def stop():
service.stop('dns')
@task
def restart():
service.restart('dns')
|
<commit_before>import os
from fabric.api import put, task
from fablib import authbind, requires_root
from fablib.twisted import service
@task
@requires_root
def install():
# TODO:
# - Setup zone files (incl. PYTHONPATH in script if needed)
# - Rename dns to t-names or whatever (locations, scripts,...)
# Bootstrap a new service environment
service.bootstrap('dns')
# Setup authbind
authbind.install()
authbind.allow('dns', 53)
initscript = os.path.join(os.path.dirname(__file__), 'initscript.sh')
put(initscript, '/srv/dns/etc/init.d/dns', use_sudo=True, mode=0755)
@task
def update():
# TODO
pass
@task
def start():
service.start('dns')
@task
def stop():
service.stop('dns')
@task
def restart():
service.restart('dns')
<commit_msg>Install initscript link into /etc/init.d. Allows for system integration while the original file can still be modified<commit_after>import os
from fabric.api import put, task, sudo
from fablib import authbind, requires_root
from fablib.twisted import service
@task
@requires_root
def install():
# TODO:
# - Setup zone files (incl. PYTHONPATH in script if needed)
# - Rename dns to t-names or whatever (locations, scripts,...)
# Bootstrap a new service environment
service.bootstrap('dns')
# Setup authbind
authbind.install()
authbind.allow('dns', 53)
initscript = os.path.join(os.path.dirname(__file__), 'initscript.sh')
put(initscript, '/srv/dns/etc/init.d/dns', use_sudo=True, mode=0755)
sudo('ln -fs /srv/dns/etc/init.d/dns /etc/init.d/dns')
sudo('update-rc.d dns defaults')
@task
def update():
# TODO
pass
@task
def start():
service.start('dns')
@task
def stop():
service.stop('dns')
@task
def restart():
service.restart('dns')
|
f3a327d8fc5f43ad82f0696cef4a14e6dd2533ea
|
ognskylines/commands/gateway.py
|
ognskylines/commands/gateway.py
|
import logging
from ognskylines.gateway import ognSkylinesGateway
from ognskylines.dbutils import session
from manager import Manager
gateway_manager = Manager()
@gateway_manager.command
def run(logfile=''):
"""Run the ogn-->skylines gateway."""
# Enable logging
log_handlers = [logging.StreamHandler()]
if logfile:
log_handlers.append(logging.FileHandler(logfile))
logging.basicConfig(level='INFO', handlers=log_handlers)
logging.getLogger('ognskylines').setLevel('DEBUG')
print('Start ogn-skylines gateway')
skylines_gateway = ognSkylinesGateway(session=session, aprs_user='skylines', host='127.0.0.1', port=5597)
try:
skylines_gateway.run()
except KeyboardInterrupt:
print('\nStop ogn-skylines gateway')
skylines_gateway.disconnect()
logging.shutdown()
|
import logging
from ognskylines.gateway import ognSkylinesGateway
from ognskylines.dbutils import session
from manager import Manager
gateway_manager = Manager()
@gateway_manager.command
def run(skylines_host='127.0.0.1', skylines_port=5597, logfile=''):
"""Run the ogn-->skylines gateway."""
# Enable logging
log_handlers = [logging.StreamHandler()]
if logfile:
log_handlers.append(logging.FileHandler(logfile))
logging.basicConfig(level='INFO', handlers=log_handlers)
logging.getLogger('ognskylines').setLevel('DEBUG')
print('Start ogn-skylines gateway')
skylines_gateway = ognSkylinesGateway(session=session, aprs_user='skylines', host=skylines_host, port=skylines_port)
try:
skylines_gateway.run()
except KeyboardInterrupt:
print('\nStop ogn-skylines gateway')
skylines_gateway.disconnect()
logging.shutdown()
|
Add arguments skylines-host and -port
|
manage.py: Add arguments skylines-host and -port
|
Python
|
agpl-3.0
|
kerel-fs/ogn-skylines-gateway,kerel-fs/ogn-skylines-gateway
|
import logging
from ognskylines.gateway import ognSkylinesGateway
from ognskylines.dbutils import session
from manager import Manager
gateway_manager = Manager()
@gateway_manager.command
def run(logfile=''):
"""Run the ogn-->skylines gateway."""
# Enable logging
log_handlers = [logging.StreamHandler()]
if logfile:
log_handlers.append(logging.FileHandler(logfile))
logging.basicConfig(level='INFO', handlers=log_handlers)
logging.getLogger('ognskylines').setLevel('DEBUG')
print('Start ogn-skylines gateway')
skylines_gateway = ognSkylinesGateway(session=session, aprs_user='skylines', host='127.0.0.1', port=5597)
try:
skylines_gateway.run()
except KeyboardInterrupt:
print('\nStop ogn-skylines gateway')
skylines_gateway.disconnect()
logging.shutdown()
manage.py: Add arguments skylines-host and -port
|
import logging
from ognskylines.gateway import ognSkylinesGateway
from ognskylines.dbutils import session
from manager import Manager
gateway_manager = Manager()
@gateway_manager.command
def run(skylines_host='127.0.0.1', skylines_port=5597, logfile=''):
"""Run the ogn-->skylines gateway."""
# Enable logging
log_handlers = [logging.StreamHandler()]
if logfile:
log_handlers.append(logging.FileHandler(logfile))
logging.basicConfig(level='INFO', handlers=log_handlers)
logging.getLogger('ognskylines').setLevel('DEBUG')
print('Start ogn-skylines gateway')
skylines_gateway = ognSkylinesGateway(session=session, aprs_user='skylines', host=skylines_host, port=skylines_port)
try:
skylines_gateway.run()
except KeyboardInterrupt:
print('\nStop ogn-skylines gateway')
skylines_gateway.disconnect()
logging.shutdown()
|
<commit_before>import logging
from ognskylines.gateway import ognSkylinesGateway
from ognskylines.dbutils import session
from manager import Manager
gateway_manager = Manager()
@gateway_manager.command
def run(logfile=''):
"""Run the ogn-->skylines gateway."""
# Enable logging
log_handlers = [logging.StreamHandler()]
if logfile:
log_handlers.append(logging.FileHandler(logfile))
logging.basicConfig(level='INFO', handlers=log_handlers)
logging.getLogger('ognskylines').setLevel('DEBUG')
print('Start ogn-skylines gateway')
skylines_gateway = ognSkylinesGateway(session=session, aprs_user='skylines', host='127.0.0.1', port=5597)
try:
skylines_gateway.run()
except KeyboardInterrupt:
print('\nStop ogn-skylines gateway')
skylines_gateway.disconnect()
logging.shutdown()
<commit_msg>manage.py: Add arguments skylines-host and -port<commit_after>
|
import logging
from ognskylines.gateway import ognSkylinesGateway
from ognskylines.dbutils import session
from manager import Manager
gateway_manager = Manager()
@gateway_manager.command
def run(skylines_host='127.0.0.1', skylines_port=5597, logfile=''):
"""Run the ogn-->skylines gateway."""
# Enable logging
log_handlers = [logging.StreamHandler()]
if logfile:
log_handlers.append(logging.FileHandler(logfile))
logging.basicConfig(level='INFO', handlers=log_handlers)
logging.getLogger('ognskylines').setLevel('DEBUG')
print('Start ogn-skylines gateway')
skylines_gateway = ognSkylinesGateway(session=session, aprs_user='skylines', host=skylines_host, port=skylines_port)
try:
skylines_gateway.run()
except KeyboardInterrupt:
print('\nStop ogn-skylines gateway')
skylines_gateway.disconnect()
logging.shutdown()
|
import logging
from ognskylines.gateway import ognSkylinesGateway
from ognskylines.dbutils import session
from manager import Manager
gateway_manager = Manager()
@gateway_manager.command
def run(logfile=''):
"""Run the ogn-->skylines gateway."""
# Enable logging
log_handlers = [logging.StreamHandler()]
if logfile:
log_handlers.append(logging.FileHandler(logfile))
logging.basicConfig(level='INFO', handlers=log_handlers)
logging.getLogger('ognskylines').setLevel('DEBUG')
print('Start ogn-skylines gateway')
skylines_gateway = ognSkylinesGateway(session=session, aprs_user='skylines', host='127.0.0.1', port=5597)
try:
skylines_gateway.run()
except KeyboardInterrupt:
print('\nStop ogn-skylines gateway')
skylines_gateway.disconnect()
logging.shutdown()
manage.py: Add arguments skylines-host and -portimport logging
from ognskylines.gateway import ognSkylinesGateway
from ognskylines.dbutils import session
from manager import Manager
gateway_manager = Manager()
@gateway_manager.command
def run(skylines_host='127.0.0.1', skylines_port=5597, logfile=''):
"""Run the ogn-->skylines gateway."""
# Enable logging
log_handlers = [logging.StreamHandler()]
if logfile:
log_handlers.append(logging.FileHandler(logfile))
logging.basicConfig(level='INFO', handlers=log_handlers)
logging.getLogger('ognskylines').setLevel('DEBUG')
print('Start ogn-skylines gateway')
skylines_gateway = ognSkylinesGateway(session=session, aprs_user='skylines', host=skylines_host, port=skylines_port)
try:
skylines_gateway.run()
except KeyboardInterrupt:
print('\nStop ogn-skylines gateway')
skylines_gateway.disconnect()
logging.shutdown()
|
<commit_before>import logging
from ognskylines.gateway import ognSkylinesGateway
from ognskylines.dbutils import session
from manager import Manager
gateway_manager = Manager()
@gateway_manager.command
def run(logfile=''):
"""Run the ogn-->skylines gateway."""
# Enable logging
log_handlers = [logging.StreamHandler()]
if logfile:
log_handlers.append(logging.FileHandler(logfile))
logging.basicConfig(level='INFO', handlers=log_handlers)
logging.getLogger('ognskylines').setLevel('DEBUG')
print('Start ogn-skylines gateway')
skylines_gateway = ognSkylinesGateway(session=session, aprs_user='skylines', host='127.0.0.1', port=5597)
try:
skylines_gateway.run()
except KeyboardInterrupt:
print('\nStop ogn-skylines gateway')
skylines_gateway.disconnect()
logging.shutdown()
<commit_msg>manage.py: Add arguments skylines-host and -port<commit_after>import logging
from ognskylines.gateway import ognSkylinesGateway
from ognskylines.dbutils import session
from manager import Manager
gateway_manager = Manager()
@gateway_manager.command
def run(skylines_host='127.0.0.1', skylines_port=5597, logfile=''):
"""Run the ogn-->skylines gateway."""
# Enable logging
log_handlers = [logging.StreamHandler()]
if logfile:
log_handlers.append(logging.FileHandler(logfile))
logging.basicConfig(level='INFO', handlers=log_handlers)
logging.getLogger('ognskylines').setLevel('DEBUG')
print('Start ogn-skylines gateway')
skylines_gateway = ognSkylinesGateway(session=session, aprs_user='skylines', host=skylines_host, port=skylines_port)
try:
skylines_gateway.run()
except KeyboardInterrupt:
print('\nStop ogn-skylines gateway')
skylines_gateway.disconnect()
logging.shutdown()
|
2288ff574db552dd5c078102f9bbed1b0c3b6490
|
forms.py
|
forms.py
|
from flask.ext.wtf import Form, TextField, PasswordField, BooleanField, validators
from models import User
class LoginForm(Form):
username = TextField('username', [validators.Required()])
password = PasswordField('password', [validators.Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
|
from flask.ext.wtf import Form
from wtforms.fields import TextField, PasswordField, BooleanField
from wtforms.validators import Required
from models import User
class LoginForm(Form):
username = TextField('username', [Required()])
password = PasswordField('password', [Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
|
Update Flask-WTF imports to >0.9.0-style
|
Update Flask-WTF imports to >0.9.0-style
|
Python
|
mit
|
mahrz/kernkrieg,mahrz/kernkrieg,mahrz/kernkrieg
|
from flask.ext.wtf import Form, TextField, PasswordField, BooleanField, validators
from models import User
class LoginForm(Form):
username = TextField('username', [validators.Required()])
password = PasswordField('password', [validators.Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
Update Flask-WTF imports to >0.9.0-style
|
from flask.ext.wtf import Form
from wtforms.fields import TextField, PasswordField, BooleanField
from wtforms.validators import Required
from models import User
class LoginForm(Form):
username = TextField('username', [Required()])
password = PasswordField('password', [Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
|
<commit_before>from flask.ext.wtf import Form, TextField, PasswordField, BooleanField, validators
from models import User
class LoginForm(Form):
username = TextField('username', [validators.Required()])
password = PasswordField('password', [validators.Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
<commit_msg>Update Flask-WTF imports to >0.9.0-style<commit_after>
|
from flask.ext.wtf import Form
from wtforms.fields import TextField, PasswordField, BooleanField
from wtforms.validators import Required
from models import User
class LoginForm(Form):
username = TextField('username', [Required()])
password = PasswordField('password', [Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
|
from flask.ext.wtf import Form, TextField, PasswordField, BooleanField, validators
from models import User
class LoginForm(Form):
username = TextField('username', [validators.Required()])
password = PasswordField('password', [validators.Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
Update Flask-WTF imports to >0.9.0-stylefrom flask.ext.wtf import Form
from wtforms.fields import TextField, PasswordField, BooleanField
from wtforms.validators import Required
from models import User
class LoginForm(Form):
username = TextField('username', [Required()])
password = PasswordField('password', [Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
|
<commit_before>from flask.ext.wtf import Form, TextField, PasswordField, BooleanField, validators
from models import User
class LoginForm(Form):
username = TextField('username', [validators.Required()])
password = PasswordField('password', [validators.Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
<commit_msg>Update Flask-WTF imports to >0.9.0-style<commit_after>from flask.ext.wtf import Form
from wtforms.fields import TextField, PasswordField, BooleanField
from wtforms.validators import Required
from models import User
class LoginForm(Form):
username = TextField('username', [Required()])
password = PasswordField('password', [Required()])
remember = BooleanField('remember')
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.user = None
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None:
self.username.errors.append('Unknown username')
return False
if not user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
|
ef6bfe9a1ef25979a8e78a0c05012974c2d0d974
|
opentreemap/opentreemap/util.py
|
opentreemap/opentreemap/util.py
|
from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, **kwargs2)
return routed
def json_from_request(request):
"""
Accessing body throws an exception when using the Django test
client in to make requests in unit tests.
"""
try:
data = json.loads(request.body)
except Exception:
data = request.POST
return data
|
from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, *args2, **kwargs2)
return routed
def json_from_request(request):
"""
Accessing body throws an exception when using the Django test
client in to make requests in unit tests.
"""
try:
data = json.loads(request.body)
except Exception:
data = request.POST
return data
|
Fix route function to support positional args
|
Fix route function to support positional args
|
Python
|
agpl-3.0
|
maurizi/otm-core,recklessromeo/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,maurizi/otm-core,maurizi/otm-core,recklessromeo/otm-core,maurizi/otm-core,recklessromeo/otm-core,clever-crow-consulting/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,RickMohr/otm-core
|
from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, **kwargs2)
return routed
def json_from_request(request):
"""
Accessing body throws an exception when using the Django test
client in to make requests in unit tests.
"""
try:
data = json.loads(request.body)
except Exception:
data = request.POST
return data
Fix route function to support positional args
|
from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, *args2, **kwargs2)
return routed
def json_from_request(request):
"""
Accessing body throws an exception when using the Django test
client in to make requests in unit tests.
"""
try:
data = json.loads(request.body)
except Exception:
data = request.POST
return data
|
<commit_before>from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, **kwargs2)
return routed
def json_from_request(request):
"""
Accessing body throws an exception when using the Django test
client in to make requests in unit tests.
"""
try:
data = json.loads(request.body)
except Exception:
data = request.POST
return data
<commit_msg>Fix route function to support positional args<commit_after>
|
from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, *args2, **kwargs2)
return routed
def json_from_request(request):
"""
Accessing body throws an exception when using the Django test
client in to make requests in unit tests.
"""
try:
data = json.loads(request.body)
except Exception:
data = request.POST
return data
|
from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, **kwargs2)
return routed
def json_from_request(request):
"""
Accessing body throws an exception when using the Django test
client in to make requests in unit tests.
"""
try:
data = json.loads(request.body)
except Exception:
data = request.POST
return data
Fix route function to support positional argsfrom django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, *args2, **kwargs2)
return routed
def json_from_request(request):
"""
Accessing body throws an exception when using the Django test
client in to make requests in unit tests.
"""
try:
data = json.loads(request.body)
except Exception:
data = request.POST
return data
|
<commit_before>from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, **kwargs2)
return routed
def json_from_request(request):
"""
Accessing body throws an exception when using the Django test
client in to make requests in unit tests.
"""
try:
data = json.loads(request.body)
except Exception:
data = request.POST
return data
<commit_msg>Fix route function to support positional args<commit_after>from django.views.decorators.csrf import csrf_exempt
import json
def route(**kwargs):
@csrf_exempt
def routed(request, *args2, **kwargs2):
method = request.method
req_method = kwargs[method]
return req_method(request, *args2, **kwargs2)
return routed
def json_from_request(request):
"""
Accessing body throws an exception when using the Django test
client in to make requests in unit tests.
"""
try:
data = json.loads(request.body)
except Exception:
data = request.POST
return data
|
db295ff62ac945e03b97e405077e0fa501e4b1c4
|
src/setup.py
|
src/setup.py
|
# Packaging script for the "cmdtree" library (setuptools-based).
import os
from setuptools import setup, find_packages

# Directory containing this setup.py; used as the package search root.
HERE = os.path.abspath(os.path.dirname(__file__))

# Runtime dependencies installed alongside the package.
install_requires = (
    "argparse",
    "six>=1.10.0",
)

setup(
    name='cmdtree',
    version='0.0.2',
    packages=find_packages(HERE, include=['cmdtree']),
    install_requires=install_requires,
    url='https://github.com/winkidney/cmdtree',
    license='MIT',
    author='winkidney',
    author_email='winkidney@gmail.com',
    description='Yet another cli tool library ,'
                'sub-command friendly, '
                'designed for cli auto-generating.',
)
|
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
install_requires = (
"argparse",
"six>=1.10.0",
)
setup(
name='cmdtree',
version='0.0.3',
packages=find_packages(HERE, include=['cmdtree']),
install_requires=install_requires,
url='https://github.com/winkidney/cmdtree',
license='MIT',
author='winkidney',
author_email='winkidney@gmail.com',
description='Yet another cli tool library ,'
'sub-command friendly, '
'designed for cli auto-generating.',
)
|
Update Version to 0.0.3 caused by pypi's limit
|
Update: Update Version to 0.0.3 caused by pypi's limit
|
Python
|
mit
|
winkidney/cmdtree,winkidney/cmdtree
|
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
install_requires = (
"argparse",
"six>=1.10.0",
)
setup(
name='cmdtree',
version='0.0.2',
packages=find_packages(HERE, include=['cmdtree']),
install_requires=install_requires,
url='https://github.com/winkidney/cmdtree',
license='MIT',
author='winkidney',
author_email='winkidney@gmail.com',
description='Yet another cli tool library ,'
'sub-command friendly, '
'designed for cli auto-generating.',
)
Update: Update Version to 0.0.3 caused by pypi's limit
|
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
install_requires = (
"argparse",
"six>=1.10.0",
)
setup(
name='cmdtree',
version='0.0.3',
packages=find_packages(HERE, include=['cmdtree']),
install_requires=install_requires,
url='https://github.com/winkidney/cmdtree',
license='MIT',
author='winkidney',
author_email='winkidney@gmail.com',
description='Yet another cli tool library ,'
'sub-command friendly, '
'designed for cli auto-generating.',
)
|
<commit_before>import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
install_requires = (
"argparse",
"six>=1.10.0",
)
setup(
name='cmdtree',
version='0.0.2',
packages=find_packages(HERE, include=['cmdtree']),
install_requires=install_requires,
url='https://github.com/winkidney/cmdtree',
license='MIT',
author='winkidney',
author_email='winkidney@gmail.com',
description='Yet another cli tool library ,'
'sub-command friendly, '
'designed for cli auto-generating.',
)
<commit_msg>Update: Update Version to 0.0.3 caused by pypi's limit<commit_after>
|
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
install_requires = (
"argparse",
"six>=1.10.0",
)
setup(
name='cmdtree',
version='0.0.3',
packages=find_packages(HERE, include=['cmdtree']),
install_requires=install_requires,
url='https://github.com/winkidney/cmdtree',
license='MIT',
author='winkidney',
author_email='winkidney@gmail.com',
description='Yet another cli tool library ,'
'sub-command friendly, '
'designed for cli auto-generating.',
)
|
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
install_requires = (
"argparse",
"six>=1.10.0",
)
setup(
name='cmdtree',
version='0.0.2',
packages=find_packages(HERE, include=['cmdtree']),
install_requires=install_requires,
url='https://github.com/winkidney/cmdtree',
license='MIT',
author='winkidney',
author_email='winkidney@gmail.com',
description='Yet another cli tool library ,'
'sub-command friendly, '
'designed for cli auto-generating.',
)
Update: Update Version to 0.0.3 caused by pypi's limitimport os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
install_requires = (
"argparse",
"six>=1.10.0",
)
setup(
name='cmdtree',
version='0.0.3',
packages=find_packages(HERE, include=['cmdtree']),
install_requires=install_requires,
url='https://github.com/winkidney/cmdtree',
license='MIT',
author='winkidney',
author_email='winkidney@gmail.com',
description='Yet another cli tool library ,'
'sub-command friendly, '
'designed for cli auto-generating.',
)
|
<commit_before>import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
install_requires = (
"argparse",
"six>=1.10.0",
)
setup(
name='cmdtree',
version='0.0.2',
packages=find_packages(HERE, include=['cmdtree']),
install_requires=install_requires,
url='https://github.com/winkidney/cmdtree',
license='MIT',
author='winkidney',
author_email='winkidney@gmail.com',
description='Yet another cli tool library ,'
'sub-command friendly, '
'designed for cli auto-generating.',
)
<commit_msg>Update: Update Version to 0.0.3 caused by pypi's limit<commit_after>import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
install_requires = (
"argparse",
"six>=1.10.0",
)
setup(
name='cmdtree',
version='0.0.3',
packages=find_packages(HERE, include=['cmdtree']),
install_requires=install_requires,
url='https://github.com/winkidney/cmdtree',
license='MIT',
author='winkidney',
author_email='winkidney@gmail.com',
description='Yet another cli tool library ,'
'sub-command friendly, '
'designed for cli auto-generating.',
)
|
890647cc0cd952ed1a52bdd96f7e9dd8c28810c9
|
socketlabs/socketlabs.py
|
socketlabs/socketlabs.py
|
import requests
from . constants import BASE_URL
from . exceptions import SocketLabsUnauthorized
class SocketLabs():
    """Minimal client for the SocketLabs reporting API."""

    def __init__(self, username=None, password=None, serverid=None):
        # Fail fast on missing credentials instead of sending a request
        # that is guaranteed to be rejected.  Keyword defaults are kept
        # so existing keyword-style callers keep working.
        if username is None:
            raise RuntimeError("username not defined")
        if password is None:
            raise RuntimeError("password not defined")
        if serverid is None:
            raise RuntimeError("serverid not defined")
        self._username = username
        self._password = password
        self._serverid = serverid

    def failedMessages(self, **kwargs):
        """Return failed-message data parsed from JSON.

        Extra keyword arguments are passed through as query parameters.
        Raises SocketLabsUnauthorized on any non-200 response.
        """
        url = BASE_URL + '/messagesFailed'
        headers = {'Accept': 'application/json'}
        params = {'serverId': self._serverid, 'type': 'json'}
        # Apply any custom parameters passed in
        for key, value in kwargs.items():
            params[key] = value
        r = requests.get(url, params=params, headers=headers,
                         auth=(self._username, self._password))
        if r.status_code == 200:
            return r.json()
        else:
            # NOTE(review): any non-200 (incl. 404/500) is reported as
            # "unauthorized"; kept as-is since callers catch this type.
            raise SocketLabsUnauthorized(r)
|
import requests
from . constants import BASE_URL
from . exceptions import SocketLabsUnauthorized
class SocketLabs():
def __init__(self, username, password, serverid):
if username is None:
raise RuntimeError("username not defined")
if password is None:
raise RuntimeError("password not defined")
if serverid is None:
raise RuntimeError("serverid not defined")
self._username = username
self._password = password
self._serverid = serverid
def failedMessages(self, **kwargs):
url = BASE_URL + '/messagesFailed'
headers = {'Accept': 'application/json'}
params = {'serverId': self._serverid, 'type': 'json'}
# Apply any custom parameters passed in
for key, value in kwargs.items():
params[key] = value
r = requests.get(url, params=params, headers=headers,
auth=(self._username, self._password))
if r.status_code == 200:
return r.json()
else:
raise SocketLabsUnauthorized(r)
|
Add checks for username/password/serverid being defined
|
Add checks for username/password/serverid being defined
|
Python
|
mit
|
MattHealy/socketlabs-python
|
import requests
from . constants import BASE_URL
from . exceptions import SocketLabsUnauthorized
class SocketLabs():
def __init__(self, username=None, password=None, serverid=None):
self._username = username
self._password = password
self._serverid = serverid
def failedMessages(self, **kwargs):
url = BASE_URL + '/messagesFailed'
headers = {'Accept': 'application/json'}
params = {'serverId': self._serverid, 'type': 'json'}
# Apply any custom parameters passed in
for key, value in kwargs.items():
params[key] = value
r = requests.get(url, params=params, headers=headers,
auth=(self._username, self._password))
if r.status_code == 200:
return r.json()
else:
raise SocketLabsUnauthorized(r)
Add checks for username/password/serverid being defined
|
import requests
from . constants import BASE_URL
from . exceptions import SocketLabsUnauthorized
class SocketLabs():
def __init__(self, username, password, serverid):
if username is None:
raise RuntimeError("username not defined")
if password is None:
raise RuntimeError("password not defined")
if serverid is None:
raise RuntimeError("serverid not defined")
self._username = username
self._password = password
self._serverid = serverid
def failedMessages(self, **kwargs):
url = BASE_URL + '/messagesFailed'
headers = {'Accept': 'application/json'}
params = {'serverId': self._serverid, 'type': 'json'}
# Apply any custom parameters passed in
for key, value in kwargs.items():
params[key] = value
r = requests.get(url, params=params, headers=headers,
auth=(self._username, self._password))
if r.status_code == 200:
return r.json()
else:
raise SocketLabsUnauthorized(r)
|
<commit_before>import requests
from . constants import BASE_URL
from . exceptions import SocketLabsUnauthorized
class SocketLabs():
def __init__(self, username=None, password=None, serverid=None):
self._username = username
self._password = password
self._serverid = serverid
def failedMessages(self, **kwargs):
url = BASE_URL + '/messagesFailed'
headers = {'Accept': 'application/json'}
params = {'serverId': self._serverid, 'type': 'json'}
# Apply any custom parameters passed in
for key, value in kwargs.items():
params[key] = value
r = requests.get(url, params=params, headers=headers,
auth=(self._username, self._password))
if r.status_code == 200:
return r.json()
else:
raise SocketLabsUnauthorized(r)
<commit_msg>Add checks for username/password/serverid being defined<commit_after>
|
import requests
from . constants import BASE_URL
from . exceptions import SocketLabsUnauthorized
class SocketLabs():
def __init__(self, username, password, serverid):
if username is None:
raise RuntimeError("username not defined")
if password is None:
raise RuntimeError("password not defined")
if serverid is None:
raise RuntimeError("serverid not defined")
self._username = username
self._password = password
self._serverid = serverid
def failedMessages(self, **kwargs):
url = BASE_URL + '/messagesFailed'
headers = {'Accept': 'application/json'}
params = {'serverId': self._serverid, 'type': 'json'}
# Apply any custom parameters passed in
for key, value in kwargs.items():
params[key] = value
r = requests.get(url, params=params, headers=headers,
auth=(self._username, self._password))
if r.status_code == 200:
return r.json()
else:
raise SocketLabsUnauthorized(r)
|
import requests
from . constants import BASE_URL
from . exceptions import SocketLabsUnauthorized
class SocketLabs():
def __init__(self, username=None, password=None, serverid=None):
self._username = username
self._password = password
self._serverid = serverid
def failedMessages(self, **kwargs):
url = BASE_URL + '/messagesFailed'
headers = {'Accept': 'application/json'}
params = {'serverId': self._serverid, 'type': 'json'}
# Apply any custom parameters passed in
for key, value in kwargs.items():
params[key] = value
r = requests.get(url, params=params, headers=headers,
auth=(self._username, self._password))
if r.status_code == 200:
return r.json()
else:
raise SocketLabsUnauthorized(r)
Add checks for username/password/serverid being definedimport requests
from . constants import BASE_URL
from . exceptions import SocketLabsUnauthorized
class SocketLabs():
def __init__(self, username, password, serverid):
if username is None:
raise RuntimeError("username not defined")
if password is None:
raise RuntimeError("password not defined")
if serverid is None:
raise RuntimeError("serverid not defined")
self._username = username
self._password = password
self._serverid = serverid
def failedMessages(self, **kwargs):
url = BASE_URL + '/messagesFailed'
headers = {'Accept': 'application/json'}
params = {'serverId': self._serverid, 'type': 'json'}
# Apply any custom parameters passed in
for key, value in kwargs.items():
params[key] = value
r = requests.get(url, params=params, headers=headers,
auth=(self._username, self._password))
if r.status_code == 200:
return r.json()
else:
raise SocketLabsUnauthorized(r)
|
<commit_before>import requests
from . constants import BASE_URL
from . exceptions import SocketLabsUnauthorized
class SocketLabs():
def __init__(self, username=None, password=None, serverid=None):
self._username = username
self._password = password
self._serverid = serverid
def failedMessages(self, **kwargs):
url = BASE_URL + '/messagesFailed'
headers = {'Accept': 'application/json'}
params = {'serverId': self._serverid, 'type': 'json'}
# Apply any custom parameters passed in
for key, value in kwargs.items():
params[key] = value
r = requests.get(url, params=params, headers=headers,
auth=(self._username, self._password))
if r.status_code == 200:
return r.json()
else:
raise SocketLabsUnauthorized(r)
<commit_msg>Add checks for username/password/serverid being defined<commit_after>import requests
from . constants import BASE_URL
from . exceptions import SocketLabsUnauthorized
class SocketLabs():
def __init__(self, username, password, serverid):
if username is None:
raise RuntimeError("username not defined")
if password is None:
raise RuntimeError("password not defined")
if serverid is None:
raise RuntimeError("serverid not defined")
self._username = username
self._password = password
self._serverid = serverid
def failedMessages(self, **kwargs):
url = BASE_URL + '/messagesFailed'
headers = {'Accept': 'application/json'}
params = {'serverId': self._serverid, 'type': 'json'}
# Apply any custom parameters passed in
for key, value in kwargs.items():
params[key] = value
r = requests.get(url, params=params, headers=headers,
auth=(self._username, self._password))
if r.status_code == 200:
return r.json()
else:
raise SocketLabsUnauthorized(r)
|
ea83b615ef6fcaf71eb5e5d656585056757ac64f
|
{{cookiecutter.app_name}}/views.py
|
{{cookiecutter.app_name}}/views.py
|
from django.core.urlresolvers import reverse_lazy
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView
from .forms import {{ cookiecutter.model_name }}Form
from .models import {{ cookiecutter.model_name }}
class {{ cookiecutter.model_name }}CRUDView(object):
model = {{ cookiecutter.model_name }}
form_class = {{ cookiecutter.model_name }}Form
success_url = reverse_lazy('{{ cookiecutter.model_name|lower }}_list')
class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView):
pass
class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView):
pass
class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView):
pass
class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView):
pass
class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView):
pass
|
from django.core.urlresolvers import reverse
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView

from .forms import {{ cookiecutter.model_name }}Form
from .models import {{ cookiecutter.model_name }}


# Shared configuration mixin for all CRUD views of the generated model.
# NOTE: this is a cookiecutter template; {{ ... }} placeholders are
# substituted at project-generation time.
class {{ cookiecutter.model_name }}CRUDView(object):
    model = {{ cookiecutter.model_name }}
    form_class = {{ cookiecutter.model_name }}Form

    def get_success_url(self):
        # Resolve at request time: a reverse_lazy class attribute caused
        # problems on Python 3, hence the method form.
        return reverse('{{ cookiecutter.model_name|lower }}_list')


# Concrete views: django-vanilla-views supplies the behavior; the mixin
# above supplies the model/form/success-url configuration.
class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView):
    pass


class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView):
    pass


class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView):
    pass


class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView):
    pass


class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView):
    pass
|
Use get_success_url to work around reverse_lazy issue on Python3.
|
Use get_success_url to work around reverse_lazy issue on Python3.
|
Python
|
bsd-3-clause
|
janusnic/cookiecutter-django-crud,wildfish/cookiecutter-django-crud,cericoda/cookiecutter-django-crud,janusnic/cookiecutter-django-crud,wildfish/cookiecutter-django-crud,cericoda/cookiecutter-django-crud
|
from django.core.urlresolvers import reverse_lazy
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView
from .forms import {{ cookiecutter.model_name }}Form
from .models import {{ cookiecutter.model_name }}
class {{ cookiecutter.model_name }}CRUDView(object):
model = {{ cookiecutter.model_name }}
form_class = {{ cookiecutter.model_name }}Form
success_url = reverse_lazy('{{ cookiecutter.model_name|lower }}_list')
class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView):
pass
class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView):
pass
class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView):
pass
class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView):
pass
class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView):
pass
Use get_success_url to work around reverse_lazy issue on Python3.
|
from django.core.urlresolvers import reverse
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView
from .forms import {{ cookiecutter.model_name }}Form
from .models import {{ cookiecutter.model_name }}
class {{ cookiecutter.model_name }}CRUDView(object):
model = {{ cookiecutter.model_name }}
form_class = {{ cookiecutter.model_name }}Form
def get_success_url(self):
return reverse('{{ cookiecutter.model_name|lower }}_list')
class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView):
pass
class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView):
pass
class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView):
pass
class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView):
pass
class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView):
pass
|
<commit_before>from django.core.urlresolvers import reverse_lazy
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView
from .forms import {{ cookiecutter.model_name }}Form
from .models import {{ cookiecutter.model_name }}
class {{ cookiecutter.model_name }}CRUDView(object):
model = {{ cookiecutter.model_name }}
form_class = {{ cookiecutter.model_name }}Form
success_url = reverse_lazy('{{ cookiecutter.model_name|lower }}_list')
class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView):
pass
class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView):
pass
class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView):
pass
class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView):
pass
class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView):
pass
<commit_msg>Use get_success_url to work around reverse_lazy issue on Python3.<commit_after>
|
from django.core.urlresolvers import reverse
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView
from .forms import {{ cookiecutter.model_name }}Form
from .models import {{ cookiecutter.model_name }}
class {{ cookiecutter.model_name }}CRUDView(object):
model = {{ cookiecutter.model_name }}
form_class = {{ cookiecutter.model_name }}Form
def get_success_url(self):
return reverse('{{ cookiecutter.model_name|lower }}_list')
class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView):
pass
class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView):
pass
class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView):
pass
class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView):
pass
class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView):
pass
|
from django.core.urlresolvers import reverse_lazy
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView
from .forms import {{ cookiecutter.model_name }}Form
from .models import {{ cookiecutter.model_name }}
class {{ cookiecutter.model_name }}CRUDView(object):
model = {{ cookiecutter.model_name }}
form_class = {{ cookiecutter.model_name }}Form
success_url = reverse_lazy('{{ cookiecutter.model_name|lower }}_list')
class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView):
pass
class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView):
pass
class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView):
pass
class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView):
pass
class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView):
pass
Use get_success_url to work around reverse_lazy issue on Python3.from django.core.urlresolvers import reverse
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView
from .forms import {{ cookiecutter.model_name }}Form
from .models import {{ cookiecutter.model_name }}
class {{ cookiecutter.model_name }}CRUDView(object):
model = {{ cookiecutter.model_name }}
form_class = {{ cookiecutter.model_name }}Form
def get_success_url(self):
return reverse('{{ cookiecutter.model_name|lower }}_list')
class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView):
pass
class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView):
pass
class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView):
pass
class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView):
pass
class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView):
pass
|
<commit_before>from django.core.urlresolvers import reverse_lazy
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView
from .forms import {{ cookiecutter.model_name }}Form
from .models import {{ cookiecutter.model_name }}
class {{ cookiecutter.model_name }}CRUDView(object):
model = {{ cookiecutter.model_name }}
form_class = {{ cookiecutter.model_name }}Form
success_url = reverse_lazy('{{ cookiecutter.model_name|lower }}_list')
class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView):
pass
class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView):
pass
class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView):
pass
class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView):
pass
class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView):
pass
<commit_msg>Use get_success_url to work around reverse_lazy issue on Python3.<commit_after>from django.core.urlresolvers import reverse
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView
from .forms import {{ cookiecutter.model_name }}Form
from .models import {{ cookiecutter.model_name }}
class {{ cookiecutter.model_name }}CRUDView(object):
model = {{ cookiecutter.model_name }}
form_class = {{ cookiecutter.model_name }}Form
def get_success_url(self):
return reverse('{{ cookiecutter.model_name|lower }}_list')
class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView):
pass
class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView):
pass
class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView):
pass
class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView):
pass
class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView):
pass
|
216f0bb3680b86ac2dfc8c506b791db4e34eeee6
|
nextactions/board.py
|
nextactions/board.py
|
from nextactions.list import List
class Board:
    """A Trello board: wraps the board's JSON plus the API client."""

    def __init__(self, trello, json):
        self._trello = trello       # API client exposing .get(url)
        self.id = json['id']
        self.name = json['name']
        self.nextActionList = []

    def getLists(self):
        """Fetch this board's lists (without cards) as List objects."""
        # Fix: the original URL ended in 'cards=none)' — the stray ')'
        # was sent to the API as part of the query value.
        json = self._trello.get(
            'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none'
        )
        return [List(j) for j in json]

    def getListByName(self, name):
        """Return the first list whose name matches, or None."""
        return next((l for l in self.getLists() if l.name == name), None)
|
from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
matches = [l for l in self.getLists() if l.name == name]
return matches[0] if len(matches) else None
|
Tidy matching lists by name
|
Tidy matching lists by name
|
Python
|
mit
|
stevecshanks/trello-next-actions
|
from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
for l in self.getLists():
if l.name == name:
return l
return None
Tidy matching lists by name
|
from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
matches = [l for l in self.getLists() if l.name == name]
return matches[0] if len(matches) else None
|
<commit_before>from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
for l in self.getLists():
if l.name == name:
return l
return None
<commit_msg>Tidy matching lists by name<commit_after>
|
from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
matches = [l for l in self.getLists() if l.name == name]
return matches[0] if len(matches) else None
|
from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
for l in self.getLists():
if l.name == name:
return l
return None
Tidy matching lists by namefrom nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
matches = [l for l in self.getLists() if l.name == name]
return matches[0] if len(matches) else None
|
<commit_before>from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
for l in self.getLists():
if l.name == name:
return l
return None
<commit_msg>Tidy matching lists by name<commit_after>from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists?cards=none)'
)
return [List(j) for j in json]
def getListByName(self, name):
matches = [l for l in self.getLists() if l.name == name]
return matches[0] if len(matches) else None
|
b7f153a383dad71f272d8ef211deeb1c1a149f51
|
kerze.py
|
kerze.py
|
from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
fillcolor(FARBE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hide()
|
from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
SHAPE = "turtle"
fillcolor(FARBE)
shape(SHAPE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hideturtle()
|
Resolve NameError, add changeable turtle shape constant.
|
Resolve NameError, add changeable turtle shape constant.
|
Python
|
mit
|
luforst/adventskranz
|
from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
fillcolor(FARBE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hide()
Resolve NameError, add changeable turtle shape constant.
|
from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
SHAPE = "turtle"
fillcolor(FARBE)
shape(SHAPE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hideturtle()
|
<commit_before>from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
fillcolor(FARBE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hide()
<commit_msg>Resolve NameError, add changeable turtle shape constant.<commit_after>
|
from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
SHAPE = "turtle"
fillcolor(FARBE)
shape(SHAPE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hideturtle()
|
from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
fillcolor(FARBE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hide()
Resolve NameError, add changeable turtle shape constant.from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
SHAPE = "turtle"
fillcolor(FARBE)
shape(SHAPE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hideturtle()
|
<commit_before>from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
fillcolor(FARBE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hide()
<commit_msg>Resolve NameError, add changeable turtle shape constant.<commit_after>from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
SHAPE = "turtle"
fillcolor(FARBE)
shape(SHAPE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hideturtle()
|
f6fed4cd1fe4f8363d9060c7a80aa2b077f0e57a
|
smst/__init__.py
|
smst/__init__.py
|
# _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.2.0'
|
# _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.3.0'
|
Update the version to 0.3.0.
|
Update the version to 0.3.0.
|
Python
|
agpl-3.0
|
bzamecnik/sms-tools,bzamecnik/sms-tools,bzamecnik/sms-tools
|
# _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.2.0'
Update the version to 0.3.0.
|
# _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.3.0'
|
<commit_before># _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.2.0'
<commit_msg>Update the version to 0.3.0.<commit_after>
|
# _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.3.0'
|
# _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.2.0'
Update the version to 0.3.0.# _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.3.0'
|
<commit_before># _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.2.0'
<commit_msg>Update the version to 0.3.0.<commit_after># _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.3.0'
|
99a41171c6030cfd88b66979d2f62bb18b51041a
|
sqlobject/tests/test_exceptions.py
|
sqlobject/tests/test_exceptions.py
|
from sqlobject import *
from sqlobject.dberrors import DuplicateEntryError
from sqlobject.tests.dbtest import *
########################################
## Table aliases and self-joins
########################################
class TestException(SQLObject):
name = StringCol(unique=True, length=100)
def test_exceptions():
if not supports("exceptions"):
return
setupClass(TestException)
TestException(name="test")
raises(DuplicateEntryError, TestException, name="test")
|
from sqlobject import *
from sqlobject.dberrors import *
from sqlobject.tests.dbtest import *
########################################
## Table aliases and self-joins
########################################
class TestException(SQLObject):
name = StringCol(unique=True, length=100)
class TestExceptionWithNonexistingTable(SQLObject):
pass
def test_exceptions():
if not supports("exceptions"):
return
setupClass(TestException)
TestException(name="test")
raises(DuplicateEntryError, TestException, name="test")
connection = getConnection()
if connection.module.__name__ != 'psycopg2':
return
TestExceptionWithNonexistingTable.setConnection(connection)
try:
list(TestExceptionWithNonexistingTable.select())
except ProgrammingError, e:
assert e.args[0].code == '42P01'
else:
assert False, "DID NOT RAISE"
|
Add a test for pgcode
|
Add a test for pgcode
git-svn-id: ace7fa9e7412674399eb986d17112e1377537c44@4665 95a46c32-92d2-0310-94a5-8d71aeb3d4b3
|
Python
|
lgpl-2.1
|
sqlobject/sqlobject,drnlm/sqlobject,drnlm/sqlobject,sqlobject/sqlobject
|
from sqlobject import *
from sqlobject.dberrors import DuplicateEntryError
from sqlobject.tests.dbtest import *
########################################
## Table aliases and self-joins
########################################
class TestException(SQLObject):
name = StringCol(unique=True, length=100)
def test_exceptions():
if not supports("exceptions"):
return
setupClass(TestException)
TestException(name="test")
raises(DuplicateEntryError, TestException, name="test")
Add a test for pgcode
git-svn-id: ace7fa9e7412674399eb986d17112e1377537c44@4665 95a46c32-92d2-0310-94a5-8d71aeb3d4b3
|
from sqlobject import *
from sqlobject.dberrors import *
from sqlobject.tests.dbtest import *
########################################
## Table aliases and self-joins
########################################
class TestException(SQLObject):
name = StringCol(unique=True, length=100)
class TestExceptionWithNonexistingTable(SQLObject):
pass
def test_exceptions():
if not supports("exceptions"):
return
setupClass(TestException)
TestException(name="test")
raises(DuplicateEntryError, TestException, name="test")
connection = getConnection()
if connection.module.__name__ != 'psycopg2':
return
TestExceptionWithNonexistingTable.setConnection(connection)
try:
list(TestExceptionWithNonexistingTable.select())
except ProgrammingError, e:
assert e.args[0].code == '42P01'
else:
assert False, "DID NOT RAISE"
|
<commit_before>from sqlobject import *
from sqlobject.dberrors import DuplicateEntryError
from sqlobject.tests.dbtest import *
########################################
## Table aliases and self-joins
########################################
class TestException(SQLObject):
name = StringCol(unique=True, length=100)
def test_exceptions():
if not supports("exceptions"):
return
setupClass(TestException)
TestException(name="test")
raises(DuplicateEntryError, TestException, name="test")
<commit_msg>Add a test for pgcode
git-svn-id: ace7fa9e7412674399eb986d17112e1377537c44@4665 95a46c32-92d2-0310-94a5-8d71aeb3d4b3<commit_after>
|
from sqlobject import *
from sqlobject.dberrors import *
from sqlobject.tests.dbtest import *
########################################
## Table aliases and self-joins
########################################
class TestException(SQLObject):
name = StringCol(unique=True, length=100)
class TestExceptionWithNonexistingTable(SQLObject):
pass
def test_exceptions():
if not supports("exceptions"):
return
setupClass(TestException)
TestException(name="test")
raises(DuplicateEntryError, TestException, name="test")
connection = getConnection()
if connection.module.__name__ != 'psycopg2':
return
TestExceptionWithNonexistingTable.setConnection(connection)
try:
list(TestExceptionWithNonexistingTable.select())
except ProgrammingError, e:
assert e.args[0].code == '42P01'
else:
assert False, "DID NOT RAISE"
|
from sqlobject import *
from sqlobject.dberrors import DuplicateEntryError
from sqlobject.tests.dbtest import *
########################################
## Table aliases and self-joins
########################################
class TestException(SQLObject):
name = StringCol(unique=True, length=100)
def test_exceptions():
if not supports("exceptions"):
return
setupClass(TestException)
TestException(name="test")
raises(DuplicateEntryError, TestException, name="test")
Add a test for pgcode
git-svn-id: ace7fa9e7412674399eb986d17112e1377537c44@4665 95a46c32-92d2-0310-94a5-8d71aeb3d4b3from sqlobject import *
from sqlobject.dberrors import *
from sqlobject.tests.dbtest import *
########################################
## Table aliases and self-joins
########################################
class TestException(SQLObject):
name = StringCol(unique=True, length=100)
class TestExceptionWithNonexistingTable(SQLObject):
pass
def test_exceptions():
if not supports("exceptions"):
return
setupClass(TestException)
TestException(name="test")
raises(DuplicateEntryError, TestException, name="test")
connection = getConnection()
if connection.module.__name__ != 'psycopg2':
return
TestExceptionWithNonexistingTable.setConnection(connection)
try:
list(TestExceptionWithNonexistingTable.select())
except ProgrammingError, e:
assert e.args[0].code == '42P01'
else:
assert False, "DID NOT RAISE"
|
<commit_before>from sqlobject import *
from sqlobject.dberrors import DuplicateEntryError
from sqlobject.tests.dbtest import *
########################################
## Table aliases and self-joins
########################################
class TestException(SQLObject):
name = StringCol(unique=True, length=100)
def test_exceptions():
if not supports("exceptions"):
return
setupClass(TestException)
TestException(name="test")
raises(DuplicateEntryError, TestException, name="test")
<commit_msg>Add a test for pgcode
git-svn-id: ace7fa9e7412674399eb986d17112e1377537c44@4665 95a46c32-92d2-0310-94a5-8d71aeb3d4b3<commit_after>from sqlobject import *
from sqlobject.dberrors import *
from sqlobject.tests.dbtest import *
########################################
## Table aliases and self-joins
########################################
class TestException(SQLObject):
name = StringCol(unique=True, length=100)
class TestExceptionWithNonexistingTable(SQLObject):
pass
def test_exceptions():
if not supports("exceptions"):
return
setupClass(TestException)
TestException(name="test")
raises(DuplicateEntryError, TestException, name="test")
connection = getConnection()
if connection.module.__name__ != 'psycopg2':
return
TestExceptionWithNonexistingTable.setConnection(connection)
try:
list(TestExceptionWithNonexistingTable.select())
except ProgrammingError, e:
assert e.args[0].code == '42P01'
else:
assert False, "DID NOT RAISE"
|
6a3fbb7280c1078b574736eae3c6a3e4e42d3f46
|
seaborn/__init__.py
|
seaborn/__init__.py
|
# Capture the original matplotlib rcParams
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .timeseries import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
# Capture the original matplotlib rcParams
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
Remove top-level import of timeseries module
|
Remove top-level import of timeseries module
|
Python
|
bsd-3-clause
|
arokem/seaborn,mwaskom/seaborn,mwaskom/seaborn,arokem/seaborn,anntzer/seaborn,anntzer/seaborn
|
# Capture the original matplotlib rcParams
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .timeseries import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
Remove top-level import of timeseries module
|
# Capture the original matplotlib rcParams
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
<commit_before># Capture the original matplotlib rcParams
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .timeseries import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
<commit_msg>Remove top-level import of timeseries module<commit_after>
|
# Capture the original matplotlib rcParams
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
# Capture the original matplotlib rcParams
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .timeseries import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
Remove top-level import of timeseries module# Capture the original matplotlib rcParams
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
<commit_before># Capture the original matplotlib rcParams
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .timeseries import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
<commit_msg>Remove top-level import of timeseries module<commit_after># Capture the original matplotlib rcParams
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
95f48c85aee59906fc498c8c44c34551fca32a43
|
tests/blueprints/metrics/test_metrics.py
|
tests/blueprints/metrics/test_metrics.py
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
def test_metrics(make_admin_app):
client = _get_test_client(make_admin_app, True)
response = client.get('/metrics')
assert response.status_code == 200
assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8'
assert response.mimetype == 'text/plain'
assert response.get_data(as_text=True) == (
'users_enabled_count 0\n'
'users_disabled_count 0\n'
'users_suspended_count 0\n'
'users_deleted_count 0\n'
'users_total_count 0\n'
)
def test_disabled_metrics(make_admin_app):
client = _get_test_client(make_admin_app, False)
response = client.get('/metrics')
assert response.status_code == 404
def _get_test_client(make_admin_app, metrics_enabled):
config_overrides = {'METRICS_ENABLED': metrics_enabled}
app = make_admin_app(**config_overrides)
return app.test_client()
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from ...conftest import database_recreated
# To be overridden by test parametrization
@pytest.fixture
def config_overrides():
return {}
@pytest.fixture
def client(config_overrides, make_admin_app, db):
app = make_admin_app(**config_overrides)
with app.app_context():
with database_recreated(db):
yield app.test_client()
@pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': True}])
def test_metrics(client):
response = client.get('/metrics')
assert response.status_code == 200
assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8'
assert response.mimetype == 'text/plain'
assert response.get_data(as_text=True) == (
'users_enabled_count 0\n'
'users_disabled_count 0\n'
'users_suspended_count 0\n'
'users_deleted_count 0\n'
'users_total_count 0\n'
)
@pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': False}])
def test_disabled_metrics(client):
response = client.get('/metrics')
assert response.status_code == 404
|
Adjust metrics test to set up/tear down database
|
Adjust metrics test to set up/tear down database
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
def test_metrics(make_admin_app):
client = _get_test_client(make_admin_app, True)
response = client.get('/metrics')
assert response.status_code == 200
assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8'
assert response.mimetype == 'text/plain'
assert response.get_data(as_text=True) == (
'users_enabled_count 0\n'
'users_disabled_count 0\n'
'users_suspended_count 0\n'
'users_deleted_count 0\n'
'users_total_count 0\n'
)
def test_disabled_metrics(make_admin_app):
client = _get_test_client(make_admin_app, False)
response = client.get('/metrics')
assert response.status_code == 404
def _get_test_client(make_admin_app, metrics_enabled):
config_overrides = {'METRICS_ENABLED': metrics_enabled}
app = make_admin_app(**config_overrides)
return app.test_client()
Adjust metrics test to set up/tear down database
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from ...conftest import database_recreated
# To be overridden by test parametrization
@pytest.fixture
def config_overrides():
return {}
@pytest.fixture
def client(config_overrides, make_admin_app, db):
app = make_admin_app(**config_overrides)
with app.app_context():
with database_recreated(db):
yield app.test_client()
@pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': True}])
def test_metrics(client):
response = client.get('/metrics')
assert response.status_code == 200
assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8'
assert response.mimetype == 'text/plain'
assert response.get_data(as_text=True) == (
'users_enabled_count 0\n'
'users_disabled_count 0\n'
'users_suspended_count 0\n'
'users_deleted_count 0\n'
'users_total_count 0\n'
)
@pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': False}])
def test_disabled_metrics(client):
response = client.get('/metrics')
assert response.status_code == 404
|
<commit_before>"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
def test_metrics(make_admin_app):
client = _get_test_client(make_admin_app, True)
response = client.get('/metrics')
assert response.status_code == 200
assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8'
assert response.mimetype == 'text/plain'
assert response.get_data(as_text=True) == (
'users_enabled_count 0\n'
'users_disabled_count 0\n'
'users_suspended_count 0\n'
'users_deleted_count 0\n'
'users_total_count 0\n'
)
def test_disabled_metrics(make_admin_app):
client = _get_test_client(make_admin_app, False)
response = client.get('/metrics')
assert response.status_code == 404
def _get_test_client(make_admin_app, metrics_enabled):
config_overrides = {'METRICS_ENABLED': metrics_enabled}
app = make_admin_app(**config_overrides)
return app.test_client()
<commit_msg>Adjust metrics test to set up/tear down database<commit_after>
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from ...conftest import database_recreated
# To be overridden by test parametrization
@pytest.fixture
def config_overrides():
return {}
@pytest.fixture
def client(config_overrides, make_admin_app, db):
app = make_admin_app(**config_overrides)
with app.app_context():
with database_recreated(db):
yield app.test_client()
@pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': True}])
def test_metrics(client):
response = client.get('/metrics')
assert response.status_code == 200
assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8'
assert response.mimetype == 'text/plain'
assert response.get_data(as_text=True) == (
'users_enabled_count 0\n'
'users_disabled_count 0\n'
'users_suspended_count 0\n'
'users_deleted_count 0\n'
'users_total_count 0\n'
)
@pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': False}])
def test_disabled_metrics(client):
response = client.get('/metrics')
assert response.status_code == 404
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
def test_metrics(make_admin_app):
client = _get_test_client(make_admin_app, True)
response = client.get('/metrics')
assert response.status_code == 200
assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8'
assert response.mimetype == 'text/plain'
assert response.get_data(as_text=True) == (
'users_enabled_count 0\n'
'users_disabled_count 0\n'
'users_suspended_count 0\n'
'users_deleted_count 0\n'
'users_total_count 0\n'
)
def test_disabled_metrics(make_admin_app):
client = _get_test_client(make_admin_app, False)
response = client.get('/metrics')
assert response.status_code == 404
def _get_test_client(make_admin_app, metrics_enabled):
config_overrides = {'METRICS_ENABLED': metrics_enabled}
app = make_admin_app(**config_overrides)
return app.test_client()
Adjust metrics test to set up/tear down database"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from ...conftest import database_recreated
# To be overridden by test parametrization
@pytest.fixture
def config_overrides():
return {}
@pytest.fixture
def client(config_overrides, make_admin_app, db):
app = make_admin_app(**config_overrides)
with app.app_context():
with database_recreated(db):
yield app.test_client()
@pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': True}])
def test_metrics(client):
response = client.get('/metrics')
assert response.status_code == 200
assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8'
assert response.mimetype == 'text/plain'
assert response.get_data(as_text=True) == (
'users_enabled_count 0\n'
'users_disabled_count 0\n'
'users_suspended_count 0\n'
'users_deleted_count 0\n'
'users_total_count 0\n'
)
@pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': False}])
def test_disabled_metrics(client):
response = client.get('/metrics')
assert response.status_code == 404
|
<commit_before>"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
def test_metrics(make_admin_app):
client = _get_test_client(make_admin_app, True)
response = client.get('/metrics')
assert response.status_code == 200
assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8'
assert response.mimetype == 'text/plain'
assert response.get_data(as_text=True) == (
'users_enabled_count 0\n'
'users_disabled_count 0\n'
'users_suspended_count 0\n'
'users_deleted_count 0\n'
'users_total_count 0\n'
)
def test_disabled_metrics(make_admin_app):
client = _get_test_client(make_admin_app, False)
response = client.get('/metrics')
assert response.status_code == 404
def _get_test_client(make_admin_app, metrics_enabled):
config_overrides = {'METRICS_ENABLED': metrics_enabled}
app = make_admin_app(**config_overrides)
return app.test_client()
<commit_msg>Adjust metrics test to set up/tear down database<commit_after>"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from ...conftest import database_recreated
# To be overridden by test parametrization
@pytest.fixture
def config_overrides():
return {}
@pytest.fixture
def client(config_overrides, make_admin_app, db):
app = make_admin_app(**config_overrides)
with app.app_context():
with database_recreated(db):
yield app.test_client()
@pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': True}])
def test_metrics(client):
response = client.get('/metrics')
assert response.status_code == 200
assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8'
assert response.mimetype == 'text/plain'
assert response.get_data(as_text=True) == (
'users_enabled_count 0\n'
'users_disabled_count 0\n'
'users_suspended_count 0\n'
'users_deleted_count 0\n'
'users_total_count 0\n'
)
@pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': False}])
def test_disabled_metrics(client):
response = client.get('/metrics')
assert response.status_code == 404
|
60704cb85f4e512e0acd9b144d6599c3b3763820
|
testing/test_detail_page.py
|
testing/test_detail_page.py
|
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import detail_object
@pytest.mark.parametrize('input,expected', [
(1, '1.html'),
(2, '2.html'),
(201, '201.html'),
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected)
def test_open_detail_url():
epic_object = mock.Mock(epic_id=1)
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic_object).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/1.html')
def test_epic_id():
epic_object = mock.Mock(epic_id=1)
assert detail_object.DetailObject(epic_object).epic_id == 1
|
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import detail_object
@pytest.mark.parametrize('input,expected', [
(1, '1.html'),
(2, '2.html'),
(201, '201.html'),
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{0}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected)
def test_open_detail_url():
epic_object = mock.Mock(epic_id=1)
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic_object).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/1.html')
def test_epic_id():
epic_object = mock.Mock(epic_id=1)
assert detail_object.DetailObject(epic_object).epic_id == 1
|
Update format placeholder to be 2.6 compatible
|
Update format placeholder to be 2.6 compatible
|
Python
|
mit
|
mindriot101/k2catalogue
|
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import detail_object
@pytest.mark.parametrize('input,expected', [
(1, '1.html'),
(2, '2.html'),
(201, '201.html'),
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected)
def test_open_detail_url():
epic_object = mock.Mock(epic_id=1)
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic_object).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/1.html')
def test_epic_id():
epic_object = mock.Mock(epic_id=1)
assert detail_object.DetailObject(epic_object).epic_id == 1
Update format placeholder to be 2.6 compatible
|
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import detail_object
@pytest.mark.parametrize('input,expected', [
(1, '1.html'),
(2, '2.html'),
(201, '201.html'),
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{0}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected)
def test_open_detail_url():
epic_object = mock.Mock(epic_id=1)
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic_object).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/1.html')
def test_epic_id():
epic_object = mock.Mock(epic_id=1)
assert detail_object.DetailObject(epic_object).epic_id == 1
|
<commit_before>import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import detail_object
@pytest.mark.parametrize('input,expected', [
(1, '1.html'),
(2, '2.html'),
(201, '201.html'),
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected)
def test_open_detail_url():
epic_object = mock.Mock(epic_id=1)
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic_object).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/1.html')
def test_epic_id():
epic_object = mock.Mock(epic_id=1)
assert detail_object.DetailObject(epic_object).epic_id == 1
<commit_msg>Update format placeholder to be 2.6 compatible<commit_after>
|
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import detail_object
@pytest.mark.parametrize('input,expected', [
(1, '1.html'),
(2, '2.html'),
(201, '201.html'),
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{0}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected)
def test_open_detail_url():
epic_object = mock.Mock(epic_id=1)
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic_object).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/1.html')
def test_epic_id():
epic_object = mock.Mock(epic_id=1)
assert detail_object.DetailObject(epic_object).epic_id == 1
|
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import detail_object
@pytest.mark.parametrize('input,expected', [
(1, '1.html'),
(2, '2.html'),
(201, '201.html'),
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected)
def test_open_detail_url():
epic_object = mock.Mock(epic_id=1)
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic_object).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/1.html')
def test_epic_id():
epic_object = mock.Mock(epic_id=1)
assert detail_object.DetailObject(epic_object).epic_id == 1
Update format placeholder to be 2.6 compatibleimport pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import detail_object
@pytest.mark.parametrize('input,expected', [
(1, '1.html'),
(2, '2.html'),
(201, '201.html'),
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{0}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected)
def test_open_detail_url():
epic_object = mock.Mock(epic_id=1)
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic_object).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/1.html')
def test_epic_id():
epic_object = mock.Mock(epic_id=1)
assert detail_object.DetailObject(epic_object).epic_id == 1
|
<commit_before>import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import detail_object
@pytest.mark.parametrize('input,expected', [
(1, '1.html'),
(2, '2.html'),
(201, '201.html'),
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected)
def test_open_detail_url():
epic_object = mock.Mock(epic_id=1)
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic_object).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/1.html')
def test_epic_id():
epic_object = mock.Mock(epic_id=1)
assert detail_object.DetailObject(epic_object).epic_id == 1
<commit_msg>Update format placeholder to be 2.6 compatible<commit_after>import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import detail_object
@pytest.mark.parametrize('input,expected', [
(1, '1.html'),
(2, '2.html'),
(201, '201.html'),
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{0}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected)
def test_open_detail_url():
epic_object = mock.Mock(epic_id=1)
with mock.patch('k2catalogue.detail_object.webbrowser.open') as mock_open:
detail_object.DetailObject(epic_object).open()
mock_open.assert_called_once_with(
'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/1.html')
def test_epic_id():
epic_object = mock.Mock(epic_id=1)
assert detail_object.DetailObject(epic_object).epic_id == 1
|
6c9f4aa7d179632acee5bc2d0828198a3a58b295
|
app.py
|
app.py
|
from flask import Flask, render_template, request, jsonify
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config.from_object('config.Debug')
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
return render_template('upload.html')
elif request.method == 'POST':
file = request.files['file']
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return jsonify(filename=filename)
if __name__ == "__main__":
app.run()
|
from flask import Flask, render_template, request, jsonify
import os
app = Flask(__name__)
app.config.from_object('config.Debug')
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
return render_template('upload.html')
elif request.method == 'POST':
file = request.files['file']
if file:
filename = os.urandom(30).encode('hex')
while os.path.isfile(os.path.join(app.config['UPLOAD_FOLDER'], filename)):
filename = os.urandom(30).encode('hex')
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return jsonify(filename=filename)
if __name__ == "__main__":
app.run()
|
Use random file names inplace of the original file names
|
Use random file names inplace of the original file names
|
Python
|
mit
|
citruspi/Alexandria,citruspi/Alexandria
|
from flask import Flask, render_template, request, jsonify
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config.from_object('config.Debug')
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
return render_template('upload.html')
elif request.method == 'POST':
file = request.files['file']
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return jsonify(filename=filename)
if __name__ == "__main__":
app.run()
Use random file names inplace of the original file names
|
from flask import Flask, render_template, request, jsonify
import os
app = Flask(__name__)
app.config.from_object('config.Debug')
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
return render_template('upload.html')
elif request.method == 'POST':
file = request.files['file']
if file:
filename = os.urandom(30).encode('hex')
while os.path.isfile(os.path.join(app.config['UPLOAD_FOLDER'], filename)):
filename = os.urandom(30).encode('hex')
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return jsonify(filename=filename)
if __name__ == "__main__":
app.run()
|
<commit_before>from flask import Flask, render_template, request, jsonify
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config.from_object('config.Debug')
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
return render_template('upload.html')
elif request.method == 'POST':
file = request.files['file']
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return jsonify(filename=filename)
if __name__ == "__main__":
app.run()
<commit_msg>Use random file names inplace of the original file names<commit_after>
|
from flask import Flask, render_template, request, jsonify
import os
app = Flask(__name__)
app.config.from_object('config.Debug')
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
return render_template('upload.html')
elif request.method == 'POST':
file = request.files['file']
if file:
filename = os.urandom(30).encode('hex')
while os.path.isfile(os.path.join(app.config['UPLOAD_FOLDER'], filename)):
filename = os.urandom(30).encode('hex')
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return jsonify(filename=filename)
if __name__ == "__main__":
app.run()
|
from flask import Flask, render_template, request, jsonify
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config.from_object('config.Debug')
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
return render_template('upload.html')
elif request.method == 'POST':
file = request.files['file']
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return jsonify(filename=filename)
if __name__ == "__main__":
app.run()
Use random file names inplace of the original file namesfrom flask import Flask, render_template, request, jsonify
import os
app = Flask(__name__)
app.config.from_object('config.Debug')
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
return render_template('upload.html')
elif request.method == 'POST':
file = request.files['file']
if file:
filename = os.urandom(30).encode('hex')
while os.path.isfile(os.path.join(app.config['UPLOAD_FOLDER'], filename)):
filename = os.urandom(30).encode('hex')
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return jsonify(filename=filename)
if __name__ == "__main__":
app.run()
|
<commit_before>from flask import Flask, render_template, request, jsonify
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config.from_object('config.Debug')
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
return render_template('upload.html')
elif request.method == 'POST':
file = request.files['file']
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return jsonify(filename=filename)
if __name__ == "__main__":
app.run()
<commit_msg>Use random file names inplace of the original file names<commit_after>from flask import Flask, render_template, request, jsonify
import os
app = Flask(__name__)
app.config.from_object('config.Debug')
@app.route('/upload', methods=['GET', 'POST'])
def upload():
if request.method == 'GET':
return render_template('upload.html')
elif request.method == 'POST':
file = request.files['file']
if file:
filename = os.urandom(30).encode('hex')
while os.path.isfile(os.path.join(app.config['UPLOAD_FOLDER'], filename)):
filename = os.urandom(30).encode('hex')
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return jsonify(filename=filename)
if __name__ == "__main__":
app.run()
|
b9f54aa03896f3e9135be6b64ccf696656125a49
|
st2common/st2common/runners/__init__.py
|
st2common/st2common/runners/__init__.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'BACKENDS_NAMESPACE'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
|
Add BACKENDS_NAMESPACE constant so it's consistent with auth backends.
|
Add BACKENDS_NAMESPACE constant so it's consistent with auth backends.
|
Python
|
apache-2.0
|
StackStorm/st2,StackStorm/st2,nzlosh/st2,Plexxi/st2,Plexxi/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,nzlosh/st2,StackStorm/st2,nzlosh/st2,Plexxi/st2
|
Add BACKENDS_NAMESPACE constant so it's consistent with auth backends.
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'BACKENDS_NAMESPACE'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
|
<commit_before><commit_msg>Add BACKENDS_NAMESPACE constant so it's consistent with auth backends.<commit_after>
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'BACKENDS_NAMESPACE'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
|
Add BACKENDS_NAMESPACE constant so it's consistent with auth backends.# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'BACKENDS_NAMESPACE'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
|
<commit_before><commit_msg>Add BACKENDS_NAMESPACE constant so it's consistent with auth backends.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'BACKENDS_NAMESPACE'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
|
|
3aba89dce81a47bbd2fbe99f46636108e243641b
|
docs/conf.py
|
docs/conf.py
|
# -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing
# directory.
import os
import sys
sys.path.append(os.path.join(os.path.abspath('.'), '_ext'))
sys.path.append(os.path.dirname(os.path.abspath('.')))
import analytical
# -- General configuration --------------------------------------------------
project = u'django-analytical'
copyright = u'2011, Joost Cassee <joost@cassee.net>'
release = analytical.__version__
# The short X.Y version.
version = release.rsplit('.', 1)[0]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'local']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
add_function_parentheses = True
pygments_style = 'sphinx'
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'http://docs.djangoproject.com/en/1.8':
'http://docs.djangoproject.com/en/1.8/_objects/',
}
# -- Options for HTML output ------------------------------------------------
html_theme = 'default'
htmlhelp_basename = 'analyticaldoc'
# -- Options for LaTeX output -----------------------------------------------
latex_documents = [
('index', 'django-analytical.tex', u'Documentation for django-analytical',
u'Joost Cassee', 'manual'),
]
|
# -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing
# directory.
import os
import sys
sys.path.append(os.path.join(os.path.abspath('.'), '_ext'))
sys.path.append(os.path.dirname(os.path.abspath('.')))
import analytical
# -- General configuration --------------------------------------------------
project = u'django-analytical'
copyright = u'2011, Joost Cassee <joost@cassee.net>'
release = analytical.__version__
# The short X.Y version.
version = release.rsplit('.', 1)[0]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'local']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
add_function_parentheses = True
pygments_style = 'sphinx'
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'http://docs.djangoproject.com/en/1.9':
'http://docs.djangoproject.com/en/1.9/_objects/',
}
# -- Options for HTML output ------------------------------------------------
html_theme = 'default'
htmlhelp_basename = 'analyticaldoc'
# -- Options for LaTeX output -----------------------------------------------
latex_documents = [
('index', 'django-analytical.tex', u'Documentation for django-analytical',
u'Joost Cassee', 'manual'),
]
|
Update documentation links to Django 1.9
|
Update documentation links to Django 1.9
|
Python
|
mit
|
bittner/django-analytical,jcassee/django-analytical,pjdelport/django-analytical
|
# -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing
# directory.
import os
import sys
sys.path.append(os.path.join(os.path.abspath('.'), '_ext'))
sys.path.append(os.path.dirname(os.path.abspath('.')))
import analytical
# -- General configuration --------------------------------------------------
project = u'django-analytical'
copyright = u'2011, Joost Cassee <joost@cassee.net>'
release = analytical.__version__
# The short X.Y version.
version = release.rsplit('.', 1)[0]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'local']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
add_function_parentheses = True
pygments_style = 'sphinx'
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'http://docs.djangoproject.com/en/1.8':
'http://docs.djangoproject.com/en/1.8/_objects/',
}
# -- Options for HTML output ------------------------------------------------
html_theme = 'default'
htmlhelp_basename = 'analyticaldoc'
# -- Options for LaTeX output -----------------------------------------------
latex_documents = [
('index', 'django-analytical.tex', u'Documentation for django-analytical',
u'Joost Cassee', 'manual'),
]
Update documentation links to Django 1.9
|
# -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing
# directory.
import os
import sys
sys.path.append(os.path.join(os.path.abspath('.'), '_ext'))
sys.path.append(os.path.dirname(os.path.abspath('.')))
import analytical
# -- General configuration --------------------------------------------------
project = u'django-analytical'
copyright = u'2011, Joost Cassee <joost@cassee.net>'
release = analytical.__version__
# The short X.Y version.
version = release.rsplit('.', 1)[0]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'local']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
add_function_parentheses = True
pygments_style = 'sphinx'
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'http://docs.djangoproject.com/en/1.9':
'http://docs.djangoproject.com/en/1.9/_objects/',
}
# -- Options for HTML output ------------------------------------------------
html_theme = 'default'
htmlhelp_basename = 'analyticaldoc'
# -- Options for LaTeX output -----------------------------------------------
latex_documents = [
('index', 'django-analytical.tex', u'Documentation for django-analytical',
u'Joost Cassee', 'manual'),
]
|
<commit_before># -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing
# directory.
import os
import sys
sys.path.append(os.path.join(os.path.abspath('.'), '_ext'))
sys.path.append(os.path.dirname(os.path.abspath('.')))
import analytical
# -- General configuration --------------------------------------------------
project = u'django-analytical'
copyright = u'2011, Joost Cassee <joost@cassee.net>'
release = analytical.__version__
# The short X.Y version.
version = release.rsplit('.', 1)[0]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'local']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
add_function_parentheses = True
pygments_style = 'sphinx'
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'http://docs.djangoproject.com/en/1.8':
'http://docs.djangoproject.com/en/1.8/_objects/',
}
# -- Options for HTML output ------------------------------------------------
html_theme = 'default'
htmlhelp_basename = 'analyticaldoc'
# -- Options for LaTeX output -----------------------------------------------
latex_documents = [
('index', 'django-analytical.tex', u'Documentation for django-analytical',
u'Joost Cassee', 'manual'),
]
<commit_msg>Update documentation links to Django 1.9<commit_after>
|
# -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing
# directory.
import os
import sys
sys.path.append(os.path.join(os.path.abspath('.'), '_ext'))
sys.path.append(os.path.dirname(os.path.abspath('.')))
import analytical
# -- General configuration --------------------------------------------------
project = u'django-analytical'
copyright = u'2011, Joost Cassee <joost@cassee.net>'
release = analytical.__version__
# The short X.Y version.
version = release.rsplit('.', 1)[0]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'local']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
add_function_parentheses = True
pygments_style = 'sphinx'
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'http://docs.djangoproject.com/en/1.9':
'http://docs.djangoproject.com/en/1.9/_objects/',
}
# -- Options for HTML output ------------------------------------------------
html_theme = 'default'
htmlhelp_basename = 'analyticaldoc'
# -- Options for LaTeX output -----------------------------------------------
latex_documents = [
('index', 'django-analytical.tex', u'Documentation for django-analytical',
u'Joost Cassee', 'manual'),
]
|
# -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing
# directory.
import os
import sys
sys.path.append(os.path.join(os.path.abspath('.'), '_ext'))
sys.path.append(os.path.dirname(os.path.abspath('.')))
import analytical
# -- General configuration --------------------------------------------------
project = u'django-analytical'
copyright = u'2011, Joost Cassee <joost@cassee.net>'
release = analytical.__version__
# The short X.Y version.
version = release.rsplit('.', 1)[0]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'local']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
add_function_parentheses = True
pygments_style = 'sphinx'
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'http://docs.djangoproject.com/en/1.8':
'http://docs.djangoproject.com/en/1.8/_objects/',
}
# -- Options for HTML output ------------------------------------------------
html_theme = 'default'
htmlhelp_basename = 'analyticaldoc'
# -- Options for LaTeX output -----------------------------------------------
latex_documents = [
('index', 'django-analytical.tex', u'Documentation for django-analytical',
u'Joost Cassee', 'manual'),
]
Update documentation links to Django 1.9# -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing
# directory.
import os
import sys
sys.path.append(os.path.join(os.path.abspath('.'), '_ext'))
sys.path.append(os.path.dirname(os.path.abspath('.')))
import analytical
# -- General configuration --------------------------------------------------
project = u'django-analytical'
copyright = u'2011, Joost Cassee <joost@cassee.net>'
release = analytical.__version__
# The short X.Y version.
version = release.rsplit('.', 1)[0]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'local']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
add_function_parentheses = True
pygments_style = 'sphinx'
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'http://docs.djangoproject.com/en/1.9':
'http://docs.djangoproject.com/en/1.9/_objects/',
}
# -- Options for HTML output ------------------------------------------------
html_theme = 'default'
htmlhelp_basename = 'analyticaldoc'
# -- Options for LaTeX output -----------------------------------------------
latex_documents = [
('index', 'django-analytical.tex', u'Documentation for django-analytical',
u'Joost Cassee', 'manual'),
]
|
<commit_before># -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing
# directory.
import os
import sys
sys.path.append(os.path.join(os.path.abspath('.'), '_ext'))
sys.path.append(os.path.dirname(os.path.abspath('.')))
import analytical
# -- General configuration --------------------------------------------------
project = u'django-analytical'
copyright = u'2011, Joost Cassee <joost@cassee.net>'
release = analytical.__version__
# The short X.Y version.
version = release.rsplit('.', 1)[0]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'local']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
add_function_parentheses = True
pygments_style = 'sphinx'
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'http://docs.djangoproject.com/en/1.8':
'http://docs.djangoproject.com/en/1.8/_objects/',
}
# -- Options for HTML output ------------------------------------------------
html_theme = 'default'
htmlhelp_basename = 'analyticaldoc'
# -- Options for LaTeX output -----------------------------------------------
latex_documents = [
('index', 'django-analytical.tex', u'Documentation for django-analytical',
u'Joost Cassee', 'manual'),
]
<commit_msg>Update documentation links to Django 1.9<commit_after># -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing
# directory.
import os
import sys
sys.path.append(os.path.join(os.path.abspath('.'), '_ext'))
sys.path.append(os.path.dirname(os.path.abspath('.')))
import analytical
# -- General configuration --------------------------------------------------
project = u'django-analytical'
copyright = u'2011, Joost Cassee <joost@cassee.net>'
release = analytical.__version__
# The short X.Y version.
version = release.rsplit('.', 1)[0]
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'local']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
add_function_parentheses = True
pygments_style = 'sphinx'
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'http://docs.djangoproject.com/en/1.9':
'http://docs.djangoproject.com/en/1.9/_objects/',
}
# -- Options for HTML output ------------------------------------------------
html_theme = 'default'
htmlhelp_basename = 'analyticaldoc'
# -- Options for LaTeX output -----------------------------------------------
latex_documents = [
('index', 'django-analytical.tex', u'Documentation for django-analytical',
u'Joost Cassee', 'manual'),
]
|
f7b48c9193511f693cc2ec17d46253077d06dcc3
|
LR/lr/lib/__init__.py
|
LR/lr/lib/__init__.py
|
from model_parser import ModelParser, getFileString
__all__=['ModelParser', 'getFileString']
|
# !/usr/bin/python
# Copyright 2011 Lockheed Martin
'''
Base couchdb threshold change handler class.
Created on August 18, 2011
@author: jpoyau
'''
from model_parser import ModelParser, getFileString
from couch_change_monitor import *
__all__=["ModelParser",
"getFileString",
"MonitorChanges",
"BaseChangeHandler",
"BaseThresholdHandler",
"BaseViewsUpdateHandler"]
|
Add the new change feed module to __all__
|
Add the new change feed module to __all__
|
Python
|
apache-2.0
|
jimklo/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry
|
from model_parser import ModelParser, getFileString
__all__=['ModelParser', 'getFileString']
Add the new change feed module to __all__
|
# !/usr/bin/python
# Copyright 2011 Lockheed Martin
'''
Base couchdb threshold change handler class.
Created on August 18, 2011
@author: jpoyau
'''
from model_parser import ModelParser, getFileString
from couch_change_monitor import *
__all__=["ModelParser",
"getFileString",
"MonitorChanges",
"BaseChangeHandler",
"BaseThresholdHandler",
"BaseViewsUpdateHandler"]
|
<commit_before>from model_parser import ModelParser, getFileString
__all__=['ModelParser', 'getFileString']
<commit_msg>Add the new change feed module to __all__<commit_after>
|
# !/usr/bin/python
# Copyright 2011 Lockheed Martin
'''
Base couchdb threshold change handler class.
Created on August 18, 2011
@author: jpoyau
'''
from model_parser import ModelParser, getFileString
from couch_change_monitor import *
__all__=["ModelParser",
"getFileString",
"MonitorChanges",
"BaseChangeHandler",
"BaseThresholdHandler",
"BaseViewsUpdateHandler"]
|
from model_parser import ModelParser, getFileString
__all__=['ModelParser', 'getFileString']
Add the new change feed module to __all__# !/usr/bin/python
# Copyright 2011 Lockheed Martin
'''
Base couchdb threshold change handler class.
Created on August 18, 2011
@author: jpoyau
'''
from model_parser import ModelParser, getFileString
from couch_change_monitor import *
__all__=["ModelParser",
"getFileString",
"MonitorChanges",
"BaseChangeHandler",
"BaseThresholdHandler",
"BaseViewsUpdateHandler"]
|
<commit_before>from model_parser import ModelParser, getFileString
__all__=['ModelParser', 'getFileString']
<commit_msg>Add the new change feed module to __all__<commit_after># !/usr/bin/python
# Copyright 2011 Lockheed Martin
'''
Base couchdb threshold change handler class.
Created on August 18, 2011
@author: jpoyau
'''
from model_parser import ModelParser, getFileString
from couch_change_monitor import *
__all__=["ModelParser",
"getFileString",
"MonitorChanges",
"BaseChangeHandler",
"BaseThresholdHandler",
"BaseViewsUpdateHandler"]
|
e6e3cd2b8e6ad64bce9fe6614c3d532fcbfa3359
|
OpenSearchInNewTab.py
|
OpenSearchInNewTab.py
|
import sublime_plugin
class OpenSearchInNewTab(sublime_plugin.EventListener):
def on_deactivated(self, view):
if view.name() == 'Find Results':
# set a name with space
# so it won't be bothered
# during new search
view.set_name('Find Results ')
|
import sublime_plugin
default_name = 'Find Results'
alt_name = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
def on_deactivated(self, view):
if view.name() == 'Find Results':
# set a name with space
# so it won't be bothered
# during new search
view.set_name(alt_name)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if view.name() == alt_name:
view.set_name(default_name)
def post_text_command(self, view, command_name, args):
if view.name() == default_name:
view.set_name(alt_name)
|
Add text commands hook for other plugins
|
Add text commands hook for other plugins
|
Python
|
mit
|
everyonesdesign/OpenSearchInNewTab
|
import sublime_plugin
class OpenSearchInNewTab(sublime_plugin.EventListener):
def on_deactivated(self, view):
if view.name() == 'Find Results':
# set a name with space
# so it won't be bothered
# during new search
view.set_name('Find Results ')Add text commands hook for other plugins
|
import sublime_plugin
default_name = 'Find Results'
alt_name = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
def on_deactivated(self, view):
if view.name() == 'Find Results':
# set a name with space
# so it won't be bothered
# during new search
view.set_name(alt_name)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if view.name() == alt_name:
view.set_name(default_name)
def post_text_command(self, view, command_name, args):
if view.name() == default_name:
view.set_name(alt_name)
|
<commit_before>import sublime_plugin
class OpenSearchInNewTab(sublime_plugin.EventListener):
def on_deactivated(self, view):
if view.name() == 'Find Results':
# set a name with space
# so it won't be bothered
# during new search
view.set_name('Find Results ')<commit_msg>Add text commands hook for other plugins<commit_after>
|
import sublime_plugin
default_name = 'Find Results'
alt_name = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
def on_deactivated(self, view):
if view.name() == 'Find Results':
# set a name with space
# so it won't be bothered
# during new search
view.set_name(alt_name)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if view.name() == alt_name:
view.set_name(default_name)
def post_text_command(self, view, command_name, args):
if view.name() == default_name:
view.set_name(alt_name)
|
import sublime_plugin
class OpenSearchInNewTab(sublime_plugin.EventListener):
def on_deactivated(self, view):
if view.name() == 'Find Results':
# set a name with space
# so it won't be bothered
# during new search
view.set_name('Find Results ')Add text commands hook for other pluginsimport sublime_plugin
default_name = 'Find Results'
alt_name = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
def on_deactivated(self, view):
if view.name() == 'Find Results':
# set a name with space
# so it won't be bothered
# during new search
view.set_name(alt_name)
# these hooks will help other plugins
# to understand that we are in search results file
def on_text_command(self, view, command_name, args):
if view.name() == alt_name:
view.set_name(default_name)
def post_text_command(self, view, command_name, args):
if view.name() == default_name:
view.set_name(alt_name)
|
<commit_before>import sublime_plugin
class OpenSearchInNewTab(sublime_plugin.EventListener):
def on_deactivated(self, view):
if view.name() == 'Find Results':
# set a name with space
# so it won't be bothered
# during new search
view.set_name('Find Results ')<commit_msg>Add text commands hook for other plugins<commit_after>import sublime_plugin
default_name = 'Find Results'
alt_name = 'Find Results '
class OpenSearchInNewTab(sublime_plugin.EventListener):
    """Keeps search results in their own tab by renaming the results view."""

    def on_deactivated(self, view):
        # Rename with a trailing space so the next search opens a fresh
        # tab instead of reusing this one.
        if view.name() != 'Find Results':
            return
        view.set_name(alt_name)

    # The two hooks below briefly restore the canonical name around text
    # commands so other plugins can recognise the search-results view.
    def on_text_command(self, view, command_name, args):
        if view.name() == alt_name:
            view.set_name(default_name)

    def post_text_command(self, view, command_name, args):
        if view.name() == default_name:
            view.set_name(alt_name)
|
bf97326c668580eef49fc4323a249a1c3cd1b126
|
src/formatter.py
|
src/formatter.py
|
from .command import Command
from .settings import Settings
class Formatter(object):
def __init__(self, name=None, source=None, binary=None):
self.__name = name
self.__source = 'source.' + (source if source else name.lower())
self.__binary = binary
self.__settings = Settings(name.lower())
@property
def settings(self):
return self.__settings
@property
def name(self):
return self.__name
@property
def source(self):
return self.__source
@property
def binary(self):
return self.settings.get('binary', self.__binary)
@property
def format_on_save(self):
return self.settings.get('format_on_save', False)
@format_on_save.setter
def format_on_save(self, value):
self.settings.set('format_on_save', value)
def command(self):
return [self.binary]
def selection_args(self):
return []
def file_args(self, file_name):
return [file_name]
def format(self, file=None, input=None):
args = self.file_args(file) if file else self.selection_args()
return Command(self.command() + args).run(input)
|
from .command import Command
from .settings import Settings
class Formatter(object):
def __init__(self, name=None, source=None, binary=None):
self.__name = name
self.__source = 'source.' + (source if source else name.lower())
self.__binary = binary
self.__settings = Settings(name.lower())
@property
def settings(self):
return self.__settings
@property
def name(self):
return self.__name
@property
def source(self):
return self.__source
@property
def binary(self):
return self.settings.get('binary', self.__binary)
@property
def options(self):
return self.settings.get('options', {})
@property
def format_on_save(self):
return self.settings.get('format_on_save', False)
@format_on_save.setter
def format_on_save(self, value):
self.settings.set('format_on_save', value)
def command(self):
return [self.binary]
def selection_args(self):
return []
def file_args(self, file_name):
return [file_name]
def parsed_options(self):
options = []
for key, value in self.options.items():
options.extend(['--' + key, value])
return options
def format(self, file=None, input=None):
command = self.command()
options = self.parsed_options()
args = self.file_args(file) if file else self.selection_args()
return Command(command + options + args).run(input)
|
Apply options defined in user settings to the formatting command
|
Apply options defined in user settings to the formatting command
|
Python
|
mit
|
Rypac/sublime-format
|
from .command import Command
from .settings import Settings
class Formatter(object):
def __init__(self, name=None, source=None, binary=None):
self.__name = name
self.__source = 'source.' + (source if source else name.lower())
self.__binary = binary
self.__settings = Settings(name.lower())
@property
def settings(self):
return self.__settings
@property
def name(self):
return self.__name
@property
def source(self):
return self.__source
@property
def binary(self):
return self.settings.get('binary', self.__binary)
@property
def format_on_save(self):
return self.settings.get('format_on_save', False)
@format_on_save.setter
def format_on_save(self, value):
self.settings.set('format_on_save', value)
def command(self):
return [self.binary]
def selection_args(self):
return []
def file_args(self, file_name):
return [file_name]
def format(self, file=None, input=None):
args = self.file_args(file) if file else self.selection_args()
return Command(self.command() + args).run(input)
Apply options defined in user settings to the formatting command
|
from .command import Command
from .settings import Settings
class Formatter(object):
def __init__(self, name=None, source=None, binary=None):
self.__name = name
self.__source = 'source.' + (source if source else name.lower())
self.__binary = binary
self.__settings = Settings(name.lower())
@property
def settings(self):
return self.__settings
@property
def name(self):
return self.__name
@property
def source(self):
return self.__source
@property
def binary(self):
return self.settings.get('binary', self.__binary)
@property
def options(self):
return self.settings.get('options', {})
@property
def format_on_save(self):
return self.settings.get('format_on_save', False)
@format_on_save.setter
def format_on_save(self, value):
self.settings.set('format_on_save', value)
def command(self):
return [self.binary]
def selection_args(self):
return []
def file_args(self, file_name):
return [file_name]
def parsed_options(self):
options = []
for key, value in self.options.items():
options.extend(['--' + key, value])
return options
def format(self, file=None, input=None):
command = self.command()
options = self.parsed_options()
args = self.file_args(file) if file else self.selection_args()
return Command(command + options + args).run(input)
|
<commit_before>from .command import Command
from .settings import Settings
class Formatter(object):
def __init__(self, name=None, source=None, binary=None):
self.__name = name
self.__source = 'source.' + (source if source else name.lower())
self.__binary = binary
self.__settings = Settings(name.lower())
@property
def settings(self):
return self.__settings
@property
def name(self):
return self.__name
@property
def source(self):
return self.__source
@property
def binary(self):
return self.settings.get('binary', self.__binary)
@property
def format_on_save(self):
return self.settings.get('format_on_save', False)
@format_on_save.setter
def format_on_save(self, value):
self.settings.set('format_on_save', value)
def command(self):
return [self.binary]
def selection_args(self):
return []
def file_args(self, file_name):
return [file_name]
def format(self, file=None, input=None):
args = self.file_args(file) if file else self.selection_args()
return Command(self.command() + args).run(input)
<commit_msg>Apply options defined in user settings to the formatting command<commit_after>
|
from .command import Command
from .settings import Settings
class Formatter(object):
    """Wraps an external formatting binary together with its user settings.

    Exposes the configured binary, user-supplied command-line options and
    the format-on-save preference, and runs the binary over a file or the
    current selection.
    """

    def __init__(self, name=None, source=None, binary=None):
        self.__name = name
        self.__source = 'source.' + (source if source else name.lower())
        self.__binary = binary
        self.__settings = Settings(name.lower())

    @property
    def settings(self):
        """The Settings object backing this formatter."""
        return self.__settings

    @property
    def name(self):
        """Display name of the formatter."""
        return self.__name

    @property
    def source(self):
        """Sublime source scope this formatter applies to."""
        return self.__source

    @property
    def binary(self):
        # A user-configured binary takes precedence over the default.
        return self.settings.get('binary', self.__binary)

    @property
    def options(self):
        """Mapping of user-defined command-line options."""
        return self.settings.get('options', {})

    @property
    def format_on_save(self):
        return self.settings.get('format_on_save', False)

    @format_on_save.setter
    def format_on_save(self, value):
        self.settings.set('format_on_save', value)

    def command(self):
        """Base invocation: just the binary."""
        return [self.binary]

    def selection_args(self):
        """Extra arguments when formatting a selection (none by default)."""
        return []

    def file_args(self, file_name):
        """Extra arguments when formatting a whole file."""
        return [file_name]

    def parsed_options(self):
        """Flatten the options mapping into ['--key', value, ...] tokens."""
        return [token
                for key, value in self.options.items()
                for token in ('--' + key, value)]

    def format(self, file=None, input=None):
        """Run the formatter over *file*, or over *input* (the selection)."""
        full_command = self.command() + self.parsed_options()
        full_command += self.file_args(file) if file else self.selection_args()
        return Command(full_command).run(input)
|
from .command import Command
from .settings import Settings
class Formatter(object):
def __init__(self, name=None, source=None, binary=None):
self.__name = name
self.__source = 'source.' + (source if source else name.lower())
self.__binary = binary
self.__settings = Settings(name.lower())
@property
def settings(self):
return self.__settings
@property
def name(self):
return self.__name
@property
def source(self):
return self.__source
@property
def binary(self):
return self.settings.get('binary', self.__binary)
@property
def format_on_save(self):
return self.settings.get('format_on_save', False)
@format_on_save.setter
def format_on_save(self, value):
self.settings.set('format_on_save', value)
def command(self):
return [self.binary]
def selection_args(self):
return []
def file_args(self, file_name):
return [file_name]
def format(self, file=None, input=None):
args = self.file_args(file) if file else self.selection_args()
return Command(self.command() + args).run(input)
Apply options defined in user settings to the formatting commandfrom .command import Command
from .settings import Settings
class Formatter(object):
def __init__(self, name=None, source=None, binary=None):
self.__name = name
self.__source = 'source.' + (source if source else name.lower())
self.__binary = binary
self.__settings = Settings(name.lower())
@property
def settings(self):
return self.__settings
@property
def name(self):
return self.__name
@property
def source(self):
return self.__source
@property
def binary(self):
return self.settings.get('binary', self.__binary)
@property
def options(self):
return self.settings.get('options', {})
@property
def format_on_save(self):
return self.settings.get('format_on_save', False)
@format_on_save.setter
def format_on_save(self, value):
self.settings.set('format_on_save', value)
def command(self):
return [self.binary]
def selection_args(self):
return []
def file_args(self, file_name):
return [file_name]
def parsed_options(self):
options = []
for key, value in self.options.items():
options.extend(['--' + key, value])
return options
def format(self, file=None, input=None):
command = self.command()
options = self.parsed_options()
args = self.file_args(file) if file else self.selection_args()
return Command(command + options + args).run(input)
|
<commit_before>from .command import Command
from .settings import Settings
class Formatter(object):
def __init__(self, name=None, source=None, binary=None):
self.__name = name
self.__source = 'source.' + (source if source else name.lower())
self.__binary = binary
self.__settings = Settings(name.lower())
@property
def settings(self):
return self.__settings
@property
def name(self):
return self.__name
@property
def source(self):
return self.__source
@property
def binary(self):
return self.settings.get('binary', self.__binary)
@property
def format_on_save(self):
return self.settings.get('format_on_save', False)
@format_on_save.setter
def format_on_save(self, value):
self.settings.set('format_on_save', value)
def command(self):
return [self.binary]
def selection_args(self):
return []
def file_args(self, file_name):
return [file_name]
def format(self, file=None, input=None):
args = self.file_args(file) if file else self.selection_args()
return Command(self.command() + args).run(input)
<commit_msg>Apply options defined in user settings to the formatting command<commit_after>from .command import Command
from .settings import Settings
class Formatter(object):
def __init__(self, name=None, source=None, binary=None):
self.__name = name
self.__source = 'source.' + (source if source else name.lower())
self.__binary = binary
self.__settings = Settings(name.lower())
@property
def settings(self):
return self.__settings
@property
def name(self):
return self.__name
@property
def source(self):
return self.__source
@property
def binary(self):
return self.settings.get('binary', self.__binary)
@property
def options(self):
return self.settings.get('options', {})
@property
def format_on_save(self):
return self.settings.get('format_on_save', False)
@format_on_save.setter
def format_on_save(self, value):
self.settings.set('format_on_save', value)
def command(self):
return [self.binary]
def selection_args(self):
return []
def file_args(self, file_name):
return [file_name]
def parsed_options(self):
options = []
for key, value in self.options.items():
options.extend(['--' + key, value])
return options
def format(self, file=None, input=None):
command = self.command()
options = self.parsed_options()
args = self.file_args(file) if file else self.selection_args()
return Command(command + options + args).run(input)
|
b5cc400ccc89fb790ef2e505b1b3bd934087cc48
|
src/dbbrankingparser/httpclient.py
|
src/dbbrankingparser/httpclient.py
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'https://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
    """Assemble the ranking HTML's URL for the league with that ID."""
    # The ':d' format spec deliberately rejects non-integer ids.
    return (
        'http://www.basketball-bund.net/public/tabelle.jsp'
        '?print=1'
        '&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
        f'&liga_id={league_id:d}'
    )
def fetch_content(url: str) -> str:
    """Retrieve and return the content of that URL, decoded as UTF-8."""
    response = urlopen(_create_request(url))
    return response.read().decode('utf-8')
def _create_request(url: str) -> Request:
    """Build an HTTP GET request carrying the module's browser User-Agent."""
    return Request(url, headers={'User-Agent': USER_AGENT})
|
Revert "Use HTTPS to retrieve ranking from DBB"
|
Revert "Use HTTPS to retrieve ranking from DBB"
Not using HTTPS avoids certificate errors.
This ranking data is not sensitive, so it should be fine to continue as
before the original change.
|
Python
|
mit
|
homeworkprod/dbb-ranking-parser
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'https://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
Revert "Use HTTPS to retrieve ranking from DBB"
Not using HTTPS avoids certificate errors.
This ranking data is not sensitive, so it should be fine to continue as
before the original change.
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'http://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
<commit_before>"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'https://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
<commit_msg>Revert "Use HTTPS to retrieve ranking from DBB"
Not using HTTPS avoids certificate errors.
This ranking data is not sensitive, so it should be fine to continue as
before the original change.<commit_after>
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'http://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'https://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
Revert "Use HTTPS to retrieve ranking from DBB"
Not using HTTPS avoids certificate errors.
This ranking data is not sensitive, so it should be fine to continue as
before the original change."""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'http://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
<commit_before>"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'https://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
<commit_msg>Revert "Use HTTPS to retrieve ranking from DBB"
Not using HTTPS avoids certificate errors.
This ranking data is not sensitive, so it should be fine to continue as
before the original change.<commit_after>"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'http://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
2fad10bde71a56ee3a5046a4ffc0db2d3becb319
|
src/shared/ident.py
|
src/shared/ident.py
|
class UnitId:
    """Identifier of a unit: the owning player's id plus a per-player sub-id.

    Instances are hashable and compare by value, so they can be used as
    dict/set keys.
    """

    def __init__(self, playerId, unitSubId):
        self.playerId = playerId
        self.subId = unitSubId

    def __eq__(self, other):
        # Bug fix: the parameter was named 'rhs' while the body referenced
        # the undefined name 'other' — any equality test raised NameError.
        return self.playerId == other.playerId and self.subId == other.subId

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        # Bug fix: 'unitSubId' is only the __init__ parameter name; the
        # attribute is stored as 'subId', so hashing raised AttributeError.
        return hash((self.playerId, self.subId))

    def __repr__(self):
        # Same attribute-name fix as __hash__.
        return repr((self.playerId, self.subId))
def unitToPlayer(unitId):
    """Extract the id of the player who owns *unitId*."""
    owner = unitId.playerId
    return owner
# FIXME [#15]: This function shouldn't exist.
def playerToUnit(playerId):
    """Return the conventional UnitId (sub-id 0) standing in for *playerId*."""
    return UnitId(playerId, 0)
# For using UnitIds in messages
def encodeUnitId(unitId):
    """Serialize *unitId* as a pair of decimal strings for use in messages."""
    return tuple(str(part) for part in (unitId.playerId, unitId.subId))
def parseUnitId(words):
    """Parse a two-element sequence of numeric strings into a UnitId.

    Inverse of encodeUnitId.
    """
    # TODO: Somewhere higher up, handle all exceptions in parsing functions and
    # turn them into InvalidMessageErrors. Do we do this already?
    playerWord, subWord = words
    return UnitId(int(playerWord), int(subWord))
|
class UnitId:
    """Value object pairing an owning player's id with a per-player sub-id."""

    def __init__(self, playerId, unitSubId):
        self.playerId = playerId
        self.subId = unitSubId

    def __eq__(self, other):
        return (self.playerId, self.subId) == (other.playerId, other.subId)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Hash the same tuple that equality compares, keeping the
        # eq/hash contract intact.
        return hash((self.playerId, self.subId))

    def __repr__(self):
        return repr((self.playerId, self.subId))
def unitToPlayer(unitId):
return unitId.playerId
# FIXME [#15]: This function shouldn't exist.
def playerToUnit(playerId):
return UnitId(playerId, 0)
# For using UnitIds in messages
def encodeUnitId(unitId):
return (str(unitId.playerId), str(unitId.subId))
def parseUnitId(words):
# TODO: Somewhere higher up, handle all exceptions in parsing functions and
# turn them into InvalidMessageErrors. Do we do this already?
playerId, subId = map(int, words)
return UnitId(playerId, subId)
|
Fix a couple trivial bugs.
|
Fix a couple trivial bugs.
|
Python
|
mit
|
CheeseLord/warts,CheeseLord/warts
|
class UnitId:
def __init__(self, playerId, unitSubId):
self.playerId = playerId
self.subId = unitSubId
def __eq__(self, rhs):
return self.playerId == rhs.playerId and self.subId == other.subId
def __ne__(self, rhs):
return not (self == rhs)
def __hash__(self):
return hash((self.playerId, self.unitSubId))
def __repr__(self):
return repr((self.playerId, self.unitSubId))
def unitToPlayer(unitId):
return unitId.playerId
# FIXME [#15]: This function shouldn't exist.
def playerToUnit(playerId):
return UnitId(playerId, 0)
# For using UnitIds in messages
def encodeUnitId(unitId):
return (str(unitId.playerId), str(unitId.subId))
def parseUnitId(words):
# TODO: Somewhere higher up, handle all exceptions in parsing functions and
# turn them into InvalidMessageErrors. Do we do this already?
playerId, subId = map(int, words)
return UnitId(playerId, subId)
Fix a couple trivial bugs.
|
class UnitId:
def __init__(self, playerId, unitSubId):
self.playerId = playerId
self.subId = unitSubId
def __eq__(self, other):
return self.playerId == other.playerId and self.subId == other.subId
def __ne__(self, other):
return not (self == other)
def __hash__(self):
return hash((self.playerId, self.subId))
def __repr__(self):
return repr((self.playerId, self.subId))
def unitToPlayer(unitId):
return unitId.playerId
# FIXME [#15]: This function shouldn't exist.
def playerToUnit(playerId):
return UnitId(playerId, 0)
# For using UnitIds in messages
def encodeUnitId(unitId):
return (str(unitId.playerId), str(unitId.subId))
def parseUnitId(words):
# TODO: Somewhere higher up, handle all exceptions in parsing functions and
# turn them into InvalidMessageErrors. Do we do this already?
playerId, subId = map(int, words)
return UnitId(playerId, subId)
|
<commit_before>class UnitId:
def __init__(self, playerId, unitSubId):
self.playerId = playerId
self.subId = unitSubId
def __eq__(self, rhs):
return self.playerId == rhs.playerId and self.subId == other.subId
def __ne__(self, rhs):
return not (self == rhs)
def __hash__(self):
return hash((self.playerId, self.unitSubId))
def __repr__(self):
return repr((self.playerId, self.unitSubId))
def unitToPlayer(unitId):
return unitId.playerId
# FIXME [#15]: This function shouldn't exist.
def playerToUnit(playerId):
return UnitId(playerId, 0)
# For using UnitIds in messages
def encodeUnitId(unitId):
return (str(unitId.playerId), str(unitId.subId))
def parseUnitId(words):
# TODO: Somewhere higher up, handle all exceptions in parsing functions and
# turn them into InvalidMessageErrors. Do we do this already?
playerId, subId = map(int, words)
return UnitId(playerId, subId)
<commit_msg>Fix a couple trivial bugs.<commit_after>
|
class UnitId:
def __init__(self, playerId, unitSubId):
self.playerId = playerId
self.subId = unitSubId
def __eq__(self, other):
return self.playerId == other.playerId and self.subId == other.subId
def __ne__(self, other):
return not (self == other)
def __hash__(self):
return hash((self.playerId, self.subId))
def __repr__(self):
return repr((self.playerId, self.subId))
def unitToPlayer(unitId):
return unitId.playerId
# FIXME [#15]: This function shouldn't exist.
def playerToUnit(playerId):
return UnitId(playerId, 0)
# For using UnitIds in messages
def encodeUnitId(unitId):
return (str(unitId.playerId), str(unitId.subId))
def parseUnitId(words):
# TODO: Somewhere higher up, handle all exceptions in parsing functions and
# turn them into InvalidMessageErrors. Do we do this already?
playerId, subId = map(int, words)
return UnitId(playerId, subId)
|
class UnitId:
def __init__(self, playerId, unitSubId):
self.playerId = playerId
self.subId = unitSubId
def __eq__(self, rhs):
return self.playerId == rhs.playerId and self.subId == other.subId
def __ne__(self, rhs):
return not (self == rhs)
def __hash__(self):
return hash((self.playerId, self.unitSubId))
def __repr__(self):
return repr((self.playerId, self.unitSubId))
def unitToPlayer(unitId):
return unitId.playerId
# FIXME [#15]: This function shouldn't exist.
def playerToUnit(playerId):
return UnitId(playerId, 0)
# For using UnitIds in messages
def encodeUnitId(unitId):
return (str(unitId.playerId), str(unitId.subId))
def parseUnitId(words):
# TODO: Somewhere higher up, handle all exceptions in parsing functions and
# turn them into InvalidMessageErrors. Do we do this already?
playerId, subId = map(int, words)
return UnitId(playerId, subId)
Fix a couple trivial bugs.class UnitId:
def __init__(self, playerId, unitSubId):
self.playerId = playerId
self.subId = unitSubId
def __eq__(self, other):
return self.playerId == other.playerId and self.subId == other.subId
def __ne__(self, other):
return not (self == other)
def __hash__(self):
return hash((self.playerId, self.subId))
def __repr__(self):
return repr((self.playerId, self.subId))
def unitToPlayer(unitId):
return unitId.playerId
# FIXME [#15]: This function shouldn't exist.
def playerToUnit(playerId):
return UnitId(playerId, 0)
# For using UnitIds in messages
def encodeUnitId(unitId):
return (str(unitId.playerId), str(unitId.subId))
def parseUnitId(words):
# TODO: Somewhere higher up, handle all exceptions in parsing functions and
# turn them into InvalidMessageErrors. Do we do this already?
playerId, subId = map(int, words)
return UnitId(playerId, subId)
|
<commit_before>class UnitId:
def __init__(self, playerId, unitSubId):
self.playerId = playerId
self.subId = unitSubId
def __eq__(self, rhs):
return self.playerId == rhs.playerId and self.subId == other.subId
def __ne__(self, rhs):
return not (self == rhs)
def __hash__(self):
return hash((self.playerId, self.unitSubId))
def __repr__(self):
return repr((self.playerId, self.unitSubId))
def unitToPlayer(unitId):
return unitId.playerId
# FIXME [#15]: This function shouldn't exist.
def playerToUnit(playerId):
return UnitId(playerId, 0)
# For using UnitIds in messages
def encodeUnitId(unitId):
return (str(unitId.playerId), str(unitId.subId))
def parseUnitId(words):
# TODO: Somewhere higher up, handle all exceptions in parsing functions and
# turn them into InvalidMessageErrors. Do we do this already?
playerId, subId = map(int, words)
return UnitId(playerId, subId)
<commit_msg>Fix a couple trivial bugs.<commit_after>class UnitId:
def __init__(self, playerId, unitSubId):
self.playerId = playerId
self.subId = unitSubId
def __eq__(self, other):
return self.playerId == other.playerId and self.subId == other.subId
def __ne__(self, other):
return not (self == other)
def __hash__(self):
return hash((self.playerId, self.subId))
def __repr__(self):
return repr((self.playerId, self.subId))
def unitToPlayer(unitId):
return unitId.playerId
# FIXME [#15]: This function shouldn't exist.
def playerToUnit(playerId):
return UnitId(playerId, 0)
# For using UnitIds in messages
def encodeUnitId(unitId):
return (str(unitId.playerId), str(unitId.subId))
def parseUnitId(words):
# TODO: Somewhere higher up, handle all exceptions in parsing functions and
# turn them into InvalidMessageErrors. Do we do this already?
playerId, subId = map(int, words)
return UnitId(playerId, subId)
|
a4f1e0d23ee9e7a3395b6e04d5124ee2ca1e28da
|
trac/versioncontrol/web_ui/__init__.py
|
trac/versioncontrol/web_ui/__init__.py
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
|
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@2214 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
Python
|
bsd-3-clause
|
rbaumg/trac,rbaumg/trac,rbaumg/trac,rbaumg/trac
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@2214 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
<commit_before>from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
<commit_msg>Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@2214 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after>
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@2214 af82e41b-90c4-0310-8c96-b1721e28e2e2from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
<commit_before>from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
<commit_msg>Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@2214 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after>from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
6f6a84a002980b4d6c0c497fba2416c9e0f319fb
|
admin_tools/checks.py
|
admin_tools/checks.py
|
from django.core.checks import register, Warning
from django.template.loader import get_template, TemplateDoesNotExist
W001 = Warning(
'You must add "admin_tools.template_loaders.TemplateLoader" in your '
'template loaders variable, see: '
'https://django-admin-tools.readthedocs.org/en/latest/configuration.html',
id='admin_tools.W001',
obj='admin_tools'
)
@register('admin_tools')
def check_admin_tools_configuration(app_configs=None, **kwargs):
result = []
try:
get_template('admin:admin/base.html')
except TemplateDoesNotExist:
result.append(W001)
return result
|
from django.core.checks import register, Warning
from django.template.loader import get_template, TemplateDoesNotExist
W001 = Warning(
'You must add "admin_tools.template_loaders.Loader" in your '
'template loaders variable, see: '
'https://django-admin-tools.readthedocs.org/en/latest/configuration.html',
id='admin_tools.W001',
obj='admin_tools'
)
@register('admin_tools')
def check_admin_tools_configuration(app_configs=None, **kwargs):
result = []
try:
get_template('admin:admin/base.html')
except TemplateDoesNotExist:
result.append(W001)
return result
|
Fix class name in template loader check
|
Fix class name in template loader check
|
Python
|
mit
|
django-admin-tools/django-admin-tools,django-admin-tools/django-admin-tools,django-admin-tools/django-admin-tools
|
from django.core.checks import register, Warning
from django.template.loader import get_template, TemplateDoesNotExist
W001 = Warning(
'You must add "admin_tools.template_loaders.TemplateLoader" in your '
'template loaders variable, see: '
'https://django-admin-tools.readthedocs.org/en/latest/configuration.html',
id='admin_tools.W001',
obj='admin_tools'
)
@register('admin_tools')
def check_admin_tools_configuration(app_configs=None, **kwargs):
result = []
try:
get_template('admin:admin/base.html')
except TemplateDoesNotExist:
result.append(W001)
return result
Fix class name in template loader check
|
from django.core.checks import register, Warning
from django.template.loader import get_template, TemplateDoesNotExist
W001 = Warning(
'You must add "admin_tools.template_loaders.Loader" in your '
'template loaders variable, see: '
'https://django-admin-tools.readthedocs.org/en/latest/configuration.html',
id='admin_tools.W001',
obj='admin_tools'
)
@register('admin_tools')
def check_admin_tools_configuration(app_configs=None, **kwargs):
result = []
try:
get_template('admin:admin/base.html')
except TemplateDoesNotExist:
result.append(W001)
return result
|
<commit_before>from django.core.checks import register, Warning
from django.template.loader import get_template, TemplateDoesNotExist
W001 = Warning(
'You must add "admin_tools.template_loaders.TemplateLoader" in your '
'template loaders variable, see: '
'https://django-admin-tools.readthedocs.org/en/latest/configuration.html',
id='admin_tools.W001',
obj='admin_tools'
)
@register('admin_tools')
def check_admin_tools_configuration(app_configs=None, **kwargs):
result = []
try:
get_template('admin:admin/base.html')
except TemplateDoesNotExist:
result.append(W001)
return result
<commit_msg>Fix class name in template loader check<commit_after>
|
from django.core.checks import register, Warning
from django.template.loader import get_template, TemplateDoesNotExist
W001 = Warning(
'You must add "admin_tools.template_loaders.Loader" in your '
'template loaders variable, see: '
'https://django-admin-tools.readthedocs.org/en/latest/configuration.html',
id='admin_tools.W001',
obj='admin_tools'
)
@register('admin_tools')
def check_admin_tools_configuration(app_configs=None, **kwargs):
result = []
try:
get_template('admin:admin/base.html')
except TemplateDoesNotExist:
result.append(W001)
return result
|
from django.core.checks import register, Warning
from django.template.loader import get_template, TemplateDoesNotExist
W001 = Warning(
'You must add "admin_tools.template_loaders.TemplateLoader" in your '
'template loaders variable, see: '
'https://django-admin-tools.readthedocs.org/en/latest/configuration.html',
id='admin_tools.W001',
obj='admin_tools'
)
@register('admin_tools')
def check_admin_tools_configuration(app_configs=None, **kwargs):
result = []
try:
get_template('admin:admin/base.html')
except TemplateDoesNotExist:
result.append(W001)
return result
Fix class name in template loader checkfrom django.core.checks import register, Warning
from django.template.loader import get_template, TemplateDoesNotExist
W001 = Warning(
'You must add "admin_tools.template_loaders.Loader" in your '
'template loaders variable, see: '
'https://django-admin-tools.readthedocs.org/en/latest/configuration.html',
id='admin_tools.W001',
obj='admin_tools'
)
@register('admin_tools')
def check_admin_tools_configuration(app_configs=None, **kwargs):
result = []
try:
get_template('admin:admin/base.html')
except TemplateDoesNotExist:
result.append(W001)
return result
|
<commit_before>from django.core.checks import register, Warning
from django.template.loader import get_template, TemplateDoesNotExist
W001 = Warning(
'You must add "admin_tools.template_loaders.TemplateLoader" in your '
'template loaders variable, see: '
'https://django-admin-tools.readthedocs.org/en/latest/configuration.html',
id='admin_tools.W001',
obj='admin_tools'
)
@register('admin_tools')
def check_admin_tools_configuration(app_configs=None, **kwargs):
result = []
try:
get_template('admin:admin/base.html')
except TemplateDoesNotExist:
result.append(W001)
return result
<commit_msg>Fix class name in template loader check<commit_after>from django.core.checks import register, Warning
from django.template.loader import get_template, TemplateDoesNotExist
W001 = Warning(
'You must add "admin_tools.template_loaders.Loader" in your '
'template loaders variable, see: '
'https://django-admin-tools.readthedocs.org/en/latest/configuration.html',
id='admin_tools.W001',
obj='admin_tools'
)
@register('admin_tools')
def check_admin_tools_configuration(app_configs=None, **kwargs):
result = []
try:
get_template('admin:admin/base.html')
except TemplateDoesNotExist:
result.append(W001)
return result
|
ee01e4574ec1a365e87c879a01216249f75c0da8
|
src/commoner/registration/admin.py
|
src/commoner/registration/admin.py
|
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
    """Admin interface for PartialRegistration; uses ModelAdmin defaults."""
    pass
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
list_filter = ('complete',)
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
Allow filtering of registrations by complete status.
|
Allow filtering of registrations by complete status.
|
Python
|
agpl-3.0
|
cc-archive/commoner,cc-archive/commoner
|
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
pass
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
Allow filtering of registrations by complete status.
|
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
list_filter = ('complete',)
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
<commit_before>from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
pass
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
<commit_msg>Allow filtering of registrations by complete status.<commit_after>
|
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
list_filter = ('complete',)
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
pass
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
Allow filtering of registrations by complete status.from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
list_filter = ('complete',)
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
<commit_before>from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
pass
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
<commit_msg>Allow filtering of registrations by complete status.<commit_after>from django.contrib import admin
from commoner.registration.models import PartialRegistration
class PartialRegistrationAdmin(admin.ModelAdmin):
list_filter = ('complete',)
admin.site.register(PartialRegistration, PartialRegistrationAdmin)
|
bc4543a7663516d689d00feb5a392ff4004117ad
|
src/python/setup.py
|
src/python/setup.py
|
from setuptools import setup, find_packages
with open('README.md') as file:
long_description = file.read()
setup(
name="circllhist",
long_description=long_description,
long_description_content_type='text/markdown',
version="0.4.0",
description="OpenHistogram log-linear histogram library",
maintainer="Circonus Packaging",
maintainer_email="packaging@circonus.com",
url="https://github.com/openhistogram/libcircllhist",
install_requires=['cffi'],
packages=['circllhist'],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX"
],
python_requires=">=2.7",
)
|
from setuptools import setup, find_packages
with open('README.md') as file:
long_description = file.read()
setup(
name="circllhist",
long_description=long_description,
long_description_content_type='text/markdown',
version="0.3.2",
description="OpenHistogram log-linear histogram library",
maintainer="Circonus Packaging",
maintainer_email="packaging@circonus.com",
url="https://github.com/openhistogram/libcircllhist",
install_requires=['cffi'],
packages=['circllhist'],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX"
],
python_requires=">=2.7",
)
|
Revert "Bump Python binding to 0.4.0"
|
Revert "Bump Python binding to 0.4.0"
This reverts commit d46e582186ee0fdc30f52319b2bb2cfca1a7d59b.
|
Python
|
bsd-3-clause
|
circonus-labs/libcircllhist,circonus-labs/libcircllhist,circonus-labs/libcircllhist
|
from setuptools import setup, find_packages
with open('README.md') as file:
long_description = file.read()
setup(
name="circllhist",
long_description=long_description,
long_description_content_type='text/markdown',
version="0.4.0",
description="OpenHistogram log-linear histogram library",
maintainer="Circonus Packaging",
maintainer_email="packaging@circonus.com",
url="https://github.com/openhistogram/libcircllhist",
install_requires=['cffi'],
packages=['circllhist'],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX"
],
python_requires=">=2.7",
)
Revert "Bump Python binding to 0.4.0"
This reverts commit d46e582186ee0fdc30f52319b2bb2cfca1a7d59b.
|
from setuptools import setup, find_packages
with open('README.md') as file:
long_description = file.read()
setup(
name="circllhist",
long_description=long_description,
long_description_content_type='text/markdown',
version="0.3.2",
description="OpenHistogram log-linear histogram library",
maintainer="Circonus Packaging",
maintainer_email="packaging@circonus.com",
url="https://github.com/openhistogram/libcircllhist",
install_requires=['cffi'],
packages=['circllhist'],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX"
],
python_requires=">=2.7",
)
|
<commit_before>from setuptools import setup, find_packages
with open('README.md') as file:
long_description = file.read()
setup(
name="circllhist",
long_description=long_description,
long_description_content_type='text/markdown',
version="0.4.0",
description="OpenHistogram log-linear histogram library",
maintainer="Circonus Packaging",
maintainer_email="packaging@circonus.com",
url="https://github.com/openhistogram/libcircllhist",
install_requires=['cffi'],
packages=['circllhist'],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX"
],
python_requires=">=2.7",
)
<commit_msg>Revert "Bump Python binding to 0.4.0"
This reverts commit d46e582186ee0fdc30f52319b2bb2cfca1a7d59b.<commit_after>
|
from setuptools import setup, find_packages
with open('README.md') as file:
long_description = file.read()
setup(
name="circllhist",
long_description=long_description,
long_description_content_type='text/markdown',
version="0.3.2",
description="OpenHistogram log-linear histogram library",
maintainer="Circonus Packaging",
maintainer_email="packaging@circonus.com",
url="https://github.com/openhistogram/libcircllhist",
install_requires=['cffi'],
packages=['circllhist'],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX"
],
python_requires=">=2.7",
)
|
from setuptools import setup, find_packages
with open('README.md') as file:
long_description = file.read()
setup(
name="circllhist",
long_description=long_description,
long_description_content_type='text/markdown',
version="0.4.0",
description="OpenHistogram log-linear histogram library",
maintainer="Circonus Packaging",
maintainer_email="packaging@circonus.com",
url="https://github.com/openhistogram/libcircllhist",
install_requires=['cffi'],
packages=['circllhist'],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX"
],
python_requires=">=2.7",
)
Revert "Bump Python binding to 0.4.0"
This reverts commit d46e582186ee0fdc30f52319b2bb2cfca1a7d59b.from setuptools import setup, find_packages
with open('README.md') as file:
long_description = file.read()
setup(
name="circllhist",
long_description=long_description,
long_description_content_type='text/markdown',
version="0.3.2",
description="OpenHistogram log-linear histogram library",
maintainer="Circonus Packaging",
maintainer_email="packaging@circonus.com",
url="https://github.com/openhistogram/libcircllhist",
install_requires=['cffi'],
packages=['circllhist'],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX"
],
python_requires=">=2.7",
)
|
<commit_before>from setuptools import setup, find_packages
with open('README.md') as file:
long_description = file.read()
setup(
name="circllhist",
long_description=long_description,
long_description_content_type='text/markdown',
version="0.4.0",
description="OpenHistogram log-linear histogram library",
maintainer="Circonus Packaging",
maintainer_email="packaging@circonus.com",
url="https://github.com/openhistogram/libcircllhist",
install_requires=['cffi'],
packages=['circllhist'],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX"
],
python_requires=">=2.7",
)
<commit_msg>Revert "Bump Python binding to 0.4.0"
This reverts commit d46e582186ee0fdc30f52319b2bb2cfca1a7d59b.<commit_after>from setuptools import setup, find_packages
with open('README.md') as file:
long_description = file.read()
setup(
name="circllhist",
long_description=long_description,
long_description_content_type='text/markdown',
version="0.3.2",
description="OpenHistogram log-linear histogram library",
maintainer="Circonus Packaging",
maintainer_email="packaging@circonus.com",
url="https://github.com/openhistogram/libcircllhist",
install_requires=['cffi'],
packages=['circllhist'],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX"
],
python_requires=">=2.7",
)
|
2cf7f70e352f8427cfb7d1dba309ee7d7e0ce5f4
|
markitup/urls.py
|
markitup/urls.py
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
from markitup.views import apply_filter
urlpatterns = patterns(
'',
url(r'preview/$', apply_filter, name='markitup_preview')
)
|
from __future__ import unicode_literals
from django.conf.urls import url
from markitup.views import apply_filter
urlpatterns = [
url(r'preview/$', apply_filter, name='markitup_preview'),
]
|
Use plain Python list for urlpatterns.
|
Use plain Python list for urlpatterns.
|
Python
|
bsd-3-clause
|
zsiciarz/django-markitup,zsiciarz/django-markitup,carljm/django-markitup,carljm/django-markitup,carljm/django-markitup,zsiciarz/django-markitup
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
from markitup.views import apply_filter
urlpatterns = patterns(
'',
url(r'preview/$', apply_filter, name='markitup_preview')
)
Use plain Python list for urlpatterns.
|
from __future__ import unicode_literals
from django.conf.urls import url
from markitup.views import apply_filter
urlpatterns = [
url(r'preview/$', apply_filter, name='markitup_preview'),
]
|
<commit_before>from __future__ import unicode_literals
from django.conf.urls import patterns, url
from markitup.views import apply_filter
urlpatterns = patterns(
'',
url(r'preview/$', apply_filter, name='markitup_preview')
)
<commit_msg>Use plain Python list for urlpatterns.<commit_after>
|
from __future__ import unicode_literals
from django.conf.urls import url
from markitup.views import apply_filter
urlpatterns = [
url(r'preview/$', apply_filter, name='markitup_preview'),
]
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
from markitup.views import apply_filter
urlpatterns = patterns(
'',
url(r'preview/$', apply_filter, name='markitup_preview')
)
Use plain Python list for urlpatterns.from __future__ import unicode_literals
from django.conf.urls import url
from markitup.views import apply_filter
urlpatterns = [
url(r'preview/$', apply_filter, name='markitup_preview'),
]
|
<commit_before>from __future__ import unicode_literals
from django.conf.urls import patterns, url
from markitup.views import apply_filter
urlpatterns = patterns(
'',
url(r'preview/$', apply_filter, name='markitup_preview')
)
<commit_msg>Use plain Python list for urlpatterns.<commit_after>from __future__ import unicode_literals
from django.conf.urls import url
from markitup.views import apply_filter
urlpatterns = [
url(r'preview/$', apply_filter, name='markitup_preview'),
]
|
bdc0466c63347280fbd8bc8c30fb07f294200194
|
client/third_party/idna/__init__.py
|
client/third_party/idna/__init__.py
|
# Emulate the bare minimum for idna for the Swarming bot.
# In practice, we do not need it, and it's very large.
def encode(host, uts46):
return unicode(host)
|
# Emulate the bare minimum for idna for the Swarming bot.
# In practice, we do not need it, and it's very large.
# See https://pypi.org/project/idna/
from encodings import idna
def encode(host, uts46=False): # pylint: disable=unused-argument
# Used by urllib3
return idna.ToASCII(host)
def decode(host):
# Used by cryptography/hazmat/backends/openssl/x509.py
return idna.ToUnicode(host)
|
Change idna stub to use python's default
|
[client] Change idna stub to use python's default
Fix a regression from 690b8ae29be2ca3b4782fa6ad0e7f2454443c38d which broke
select bots running inside docker.
The new stub is still simpler than https://pypi.org/project/idna/ and lighter
weight but much better than ignoring the "xn-" encoding as this was done
previously. As per the project home page:
This acts as a suitable replacement for the “encodings.idna” module that comes
with the Python standard library, but only supports the old, deprecated IDNA
specification (RFC 3490).
In practice, we don't expect to use non-ASCII hostnames, so it's not a big deal
for us.
decode() is required by openssl/x509.py.
TBR=jchinlee@chromium.org
Bug: 916644
Change-Id: Ia999a56b981d943e2f3d942f83e40d40e1bb805b
Reviewed-on: https://chromium-review.googlesource.com/c/infra/luci/luci-py/+/1573244
Reviewed-by: Marc-Antoine Ruel <d2bf0fc09b08f7b0888b3ddab32b3e89c7122c8b@chromium.org>
Commit-Queue: Marc-Antoine Ruel <d2bf0fc09b08f7b0888b3ddab32b3e89c7122c8b@chromium.org>
|
Python
|
apache-2.0
|
luci/luci-py,luci/luci-py,luci/luci-py,luci/luci-py
|
# Emulate the bare minimum for idna for the Swarming bot.
# In practice, we do not need it, and it's very large.
def encode(host, uts46):
return unicode(host)
[client] Change idna stub to use python's default
Fix a regression from 690b8ae29be2ca3b4782fa6ad0e7f2454443c38d which broke
select bots running inside docker.
The new stub is still simpler than https://pypi.org/project/idna/ and lighter
weight but much better than ignoring the "xn-" encoding as this was done
previously. As per the project home page:
This acts as a suitable replacement for the “encodings.idna” module that comes
with the Python standard library, but only supports the old, deprecated IDNA
specification (RFC 3490).
In practice, we don't expect to use non-ASCII hostnames, so it's not a big deal
for us.
decode() is required by openssl/x509.py.
TBR=jchinlee@chromium.org
Bug: 916644
Change-Id: Ia999a56b981d943e2f3d942f83e40d40e1bb805b
Reviewed-on: https://chromium-review.googlesource.com/c/infra/luci/luci-py/+/1573244
Reviewed-by: Marc-Antoine Ruel <d2bf0fc09b08f7b0888b3ddab32b3e89c7122c8b@chromium.org>
Commit-Queue: Marc-Antoine Ruel <d2bf0fc09b08f7b0888b3ddab32b3e89c7122c8b@chromium.org>
|
# Emulate the bare minimum for idna for the Swarming bot.
# In practice, we do not need it, and it's very large.
# See https://pypi.org/project/idna/
from encodings import idna
def encode(host, uts46=False): # pylint: disable=unused-argument
# Used by urllib3
return idna.ToASCII(host)
def decode(host):
# Used by cryptography/hazmat/backends/openssl/x509.py
return idna.ToUnicode(host)
|
<commit_before># Emulate the bare minimum for idna for the Swarming bot.
# In practice, we do not need it, and it's very large.
def encode(host, uts46):
return unicode(host)
<commit_msg>[client] Change idna stub to use python's default
Fix a regression from 690b8ae29be2ca3b4782fa6ad0e7f2454443c38d which broke
select bots running inside docker.
The new stub is still simpler than https://pypi.org/project/idna/ and lighter
weight but much better than ignoring the "xn-" encoding as this was done
previously. As per the project home page:
This acts as a suitable replacement for the “encodings.idna” module that comes
with the Python standard library, but only supports the old, deprecated IDNA
specification (RFC 3490).
In practice, we don't expect to use non-ASCII hostnames, so it's not a big deal
for us.
decode() is required by openssl/x509.py.
TBR=jchinlee@chromium.org
Bug: 916644
Change-Id: Ia999a56b981d943e2f3d942f83e40d40e1bb805b
Reviewed-on: https://chromium-review.googlesource.com/c/infra/luci/luci-py/+/1573244
Reviewed-by: Marc-Antoine Ruel <d2bf0fc09b08f7b0888b3ddab32b3e89c7122c8b@chromium.org>
Commit-Queue: Marc-Antoine Ruel <d2bf0fc09b08f7b0888b3ddab32b3e89c7122c8b@chromium.org><commit_after>
|
# Emulate the bare minimum for idna for the Swarming bot.
# In practice, we do not need it, and it's very large.
# See https://pypi.org/project/idna/
from encodings import idna
def encode(host, uts46=False): # pylint: disable=unused-argument
# Used by urllib3
return idna.ToASCII(host)
def decode(host):
# Used by cryptography/hazmat/backends/openssl/x509.py
return idna.ToUnicode(host)
|
# Emulate the bare minimum for idna for the Swarming bot.
# In practice, we do not need it, and it's very large.
def encode(host, uts46):
return unicode(host)
[client] Change idna stub to use python's default
Fix a regression from 690b8ae29be2ca3b4782fa6ad0e7f2454443c38d which broke
select bots running inside docker.
The new stub is still simpler than https://pypi.org/project/idna/ and lighter
weight but much better than ignoring the "xn-" encoding as this was done
previously. As per the project home page:
This acts as a suitable replacement for the “encodings.idna” module that comes
with the Python standard library, but only supports the old, deprecated IDNA
specification (RFC 3490).
In practice, we don't expect to use non-ASCII hostnames, so it's not a big deal
for us.
decode() is required by openssl/x509.py.
TBR=jchinlee@chromium.org
Bug: 916644
Change-Id: Ia999a56b981d943e2f3d942f83e40d40e1bb805b
Reviewed-on: https://chromium-review.googlesource.com/c/infra/luci/luci-py/+/1573244
Reviewed-by: Marc-Antoine Ruel <d2bf0fc09b08f7b0888b3ddab32b3e89c7122c8b@chromium.org>
Commit-Queue: Marc-Antoine Ruel <d2bf0fc09b08f7b0888b3ddab32b3e89c7122c8b@chromium.org># Emulate the bare minimum for idna for the Swarming bot.
# In practice, we do not need it, and it's very large.
# See https://pypi.org/project/idna/
from encodings import idna
def encode(host, uts46=False): # pylint: disable=unused-argument
# Used by urllib3
return idna.ToASCII(host)
def decode(host):
# Used by cryptography/hazmat/backends/openssl/x509.py
return idna.ToUnicode(host)
|
<commit_before># Emulate the bare minimum for idna for the Swarming bot.
# In practice, we do not need it, and it's very large.
def encode(host, uts46):
return unicode(host)
<commit_msg>[client] Change idna stub to use python's default
Fix a regression from 690b8ae29be2ca3b4782fa6ad0e7f2454443c38d which broke
select bots running inside docker.
The new stub is still simpler than https://pypi.org/project/idna/ and lighter
weight but much better than ignoring the "xn-" encoding as this was done
previously. As per the project home page:
This acts as a suitable replacement for the “encodings.idna” module that comes
with the Python standard library, but only supports the old, deprecated IDNA
specification (RFC 3490).
In practice, we don't expect to use non-ASCII hostnames, so it's not a big deal
for us.
decode() is required by openssl/x509.py.
TBR=jchinlee@chromium.org
Bug: 916644
Change-Id: Ia999a56b981d943e2f3d942f83e40d40e1bb805b
Reviewed-on: https://chromium-review.googlesource.com/c/infra/luci/luci-py/+/1573244
Reviewed-by: Marc-Antoine Ruel <d2bf0fc09b08f7b0888b3ddab32b3e89c7122c8b@chromium.org>
Commit-Queue: Marc-Antoine Ruel <d2bf0fc09b08f7b0888b3ddab32b3e89c7122c8b@chromium.org><commit_after># Emulate the bare minimum for idna for the Swarming bot.
# In practice, we do not need it, and it's very large.
# See https://pypi.org/project/idna/
from encodings import idna
def encode(host, uts46=False): # pylint: disable=unused-argument
# Used by urllib3
return idna.ToASCII(host)
def decode(host):
# Used by cryptography/hazmat/backends/openssl/x509.py
return idna.ToUnicode(host)
|
e64f1add0c36f33c15c93118b653de8752c576d5
|
webserver/codemanagement/validators.py
|
webserver/codemanagement/validators.py
|
from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[A-Za-z][\w\-\.]+[A-Za-z]$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
|
from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[\w\-\.]+$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
|
Check submission names more leniently
|
Check submission names more leniently
Fixes #55
|
Python
|
bsd-3-clause
|
siggame/webserver,siggame/webserver,siggame/webserver
|
from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[A-Za-z][\w\-\.]+[A-Za-z]$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
Check submission names more leniently
Fixes #55
|
from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[\w\-\.]+$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
|
<commit_before>from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[A-Za-z][\w\-\.]+[A-Za-z]$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
<commit_msg>Check submission names more leniently
Fixes #55<commit_after>
|
from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[\w\-\.]+$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
|
from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[A-Za-z][\w\-\.]+[A-Za-z]$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
Check submission names more leniently
Fixes #55from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[\w\-\.]+$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
|
<commit_before>from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[A-Za-z][\w\-\.]+[A-Za-z]$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
<commit_msg>Check submission names more leniently
Fixes #55<commit_after>from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[\w\-\.]+$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
|
9e2eef4f246c446fbcf05ce29ae309b9a554d46b
|
app/views/schemas.py
|
app/views/schemas.py
|
from dataclasses import dataclass
from datetime import datetime
@dataclass
class AuthResponse:
email: str
image_access: bool
search_access: bool
created: datetime
modified: datetime
@dataclass
class FontResponse:
filename: str
id: str
alias: str
_self: str
@dataclass
class MemeRequest:
template_id: str
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class CustomRequest:
background: str
style: str
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class MemeTemplateRequest:
style: list[str]
text: list[str]
font: str
extension: str
redirect: bool
@dataclass
class AutomaticRequest:
text: str
safe: bool
redirect: bool
@dataclass
class MemeResponse:
url: str
@dataclass
class ExampleResponse:
url: str
template: str
@dataclass
class _Example:
text: list[str]
url: str
@dataclass
class TemplateResponse:
id: str
name: str
lines: int
overlays: int
styles: list[str]
blank: str
example: _Example
source: str
_self: str
@dataclass
class ErrorResponse:
error: str
|
from dataclasses import dataclass
from datetime import datetime
@dataclass
class AuthResponse:
email: str
image_access: bool
search_access: bool
created: datetime
modified: datetime
@dataclass
class FontResponse:
filename: str
id: str
alias: str
_self: str
@dataclass
class MemeRequest:
template_id: str
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class CustomRequest:
background: str
style: str
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class MemeTemplateRequest:
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class AutomaticRequest:
text: str
safe: bool
redirect: bool
@dataclass
class MemeResponse:
url: str
@dataclass
class ExampleResponse:
url: str
template: str
@dataclass
class _Example:
text: list[str]
url: str
@dataclass
class TemplateResponse:
id: str
name: str
lines: int
overlays: int
styles: list[str]
blank: str
example: _Example
source: str
_self: str
@dataclass
class ErrorResponse:
error: str
|
Support layout on template endpoints
|
Support layout on template endpoints
|
Python
|
mit
|
jacebrowning/memegen,jacebrowning/memegen
|
from dataclasses import dataclass
from datetime import datetime
@dataclass
class AuthResponse:
email: str
image_access: bool
search_access: bool
created: datetime
modified: datetime
@dataclass
class FontResponse:
filename: str
id: str
alias: str
_self: str
@dataclass
class MemeRequest:
template_id: str
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class CustomRequest:
background: str
style: str
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class MemeTemplateRequest:
style: list[str]
text: list[str]
font: str
extension: str
redirect: bool
@dataclass
class AutomaticRequest:
text: str
safe: bool
redirect: bool
@dataclass
class MemeResponse:
url: str
@dataclass
class ExampleResponse:
url: str
template: str
@dataclass
class _Example:
text: list[str]
url: str
@dataclass
class TemplateResponse:
id: str
name: str
lines: int
overlays: int
styles: list[str]
blank: str
example: _Example
source: str
_self: str
@dataclass
class ErrorResponse:
error: str
Support layout on template endpoints
|
from dataclasses import dataclass
from datetime import datetime
@dataclass
class AuthResponse:
email: str
image_access: bool
search_access: bool
created: datetime
modified: datetime
@dataclass
class FontResponse:
filename: str
id: str
alias: str
_self: str
@dataclass
class MemeRequest:
template_id: str
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class CustomRequest:
background: str
style: str
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class MemeTemplateRequest:
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class AutomaticRequest:
text: str
safe: bool
redirect: bool
@dataclass
class MemeResponse:
url: str
@dataclass
class ExampleResponse:
url: str
template: str
@dataclass
class _Example:
text: list[str]
url: str
@dataclass
class TemplateResponse:
id: str
name: str
lines: int
overlays: int
styles: list[str]
blank: str
example: _Example
source: str
_self: str
@dataclass
class ErrorResponse:
error: str
|
<commit_before>from dataclasses import dataclass
from datetime import datetime
@dataclass
class AuthResponse:
email: str
image_access: bool
search_access: bool
created: datetime
modified: datetime
@dataclass
class FontResponse:
filename: str
id: str
alias: str
_self: str
@dataclass
class MemeRequest:
template_id: str
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class CustomRequest:
background: str
style: str
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class MemeTemplateRequest:
style: list[str]
text: list[str]
font: str
extension: str
redirect: bool
@dataclass
class AutomaticRequest:
text: str
safe: bool
redirect: bool
@dataclass
class MemeResponse:
url: str
@dataclass
class ExampleResponse:
url: str
template: str
@dataclass
class _Example:
text: list[str]
url: str
@dataclass
class TemplateResponse:
id: str
name: str
lines: int
overlays: int
styles: list[str]
blank: str
example: _Example
source: str
_self: str
@dataclass
class ErrorResponse:
error: str
<commit_msg>Support layout on template endpoints<commit_after>
|
from dataclasses import dataclass
from datetime import datetime
@dataclass
class AuthResponse:
email: str
image_access: bool
search_access: bool
created: datetime
modified: datetime
@dataclass
class FontResponse:
filename: str
id: str
alias: str
_self: str
@dataclass
class MemeRequest:
template_id: str
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class CustomRequest:
background: str
style: str
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class MemeTemplateRequest:
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class AutomaticRequest:
text: str
safe: bool
redirect: bool
@dataclass
class MemeResponse:
url: str
@dataclass
class ExampleResponse:
url: str
template: str
@dataclass
class _Example:
text: list[str]
url: str
@dataclass
class TemplateResponse:
id: str
name: str
lines: int
overlays: int
styles: list[str]
blank: str
example: _Example
source: str
_self: str
@dataclass
class ErrorResponse:
error: str
|
from dataclasses import dataclass
from datetime import datetime
@dataclass
class AuthResponse:
email: str
image_access: bool
search_access: bool
created: datetime
modified: datetime
@dataclass
class FontResponse:
filename: str
id: str
alias: str
_self: str
@dataclass
class MemeRequest:
template_id: str
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class CustomRequest:
background: str
style: str
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class MemeTemplateRequest:
style: list[str]
text: list[str]
font: str
extension: str
redirect: bool
@dataclass
class AutomaticRequest:
text: str
safe: bool
redirect: bool
@dataclass
class MemeResponse:
url: str
@dataclass
class ExampleResponse:
url: str
template: str
@dataclass
class _Example:
text: list[str]
url: str
@dataclass
class TemplateResponse:
id: str
name: str
lines: int
overlays: int
styles: list[str]
blank: str
example: _Example
source: str
_self: str
@dataclass
class ErrorResponse:
error: str
Support layout on template endpointsfrom dataclasses import dataclass
from datetime import datetime
@dataclass
class AuthResponse:
email: str
image_access: bool
search_access: bool
created: datetime
modified: datetime
@dataclass
class FontResponse:
filename: str
id: str
alias: str
_self: str
@dataclass
class MemeRequest:
template_id: str
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class CustomRequest:
background: str
style: str
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class MemeTemplateRequest:
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class AutomaticRequest:
text: str
safe: bool
redirect: bool
@dataclass
class MemeResponse:
url: str
@dataclass
class ExampleResponse:
url: str
template: str
@dataclass
class _Example:
text: list[str]
url: str
@dataclass
class TemplateResponse:
id: str
name: str
lines: int
overlays: int
styles: list[str]
blank: str
example: _Example
source: str
_self: str
@dataclass
class ErrorResponse:
error: str
|
<commit_before>from dataclasses import dataclass
from datetime import datetime
@dataclass
class AuthResponse:
email: str
image_access: bool
search_access: bool
created: datetime
modified: datetime
@dataclass
class FontResponse:
filename: str
id: str
alias: str
_self: str
@dataclass
class MemeRequest:
template_id: str
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class CustomRequest:
background: str
style: str
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class MemeTemplateRequest:
style: list[str]
text: list[str]
font: str
extension: str
redirect: bool
@dataclass
class AutomaticRequest:
text: str
safe: bool
redirect: bool
@dataclass
class MemeResponse:
url: str
@dataclass
class ExampleResponse:
url: str
template: str
@dataclass
class _Example:
text: list[str]
url: str
@dataclass
class TemplateResponse:
id: str
name: str
lines: int
overlays: int
styles: list[str]
blank: str
example: _Example
source: str
_self: str
@dataclass
class ErrorResponse:
error: str
<commit_msg>Support layout on template endpoints<commit_after>from dataclasses import dataclass
from datetime import datetime
@dataclass
class AuthResponse:
email: str
image_access: bool
search_access: bool
created: datetime
modified: datetime
@dataclass
class FontResponse:
filename: str
id: str
alias: str
_self: str
@dataclass
class MemeRequest:
template_id: str
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class CustomRequest:
background: str
style: str
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class MemeTemplateRequest:
style: list[str]
text: list[str]
layout: str
font: str
extension: str
redirect: bool
@dataclass
class AutomaticRequest:
text: str
safe: bool
redirect: bool
@dataclass
class MemeResponse:
url: str
@dataclass
class ExampleResponse:
url: str
template: str
@dataclass
class _Example:
text: list[str]
url: str
@dataclass
class TemplateResponse:
id: str
name: str
lines: int
overlays: int
styles: list[str]
blank: str
example: _Example
source: str
_self: str
@dataclass
class ErrorResponse:
error: str
|
8a8d36d1f39cf893328b008cb11ef8e4a3fe71b5
|
txlege84/topics/management/commands/bootstraptopics.py
|
txlege84/topics/management/commands/bootstraptopics.py
|
from django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Criminal Justice',
u'Energy',
u'Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Public Education',
u'Social Justice',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
|
from django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Energy',
u'Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Law & Order',
u'Public Education',
u'Social Justice',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
|
Rename Criminal Justice to Law & Order, per Emily's request
|
Rename Criminal Justice to Law & Order, per Emily's request
|
Python
|
mit
|
texastribune/txlege84,texastribune/txlege84,texastribune/txlege84,texastribune/txlege84
|
from django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Criminal Justice',
u'Energy',
u'Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Public Education',
u'Social Justice',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
Rename Criminal Justice to Law & Order, per Emily's request
|
from django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Energy',
u'Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Law & Order',
u'Public Education',
u'Social Justice',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
|
<commit_before>from django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Criminal Justice',
u'Energy',
u'Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Public Education',
u'Social Justice',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
<commit_msg>Rename Criminal Justice to Law & Order, per Emily's request<commit_after>
|
from django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Energy',
u'Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Law & Order',
u'Public Education',
u'Social Justice',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
|
from django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Criminal Justice',
u'Energy',
u'Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Public Education',
u'Social Justice',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
Rename Criminal Justice to Law & Order, per Emily's requestfrom django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Energy',
u'Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Law & Order',
u'Public Education',
u'Social Justice',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
|
<commit_before>from django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Criminal Justice',
u'Energy',
u'Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Public Education',
u'Social Justice',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
<commit_msg>Rename Criminal Justice to Law & Order, per Emily's request<commit_after>from django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Energy',
u'Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Law & Order',
u'Public Education',
u'Social Justice',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
|
14adf187c6b76c77259f140dad4fb1d502ec6779
|
compass-api/G4SE/api/serializers.py
|
compass-api/G4SE/api/serializers.py
|
from .models import Record, HarvestedRecord, AllRecords
from django.contrib.auth.models import User
from rest_framework import serializers
import datetime
class AllRecordsSerializer(serializers.ModelSerializer):
class Meta:
model = AllRecords
class RecordSerializer(serializers.ModelSerializer):
class Meta:
model = Record
class EditRecordSerializer(serializers.ModelSerializer):
modified = serializers.HiddenField(default=datetime.datetime.now())
def validate_login_name(self, value):
user = self.context['request'].user.username
if not value:
return user
return value
class Meta:
model = Record
class HarvestedRecordSerializer(serializers.ModelSerializer):
class Meta:
model = HarvestedRecord
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
|
from .models import Record, HarvestedRecord, AllRecords
from django.contrib.auth.models import User
from rest_framework import serializers
import datetime
class AllRecordsSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = AllRecords
class RecordSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = Record
class HarvestedRecordSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = HarvestedRecord
class EditRecordSerializer(serializers.ModelSerializer):
modified = serializers.HiddenField(default=datetime.datetime.now())
def validate_login_name(self, value):
user = self.context['request'].user.username
if not value:
return user
return value
class Meta:
model = Record
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
|
Exclude user name from api
|
Exclude user name from api
|
Python
|
mit
|
geometalab/G4SE-Compass,geometalab/G4SE-Compass,geometalab/G4SE-Compass,geometalab/G4SE-Compass
|
from .models import Record, HarvestedRecord, AllRecords
from django.contrib.auth.models import User
from rest_framework import serializers
import datetime
class AllRecordsSerializer(serializers.ModelSerializer):
class Meta:
model = AllRecords
class RecordSerializer(serializers.ModelSerializer):
class Meta:
model = Record
class EditRecordSerializer(serializers.ModelSerializer):
modified = serializers.HiddenField(default=datetime.datetime.now())
def validate_login_name(self, value):
user = self.context['request'].user.username
if not value:
return user
return value
class Meta:
model = Record
class HarvestedRecordSerializer(serializers.ModelSerializer):
class Meta:
model = HarvestedRecord
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
Exclude user name from api
|
from .models import Record, HarvestedRecord, AllRecords
from django.contrib.auth.models import User
from rest_framework import serializers
import datetime
class AllRecordsSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = AllRecords
class RecordSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = Record
class HarvestedRecordSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = HarvestedRecord
class EditRecordSerializer(serializers.ModelSerializer):
modified = serializers.HiddenField(default=datetime.datetime.now())
def validate_login_name(self, value):
user = self.context['request'].user.username
if not value:
return user
return value
class Meta:
model = Record
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
|
<commit_before>from .models import Record, HarvestedRecord, AllRecords
from django.contrib.auth.models import User
from rest_framework import serializers
import datetime
class AllRecordsSerializer(serializers.ModelSerializer):
class Meta:
model = AllRecords
class RecordSerializer(serializers.ModelSerializer):
class Meta:
model = Record
class EditRecordSerializer(serializers.ModelSerializer):
modified = serializers.HiddenField(default=datetime.datetime.now())
def validate_login_name(self, value):
user = self.context['request'].user.username
if not value:
return user
return value
class Meta:
model = Record
class HarvestedRecordSerializer(serializers.ModelSerializer):
class Meta:
model = HarvestedRecord
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
<commit_msg>Exclude user name from api<commit_after>
|
from .models import Record, HarvestedRecord, AllRecords
from django.contrib.auth.models import User
from rest_framework import serializers
import datetime
class AllRecordsSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = AllRecords
class RecordSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = Record
class HarvestedRecordSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = HarvestedRecord
class EditRecordSerializer(serializers.ModelSerializer):
modified = serializers.HiddenField(default=datetime.datetime.now())
def validate_login_name(self, value):
user = self.context['request'].user.username
if not value:
return user
return value
class Meta:
model = Record
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
|
from .models import Record, HarvestedRecord, AllRecords
from django.contrib.auth.models import User
from rest_framework import serializers
import datetime
class AllRecordsSerializer(serializers.ModelSerializer):
class Meta:
model = AllRecords
class RecordSerializer(serializers.ModelSerializer):
class Meta:
model = Record
class EditRecordSerializer(serializers.ModelSerializer):
modified = serializers.HiddenField(default=datetime.datetime.now())
def validate_login_name(self, value):
user = self.context['request'].user.username
if not value:
return user
return value
class Meta:
model = Record
class HarvestedRecordSerializer(serializers.ModelSerializer):
class Meta:
model = HarvestedRecord
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
Exclude user name from apifrom .models import Record, HarvestedRecord, AllRecords
from django.contrib.auth.models import User
from rest_framework import serializers
import datetime
class AllRecordsSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = AllRecords
class RecordSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = Record
class HarvestedRecordSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = HarvestedRecord
class EditRecordSerializer(serializers.ModelSerializer):
modified = serializers.HiddenField(default=datetime.datetime.now())
def validate_login_name(self, value):
user = self.context['request'].user.username
if not value:
return user
return value
class Meta:
model = Record
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
|
<commit_before>from .models import Record, HarvestedRecord, AllRecords
from django.contrib.auth.models import User
from rest_framework import serializers
import datetime
class AllRecordsSerializer(serializers.ModelSerializer):
class Meta:
model = AllRecords
class RecordSerializer(serializers.ModelSerializer):
class Meta:
model = Record
class EditRecordSerializer(serializers.ModelSerializer):
modified = serializers.HiddenField(default=datetime.datetime.now())
def validate_login_name(self, value):
user = self.context['request'].user.username
if not value:
return user
return value
class Meta:
model = Record
class HarvestedRecordSerializer(serializers.ModelSerializer):
class Meta:
model = HarvestedRecord
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
<commit_msg>Exclude user name from api<commit_after>from .models import Record, HarvestedRecord, AllRecords
from django.contrib.auth.models import User
from rest_framework import serializers
import datetime
class AllRecordsSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = AllRecords
class RecordSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = Record
class HarvestedRecordSerializer(serializers.ModelSerializer):
login_name = serializers.HiddenField(default=None)
class Meta:
model = HarvestedRecord
class EditRecordSerializer(serializers.ModelSerializer):
modified = serializers.HiddenField(default=datetime.datetime.now())
def validate_login_name(self, value):
user = self.context['request'].user.username
if not value:
return user
return value
class Meta:
model = Record
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
|
962b674053ecf52730315550675c29fa8ba8ec12
|
openprovider/data/exception_map.py
|
openprovider/data/exception_map.py
|
# coding=utf-8
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
|
# coding=utf-8
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
4005: ServiceUnavailable, # Temprorarily unavailable due to maintenance
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
|
Add maintenance response to exception map
|
Add maintenance response to exception map
|
Python
|
mit
|
AntagonistHQ/openprovider.py
|
# coding=utf-8
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
Add maintenance response to exception map
|
# coding=utf-8
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
4005: ServiceUnavailable, # Temprorarily unavailable due to maintenance
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
|
<commit_before># coding=utf-8
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
<commit_msg>Add maintenance response to exception map<commit_after>
|
# coding=utf-8
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
4005: ServiceUnavailable, # Temprorarily unavailable due to maintenance
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
|
# coding=utf-8
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
Add maintenance response to exception map# coding=utf-8
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
4005: ServiceUnavailable, # Temprorarily unavailable due to maintenance
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
|
<commit_before># coding=utf-8
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
<commit_msg>Add maintenance response to exception map<commit_after># coding=utf-8
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
4005: ServiceUnavailable, # Temprorarily unavailable due to maintenance
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
|
8fe04b096348ab81d31f59030a22b943e548dc1f
|
mfnd/todotask.py
|
mfnd/todotask.py
|
#!/usr/bin/env python3
"""
Module for tasks in the to-do list
"""
class TodoTask:
"""
Represents a task in the to-do list
"""
def __init__(self, description, position = None, completionStatus = 'todo'):
"""
Initialize a to-do list task item
"""
self.description = description
self.position = position
self.completionStatus = completionStatus
self.visible = True
self.mode = 0
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.description
|
#!/usr/bin/env python3
"""
Module handles tasks in the to-do list
"""
class TodoTask:
"""
Class represents a task in the to-do list
"""
def __init__(self, description, position = None, completionStatus = 'todo'):
"""
Initialize a to-do list task item
"""
self.description = description
self.position = position
self.completionStatus = completionStatus
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.description
|
Remove unneccessary variables from class 'TodoTask'
|
refactor: Remove unneccessary variables from class 'TodoTask'
|
Python
|
mit
|
mes32/mfnd
|
#!/usr/bin/env python3
"""
Module for tasks in the to-do list
"""
class TodoTask:
"""
Represents a task in the to-do list
"""
def __init__(self, description, position = None, completionStatus = 'todo'):
"""
Initialize a to-do list task item
"""
self.description = description
self.position = position
self.completionStatus = completionStatus
self.visible = True
self.mode = 0
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.descriptionrefactor: Remove unneccessary variables from class 'TodoTask'
|
#!/usr/bin/env python3
"""
Module handles tasks in the to-do list
"""
class TodoTask:
"""
Class represents a task in the to-do list
"""
def __init__(self, description, position = None, completionStatus = 'todo'):
"""
Initialize a to-do list task item
"""
self.description = description
self.position = position
self.completionStatus = completionStatus
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.description
|
<commit_before>#!/usr/bin/env python3
"""
Module for tasks in the to-do list
"""
class TodoTask:
"""
Represents a task in the to-do list
"""
def __init__(self, description, position = None, completionStatus = 'todo'):
"""
Initialize a to-do list task item
"""
self.description = description
self.position = position
self.completionStatus = completionStatus
self.visible = True
self.mode = 0
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.description<commit_msg>refactor: Remove unneccessary variables from class 'TodoTask'<commit_after>
|
#!/usr/bin/env python3
"""
Module handles tasks in the to-do list
"""
class TodoTask:
"""
Class represents a task in the to-do list
"""
def __init__(self, description, position = None, completionStatus = 'todo'):
"""
Initialize a to-do list task item
"""
self.description = description
self.position = position
self.completionStatus = completionStatus
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.description
|
#!/usr/bin/env python3
"""
Module for tasks in the to-do list
"""
class TodoTask:
"""
Represents a task in the to-do list
"""
def __init__(self, description, position = None, completionStatus = 'todo'):
"""
Initialize a to-do list task item
"""
self.description = description
self.position = position
self.completionStatus = completionStatus
self.visible = True
self.mode = 0
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.descriptionrefactor: Remove unneccessary variables from class 'TodoTask'#!/usr/bin/env python3
"""
Module handles tasks in the to-do list
"""
class TodoTask:
"""
Class represents a task in the to-do list
"""
def __init__(self, description, position = None, completionStatus = 'todo'):
"""
Initialize a to-do list task item
"""
self.description = description
self.position = position
self.completionStatus = completionStatus
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.description
|
<commit_before>#!/usr/bin/env python3
"""
Module for tasks in the to-do list
"""
class TodoTask:
"""
Represents a task in the to-do list
"""
def __init__(self, description, position = None, completionStatus = 'todo'):
"""
Initialize a to-do list task item
"""
self.description = description
self.position = position
self.completionStatus = completionStatus
self.visible = True
self.mode = 0
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.description<commit_msg>refactor: Remove unneccessary variables from class 'TodoTask'<commit_after>#!/usr/bin/env python3
"""
Module handles tasks in the to-do list
"""
class TodoTask:
"""
Class represents a task in the to-do list
"""
def __init__(self, description, position = None, completionStatus = 'todo'):
"""
Initialize a to-do list task item
"""
self.description = description
self.position = position
self.completionStatus = completionStatus
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.description
|
3cd25ea433518ec9b25a5e646e63413ebd0ffcd4
|
parse.py
|
parse.py
|
import sys
indentation = 0
repl = [
('%', '_ARSCL', '['),
('$', '_ARSCR', ']'),
('#', '_EQOP', '='),
('<', '_PARL', '('),
('>', '_PARR', ')'),
]
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r[0], r[1])
for r in repl:
sin = sin.replace(r[1], r[2])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
exec(l)
|
import sys
import simplejson as json
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print("error reading json language definition")
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
|
Read json language rep and try to eval/exec stdin
|
Read json language rep and try to eval/exec stdin
|
Python
|
unlicense
|
philipdexter/build-a-lang
|
import sys
indentation = 0
repl = [
('%', '_ARSCL', '['),
('$', '_ARSCR', ']'),
('#', '_EQOP', '='),
('<', '_PARL', '('),
('>', '_PARR', ')'),
]
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r[0], r[1])
for r in repl:
sin = sin.replace(r[1], r[2])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
exec(l)
Read json language rep and try to eval/exec stdin
|
import sys
import simplejson as json
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print("error reading json language definition")
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
|
<commit_before>import sys
indentation = 0
repl = [
('%', '_ARSCL', '['),
('$', '_ARSCR', ']'),
('#', '_EQOP', '='),
('<', '_PARL', '('),
('>', '_PARR', ')'),
]
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r[0], r[1])
for r in repl:
sin = sin.replace(r[1], r[2])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
exec(l)
<commit_msg>Read json language rep and try to eval/exec stdin<commit_after>
|
import sys
import simplejson as json
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print("error reading json language definition")
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
|
import sys
indentation = 0
repl = [
('%', '_ARSCL', '['),
('$', '_ARSCR', ']'),
('#', '_EQOP', '='),
('<', '_PARL', '('),
('>', '_PARR', ')'),
]
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r[0], r[1])
for r in repl:
sin = sin.replace(r[1], r[2])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
exec(l)
Read json language rep and try to eval/exec stdinimport sys
import simplejson as json
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print("error reading json language definition")
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
|
<commit_before>import sys
indentation = 0
repl = [
('%', '_ARSCL', '['),
('$', '_ARSCR', ']'),
('#', '_EQOP', '='),
('<', '_PARL', '('),
('>', '_PARR', ')'),
]
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r[0], r[1])
for r in repl:
sin = sin.replace(r[1], r[2])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
exec(l)
<commit_msg>Read json language rep and try to eval/exec stdin<commit_after>import sys
import simplejson as json
indentation = 0
lang_def = None
with open('language.json') as lang_def_file:
lang_def = json.loads(lang_def_file.read())
if lang_def is None:
print("error reading json language definition")
exit(1)
repl = lang_def['rules']
sin = sys.argv[1]
for r in repl:
sin = sin.replace(r['lang_rep'], r['il_rep'])
for r in repl:
sin = sin.replace(r['il_rep'], r['python_rep'])
sin = sin.replace('\\n', '\n')
for l in sin.splitlines():
try:
r = eval(l)
if r is not None:
print(r)
except:
try:
exec(l)
except:
print("ERROR OMG ERROR" + str(l))
|
b39db786b73cc00676d35cd14b42c70d63b21ba3
|
readthedocs/projects/templatetags/projects_tags.py
|
readthedocs/projects/templatetags/projects_tags.py
|
from django import template
register = template.Library()
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
fallback = NormalizedVersion('99999999.0', error_on_huge_major_num=False)
return sorted(versions,
key=lambda v: (mkversion(v) or fallback),
reverse=True)
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
|
from django import template
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
register = template.Library()
def make_version(version):
ver = mkversion(version)
if not ver:
if version.slug == 'latest':
return NormalizedVersion('99999.0', error_on_huge_major_num=False)
elif version.slug == 'stable':
return NormalizedVersion('9999.0', error_on_huge_major_num=False)
else:
return NormalizedVersion('999.0', error_on_huge_major_num=False)
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
sorted_verisons = sorted(versions,
key=make_version,
reverse=True)
return sorted_verisons
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
|
Fix version sorting to make latest and stable first.
|
Fix version sorting to make latest and stable first.
|
Python
|
mit
|
CedarLogic/readthedocs.org,GovReady/readthedocs.org,emawind84/readthedocs.org,attakei/readthedocs-oauth,sunnyzwh/readthedocs.org,rtfd/readthedocs.org,SteveViss/readthedocs.org,wanghaven/readthedocs.org,clarkperkins/readthedocs.org,asampat3090/readthedocs.org,wanghaven/readthedocs.org,fujita-shintaro/readthedocs.org,atsuyim/readthedocs.org,takluyver/readthedocs.org,pombredanne/readthedocs.org,sid-kap/readthedocs.org,sils1297/readthedocs.org,clarkperkins/readthedocs.org,mhils/readthedocs.org,takluyver/readthedocs.org,takluyver/readthedocs.org,agjohnson/readthedocs.org,royalwang/readthedocs.org,istresearch/readthedocs.org,soulshake/readthedocs.org,Tazer/readthedocs.org,d0ugal/readthedocs.org,techtonik/readthedocs.org,LukasBoersma/readthedocs.org,jerel/readthedocs.org,fujita-shintaro/readthedocs.org,stevepiercy/readthedocs.org,wijerasa/readthedocs.org,titiushko/readthedocs.org,emawind84/readthedocs.org,atsuyim/readthedocs.org,dirn/readthedocs.org,sid-kap/readthedocs.org,SteveViss/readthedocs.org,sid-kap/readthedocs.org,agjohnson/readthedocs.org,kdkeyser/readthedocs.org,kdkeyser/readthedocs.org,sunnyzwh/readthedocs.org,tddv/readthedocs.org,GovReady/readthedocs.org,stevepiercy/readthedocs.org,gjtorikian/readthedocs.org,asampat3090/readthedocs.org,nikolas/readthedocs.org,jerel/readthedocs.org,safwanrahman/readthedocs.org,emawind84/readthedocs.org,safwanrahman/readthedocs.org,jerel/readthedocs.org,hach-que/readthedocs.org,SteveViss/readthedocs.org,raven47git/readthedocs.org,dirn/readthedocs.org,tddv/readthedocs.org,istresearch/readthedocs.org,KamranMackey/readthedocs.org,titiushko/readthedocs.org,techtonik/readthedocs.org,GovReady/readthedocs.org,rtfd/readthedocs.org,asampat3090/readthedocs.org,soulshake/readthedocs.org,VishvajitP/readthedocs.org,KamranMackey/readthedocs.org,clarkperkins/readthedocs.org,espdev/readthedocs.org,kenwang76/readthedocs.org,singingwolfboy/readthedocs.org,takluyver/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org,atsuyim/readthed
ocs.org,singingwolfboy/readthedocs.org,LukasBoersma/readthedocs.org,sils1297/readthedocs.org,d0ugal/readthedocs.org,kenwang76/readthedocs.org,agjohnson/readthedocs.org,Tazer/readthedocs.org,pombredanne/readthedocs.org,laplaceliu/readthedocs.org,cgourlay/readthedocs.org,hach-que/readthedocs.org,wanghaven/readthedocs.org,gjtorikian/readthedocs.org,singingwolfboy/readthedocs.org,KamranMackey/readthedocs.org,wijerasa/readthedocs.org,gjtorikian/readthedocs.org,wanghaven/readthedocs.org,attakei/readthedocs-oauth,davidfischer/readthedocs.org,mrshoki/readthedocs.org,rtfd/readthedocs.org,agjohnson/readthedocs.org,kenwang76/readthedocs.org,mhils/readthedocs.org,gjtorikian/readthedocs.org,Tazer/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,VishvajitP/readthedocs.org,Carreau/readthedocs.org,kdkeyser/readthedocs.org,titiushko/readthedocs.org,fujita-shintaro/readthedocs.org,kenshinthebattosai/readthedocs.org,LukasBoersma/readthedocs.org,soulshake/readthedocs.org,nikolas/readthedocs.org,safwanrahman/readthedocs.org,techtonik/readthedocs.org,kenshinthebattosai/readthedocs.org,michaelmcandrew/readthedocs.org,raven47git/readthedocs.org,GovReady/readthedocs.org,davidfischer/readthedocs.org,mrshoki/readthedocs.org,KamranMackey/readthedocs.org,singingwolfboy/readthedocs.org,sils1297/readthedocs.org,CedarLogic/readthedocs.org,LukasBoersma/readthedocs.org,mrshoki/readthedocs.org,hach-que/readthedocs.org,Carreau/readthedocs.org,michaelmcandrew/readthedocs.org,tddv/readthedocs.org,attakei/readthedocs-oauth,istresearch/readthedocs.org,atsuyim/readthedocs.org,wijerasa/readthedocs.org,espdev/readthedocs.org,dirn/readthedocs.org,cgourlay/readthedocs.org,rtfd/readthedocs.org,michaelmcandrew/readthedocs.org,raven47git/readthedocs.org,soulshake/readthedocs.org,kenshinthebattosai/readthedocs.org,royalwang/readthedocs.org,sid-kap/readthedocs.org,attakei/readthedocs-oauth,SteveViss/readthedocs.org,mrshoki/readthedocs.org,techtonik/readthedocs.org,kenwang76/readthedocs.org,fujit
a-shintaro/readthedocs.org,laplaceliu/readthedocs.org,mhils/readthedocs.org,hach-que/readthedocs.org,d0ugal/readthedocs.org,clarkperkins/readthedocs.org,safwanrahman/readthedocs.org,royalwang/readthedocs.org,michaelmcandrew/readthedocs.org,kdkeyser/readthedocs.org,kenshinthebattosai/readthedocs.org,VishvajitP/readthedocs.org,VishvajitP/readthedocs.org,davidfischer/readthedocs.org,royalwang/readthedocs.org,sils1297/readthedocs.org,mhils/readthedocs.org,cgourlay/readthedocs.org,asampat3090/readthedocs.org,sunnyzwh/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,raven47git/readthedocs.org,d0ugal/readthedocs.org,wijerasa/readthedocs.org,nikolas/readthedocs.org,dirn/readthedocs.org,espdev/readthedocs.org,laplaceliu/readthedocs.org,sunnyzwh/readthedocs.org,nikolas/readthedocs.org,jerel/readthedocs.org,pombredanne/readthedocs.org,cgourlay/readthedocs.org,Tazer/readthedocs.org,Carreau/readthedocs.org,CedarLogic/readthedocs.org,laplaceliu/readthedocs.org,Carreau/readthedocs.org,davidfischer/readthedocs.org,espdev/readthedocs.org
|
from django import template
register = template.Library()
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
fallback = NormalizedVersion('99999999.0', error_on_huge_major_num=False)
return sorted(versions,
key=lambda v: (mkversion(v) or fallback),
reverse=True)
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
Fix version sorting to make latest and stable first.
|
from django import template

from distutils2.version import NormalizedVersion
from projects.utils import mkversion

register = template.Library()


def make_version(version):
    """Return a sortable NormalizedVersion for a version object.

    Versions whose slug cannot be parsed by ``mkversion`` get large
    sentinel versions so they sort to the front: ``latest`` above
    ``stable`` above every other unparseable slug.
    """
    ver = mkversion(version)
    if not ver:
        if version.slug == 'latest':
            return NormalizedVersion('99999.0', error_on_huge_major_num=False)
        elif version.slug == 'stable':
            return NormalizedVersion('9999.0', error_on_huge_major_num=False)
        else:
            return NormalizedVersion('999.0', error_on_huge_major_num=False)
    # Bug fix: previously this function fell through and returned None for
    # every parseable version, breaking the sort key entirely.
    return ver


@register.filter
def sort_version_aware(versions):
    """
    Takes a list of versions objects and sort them caring about version schemes
    """
    sorted_versions = sorted(versions,
                             key=make_version,
                             reverse=True)
    return sorted_versions


@register.filter
def is_project_user(user, project):
    """
    Return if user is a member of project.users
    """
    return user in project.users.all()
|
<commit_before>from django import template
register = template.Library()
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
fallback = NormalizedVersion('99999999.0', error_on_huge_major_num=False)
return sorted(versions,
key=lambda v: (mkversion(v) or fallback),
reverse=True)
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
<commit_msg>Fix version sorting to make latest and stable first.<commit_after>
|
from django import template
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
register = template.Library()
def make_version(version):
ver = mkversion(version)
if not ver:
if version.slug == 'latest':
return NormalizedVersion('99999.0', error_on_huge_major_num=False)
elif version.slug == 'stable':
return NormalizedVersion('9999.0', error_on_huge_major_num=False)
else:
return NormalizedVersion('999.0', error_on_huge_major_num=False)
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
sorted_verisons = sorted(versions,
key=make_version,
reverse=True)
return sorted_verisons
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
|
from django import template
register = template.Library()
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
fallback = NormalizedVersion('99999999.0', error_on_huge_major_num=False)
return sorted(versions,
key=lambda v: (mkversion(v) or fallback),
reverse=True)
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
Fix version sorting to make latest and stable first.from django import template
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
register = template.Library()
def make_version(version):
ver = mkversion(version)
if not ver:
if version.slug == 'latest':
return NormalizedVersion('99999.0', error_on_huge_major_num=False)
elif version.slug == 'stable':
return NormalizedVersion('9999.0', error_on_huge_major_num=False)
else:
return NormalizedVersion('999.0', error_on_huge_major_num=False)
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
sorted_verisons = sorted(versions,
key=make_version,
reverse=True)
return sorted_verisons
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
|
<commit_before>from django import template
register = template.Library()
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
fallback = NormalizedVersion('99999999.0', error_on_huge_major_num=False)
return sorted(versions,
key=lambda v: (mkversion(v) or fallback),
reverse=True)
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
<commit_msg>Fix version sorting to make latest and stable first.<commit_after>from django import template
from distutils2.version import NormalizedVersion
from projects.utils import mkversion
register = template.Library()
def make_version(version):
ver = mkversion(version)
if not ver:
if version.slug == 'latest':
return NormalizedVersion('99999.0', error_on_huge_major_num=False)
elif version.slug == 'stable':
return NormalizedVersion('9999.0', error_on_huge_major_num=False)
else:
return NormalizedVersion('999.0', error_on_huge_major_num=False)
@register.filter
def sort_version_aware(versions):
"""
Takes a list of versions objects and sort them caring about version schemes
"""
sorted_verisons = sorted(versions,
key=make_version,
reverse=True)
return sorted_verisons
@register.filter
def is_project_user(user, project):
"""
Return if user is a member of project.users
"""
return user in project.users.all()
|
eda9d0d607a23d40b0844e9c20b87debf605cfab
|
powerline/bindings/qtile/widget.py
|
powerline/bindings/qtile/widget.py
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)

from libqtile.bar import CALCULATED
from libqtile.widget import TextBox

from powerline import Powerline


class QTilePowerline(Powerline):
    def do_setup(self, obj):
        # Attach this Powerline instance to the widget so the widget's
        # update() can reach self.powerline.render().
        obj.powerline = self


class PowerlineTextBox(TextBox):
    """qtile TextBox widget rendering a powerline statusline.

    Re-renders every ``timeout`` seconds via qtile's repeating timer.
    """

    def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
        super(PowerlineTextBox, self).__init__(text, width, **config)
        # Schedule the periodic refresh; QTilePowerline.do_setup() then
        # binds the powerline object onto this widget.
        self.timeout_add(timeout, self.update)
        powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
        powerline.setup(self)

    def update(self):
        # Returning True keeps the qtile timer firing repeatedly.
        if not self.configured:
            return True
        self.text = self.powerline.render(side='right')
        self.bar.draw()
        return True

    def cmd_update(self, text):
        # NOTE(review): update() takes no arguments besides self, so
        # passing ``text`` here looks like it would raise TypeError —
        # confirm the intended signature.
        self.update(text)

    def cmd_get(self):
        return self.text

    def _configure(self, qtile, bar):
        super(PowerlineTextBox, self)._configure(qtile, bar)
        # Rebuild the text layout with markup enabled so the pango markup
        # emitted by the renderer is interpreted instead of shown raw.
        self.layout = self.drawer.textlayout(
            self.text,
            self.foreground,
            self.font,
            self.fontsize,
            self.fontshadow,
            markup=True,
        )


# TODO: Remove this at next major release
# Backwards-compatible alias: older configs imported ``Powerline`` from here.
Powerline = PowerlineTextBox
|
Move the closing parenthesis to the next line
|
Move the closing parenthesis to the next line
|
Python
|
mit
|
junix/powerline,cyrixhero/powerline,russellb/powerline,dragon788/powerline,Liangjianghao/powerline,Liangjianghao/powerline,Luffin/powerline,darac/powerline,EricSB/powerline,kenrachynski/powerline,S0lll0s/powerline,xfumihiro/powerline,cyrixhero/powerline,xxxhycl2010/powerline,bartvm/powerline,xfumihiro/powerline,lukw00/powerline,junix/powerline,Luffin/powerline,seanfisk/powerline,kenrachynski/powerline,russellb/powerline,cyrixhero/powerline,prvnkumar/powerline,areteix/powerline,prvnkumar/powerline,QuLogic/powerline,kenrachynski/powerline,lukw00/powerline,areteix/powerline,seanfisk/powerline,dragon788/powerline,seanfisk/powerline,bartvm/powerline,DoctorJellyface/powerline,russellb/powerline,bartvm/powerline,EricSB/powerline,prvnkumar/powerline,Liangjianghao/powerline,S0lll0s/powerline,IvanAli/powerline,s0undt3ch/powerline,lukw00/powerline,QuLogic/powerline,blindFS/powerline,QuLogic/powerline,DoctorJellyface/powerline,IvanAli/powerline,bezhermoso/powerline,s0undt3ch/powerline,DoctorJellyface/powerline,bezhermoso/powerline,dragon788/powerline,bezhermoso/powerline,darac/powerline,blindFS/powerline,xfumihiro/powerline,EricSB/powerline,blindFS/powerline,xxxhycl2010/powerline,junix/powerline,xxxhycl2010/powerline,S0lll0s/powerline,darac/powerline,IvanAli/powerline,s0undt3ch/powerline,areteix/powerline,Luffin/powerline
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
Move the closing parenthesis to the next line
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
<commit_before># vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
<commit_msg>Move the closing parenthesis to the next line<commit_after>
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
Move the closing parenthesis to the next line# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
<commit_before># vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
<commit_msg>Move the closing parenthesis to the next line<commit_after># vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
3aae172eae884bfb5f29be21cc70b032a574dfc1
|
setup.py
|
setup.py
|
import sys
from setuptools import setup, find_packages
import mut
REQUIRES = [
'boto>=2.39,<2.40',
'certifi',
'docopt>=0.6,<0.7',
'docutils',
'dominate>=2.1,<2.2',
'libgiza>=0.2.13,<0.3',
'PyYAML',
'requests>2.9,<2.10',
'rstcloth>0.2.5',
'sphinx>1.4',
]
# Need a fallback for the typing module
if sys.version < '3.5':
REQUIRES.append('mypy-lang')
setup(
name='mut',
description='',
version=mut.__version__,
author='Andrew Aldridge',
author_email='i80and@foxquill.com',
license='Apache',
packages=find_packages(),
install_requires=REQUIRES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Documentation',
'Topic :: Text Processing',
],
entry_points={
'console_scripts': [
'mut = mut.helper:main',
'mut-build = mut.main:main',
'mut-images = mut.build_images:main',
'mut-intersphinx = mut.intersphinx:main',
'mut-lint = mut.lint:main',
'mut-publish = mut.stage:main',
],
}
)
|
import sys

from setuptools import setup, find_packages

import mut

REQUIRES = [
    'boto>=2.39,<2.40',
    'certifi',
    'docopt>=0.6,<0.7',
    'docutils',
    'dominate>=2.1,<2.2',
    'libgiza>=0.2.13,<0.3',
    'PyYAML',
    'requests>2.9,<2.10',
    'rstcloth>=0.2.6',
    'sphinx>=1.5',
]

# Need a fallback for the typing module
if sys.version < '3.5':
    REQUIRES.append('mypy-lang')

# Trove classifiers describing the package.
_CLASSIFIERS = [
    'Programming Language :: Python',
    'Programming Language :: Python :: 3',
    'License :: OSI Approved :: Apache Software License',
    'Topic :: Software Development :: Build Tools',
    'Topic :: Documentation',
    'Topic :: Text Processing',
]

# Command-line tools installed by the package.
_CONSOLE_SCRIPTS = [
    'mut = mut.helper:main',
    'mut-build = mut.main:main',
    'mut-images = mut.build_images:main',
    'mut-intersphinx = mut.intersphinx:main',
    'mut-lint = mut.lint:main',
    'mut-publish = mut.stage:main',
]

setup(
    name='mut',
    description='',
    version=mut.__version__,
    author='Andrew Aldridge',
    author_email='i80and@foxquill.com',
    license='Apache',
    packages=find_packages(),
    install_requires=REQUIRES,
    classifiers=_CLASSIFIERS,
    entry_points={'console_scripts': _CONSOLE_SCRIPTS},
)
|
Upgrade dependencies a wee bit
|
Upgrade dependencies a wee bit
|
Python
|
apache-2.0
|
jeff-allen-mongo/mut,jeff-allen-mongo/mut
|
import sys
from setuptools import setup, find_packages
import mut
REQUIRES = [
'boto>=2.39,<2.40',
'certifi',
'docopt>=0.6,<0.7',
'docutils',
'dominate>=2.1,<2.2',
'libgiza>=0.2.13,<0.3',
'PyYAML',
'requests>2.9,<2.10',
'rstcloth>0.2.5',
'sphinx>1.4',
]
# Need a fallback for the typing module
if sys.version < '3.5':
REQUIRES.append('mypy-lang')
setup(
name='mut',
description='',
version=mut.__version__,
author='Andrew Aldridge',
author_email='i80and@foxquill.com',
license='Apache',
packages=find_packages(),
install_requires=REQUIRES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Documentation',
'Topic :: Text Processing',
],
entry_points={
'console_scripts': [
'mut = mut.helper:main',
'mut-build = mut.main:main',
'mut-images = mut.build_images:main',
'mut-intersphinx = mut.intersphinx:main',
'mut-lint = mut.lint:main',
'mut-publish = mut.stage:main',
],
}
)
Upgrade dependencies a wee bit
|
import sys
from setuptools import setup, find_packages
import mut
REQUIRES = [
'boto>=2.39,<2.40',
'certifi',
'docopt>=0.6,<0.7',
'docutils',
'dominate>=2.1,<2.2',
'libgiza>=0.2.13,<0.3',
'PyYAML',
'requests>2.9,<2.10',
'rstcloth>=0.2.6',
'sphinx>=1.5',
]
# Need a fallback for the typing module
if sys.version < '3.5':
REQUIRES.append('mypy-lang')
setup(
name='mut',
description='',
version=mut.__version__,
author='Andrew Aldridge',
author_email='i80and@foxquill.com',
license='Apache',
packages=find_packages(),
install_requires=REQUIRES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Documentation',
'Topic :: Text Processing',
],
entry_points={
'console_scripts': [
'mut = mut.helper:main',
'mut-build = mut.main:main',
'mut-images = mut.build_images:main',
'mut-intersphinx = mut.intersphinx:main',
'mut-lint = mut.lint:main',
'mut-publish = mut.stage:main',
],
}
)
|
<commit_before>import sys
from setuptools import setup, find_packages
import mut
REQUIRES = [
'boto>=2.39,<2.40',
'certifi',
'docopt>=0.6,<0.7',
'docutils',
'dominate>=2.1,<2.2',
'libgiza>=0.2.13,<0.3',
'PyYAML',
'requests>2.9,<2.10',
'rstcloth>0.2.5',
'sphinx>1.4',
]
# Need a fallback for the typing module
if sys.version < '3.5':
REQUIRES.append('mypy-lang')
setup(
name='mut',
description='',
version=mut.__version__,
author='Andrew Aldridge',
author_email='i80and@foxquill.com',
license='Apache',
packages=find_packages(),
install_requires=REQUIRES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Documentation',
'Topic :: Text Processing',
],
entry_points={
'console_scripts': [
'mut = mut.helper:main',
'mut-build = mut.main:main',
'mut-images = mut.build_images:main',
'mut-intersphinx = mut.intersphinx:main',
'mut-lint = mut.lint:main',
'mut-publish = mut.stage:main',
],
}
)
<commit_msg>Upgrade dependencies a wee bit<commit_after>
|
import sys
from setuptools import setup, find_packages
import mut
REQUIRES = [
'boto>=2.39,<2.40',
'certifi',
'docopt>=0.6,<0.7',
'docutils',
'dominate>=2.1,<2.2',
'libgiza>=0.2.13,<0.3',
'PyYAML',
'requests>2.9,<2.10',
'rstcloth>=0.2.6',
'sphinx>=1.5',
]
# Need a fallback for the typing module
if sys.version < '3.5':
REQUIRES.append('mypy-lang')
setup(
name='mut',
description='',
version=mut.__version__,
author='Andrew Aldridge',
author_email='i80and@foxquill.com',
license='Apache',
packages=find_packages(),
install_requires=REQUIRES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Documentation',
'Topic :: Text Processing',
],
entry_points={
'console_scripts': [
'mut = mut.helper:main',
'mut-build = mut.main:main',
'mut-images = mut.build_images:main',
'mut-intersphinx = mut.intersphinx:main',
'mut-lint = mut.lint:main',
'mut-publish = mut.stage:main',
],
}
)
|
import sys
from setuptools import setup, find_packages
import mut
REQUIRES = [
'boto>=2.39,<2.40',
'certifi',
'docopt>=0.6,<0.7',
'docutils',
'dominate>=2.1,<2.2',
'libgiza>=0.2.13,<0.3',
'PyYAML',
'requests>2.9,<2.10',
'rstcloth>0.2.5',
'sphinx>1.4',
]
# Need a fallback for the typing module
if sys.version < '3.5':
REQUIRES.append('mypy-lang')
setup(
name='mut',
description='',
version=mut.__version__,
author='Andrew Aldridge',
author_email='i80and@foxquill.com',
license='Apache',
packages=find_packages(),
install_requires=REQUIRES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Documentation',
'Topic :: Text Processing',
],
entry_points={
'console_scripts': [
'mut = mut.helper:main',
'mut-build = mut.main:main',
'mut-images = mut.build_images:main',
'mut-intersphinx = mut.intersphinx:main',
'mut-lint = mut.lint:main',
'mut-publish = mut.stage:main',
],
}
)
Upgrade dependencies a wee bitimport sys
from setuptools import setup, find_packages
import mut
REQUIRES = [
'boto>=2.39,<2.40',
'certifi',
'docopt>=0.6,<0.7',
'docutils',
'dominate>=2.1,<2.2',
'libgiza>=0.2.13,<0.3',
'PyYAML',
'requests>2.9,<2.10',
'rstcloth>=0.2.6',
'sphinx>=1.5',
]
# Need a fallback for the typing module
if sys.version < '3.5':
REQUIRES.append('mypy-lang')
setup(
name='mut',
description='',
version=mut.__version__,
author='Andrew Aldridge',
author_email='i80and@foxquill.com',
license='Apache',
packages=find_packages(),
install_requires=REQUIRES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Documentation',
'Topic :: Text Processing',
],
entry_points={
'console_scripts': [
'mut = mut.helper:main',
'mut-build = mut.main:main',
'mut-images = mut.build_images:main',
'mut-intersphinx = mut.intersphinx:main',
'mut-lint = mut.lint:main',
'mut-publish = mut.stage:main',
],
}
)
|
<commit_before>import sys
from setuptools import setup, find_packages
import mut
REQUIRES = [
'boto>=2.39,<2.40',
'certifi',
'docopt>=0.6,<0.7',
'docutils',
'dominate>=2.1,<2.2',
'libgiza>=0.2.13,<0.3',
'PyYAML',
'requests>2.9,<2.10',
'rstcloth>0.2.5',
'sphinx>1.4',
]
# Need a fallback for the typing module
if sys.version < '3.5':
REQUIRES.append('mypy-lang')
setup(
name='mut',
description='',
version=mut.__version__,
author='Andrew Aldridge',
author_email='i80and@foxquill.com',
license='Apache',
packages=find_packages(),
install_requires=REQUIRES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Documentation',
'Topic :: Text Processing',
],
entry_points={
'console_scripts': [
'mut = mut.helper:main',
'mut-build = mut.main:main',
'mut-images = mut.build_images:main',
'mut-intersphinx = mut.intersphinx:main',
'mut-lint = mut.lint:main',
'mut-publish = mut.stage:main',
],
}
)
<commit_msg>Upgrade dependencies a wee bit<commit_after>import sys
from setuptools import setup, find_packages
import mut
REQUIRES = [
'boto>=2.39,<2.40',
'certifi',
'docopt>=0.6,<0.7',
'docutils',
'dominate>=2.1,<2.2',
'libgiza>=0.2.13,<0.3',
'PyYAML',
'requests>2.9,<2.10',
'rstcloth>=0.2.6',
'sphinx>=1.5',
]
# Need a fallback for the typing module
if sys.version < '3.5':
REQUIRES.append('mypy-lang')
setup(
name='mut',
description='',
version=mut.__version__,
author='Andrew Aldridge',
author_email='i80and@foxquill.com',
license='Apache',
packages=find_packages(),
install_requires=REQUIRES,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Documentation',
'Topic :: Text Processing',
],
entry_points={
'console_scripts': [
'mut = mut.helper:main',
'mut-build = mut.main:main',
'mut-images = mut.build_images:main',
'mut-intersphinx = mut.intersphinx:main',
'mut-lint = mut.lint:main',
'mut-publish = mut.stage:main',
],
}
)
|
1d928cbc7b2cfcf1ffd2ec27f83ee33f0af39dfe
|
setuptools/py27compat.py
|
setuptools/py27compat.py
|
"""
Compatibility Support for Python 2.7 and earlier
"""
import sys
import platform
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if sys.version_info < (3,):
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
sys.version_info < (3,)
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
|
"""
Compatibility Support for Python 2.7 and earlier
"""
import platform
import six
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if six.PY2:
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
six.PY2
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
|
Use six to detect Python 2
|
Use six to detect Python 2
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
"""
Compatibility Support for Python 2.7 and earlier
"""
import sys
import platform
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if sys.version_info < (3,):
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
sys.version_info < (3,)
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
Use six to detect Python 2
|
"""
Compatibility Support for Python 2.7 and earlier
"""
import platform
import six
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if six.PY2:
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
six.PY2
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
|
<commit_before>"""
Compatibility Support for Python 2.7 and earlier
"""
import sys
import platform
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if sys.version_info < (3,):
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
sys.version_info < (3,)
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
<commit_msg>Use six to detect Python 2<commit_after>
|
"""
Compatibility Support for Python 2.7 and earlier
"""
import platform
import six
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if six.PY2:
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
six.PY2
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
|
"""
Compatibility Support for Python 2.7 and earlier
"""
import sys
import platform
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if sys.version_info < (3,):
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
sys.version_info < (3,)
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
Use six to detect Python 2"""
Compatibility Support for Python 2.7 and earlier
"""
import platform
import six
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if six.PY2:
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
six.PY2
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
|
<commit_before>"""
Compatibility Support for Python 2.7 and earlier
"""
import sys
import platform
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if sys.version_info < (3,):
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
sys.version_info < (3,)
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
<commit_msg>Use six to detect Python 2<commit_after>"""
Compatibility Support for Python 2.7 and earlier
"""
import platform
import six
def get_all_headers(message, key):
"""
Given an HTTPMessage, return all headers matching a given key.
"""
return message.get_all(key)
if six.PY2:
def get_all_headers(message, key):
return message.getheaders(key)
linux_py2_ascii = (
platform.system() == 'Linux' and
six.PY2
)
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
|
0dd80314ae29d615b287819ae075deda435f3fe8
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='statbank',
version='0.2.1',
description='Statbank API client library',
url='http://github.com/gisgroup/statbank-python',
author='Gis Group ApS',
author_email='valentin@gisgroup.dk, zacharias@gisgroup.dk',
license='MIT',
packages=['statbank'],
install_requires=[
'python-dateutil',
],
test_suite='tests',
)
|
from setuptools import setup
setup(
name='gisgroup-statbank',
version='0.0.1',
description='Statbank API client library',
url='http://github.com/gisgroup/statbank-python',
author='Gis Group ApS',
author_email='valentin@gisgroup.dk, zacharias@gisgroup.dk',
license='MIT',
packages=['statbank'],
install_requires=[
'python-dateutil',
],
test_suite='tests',
)
|
Rename pypi package to gisgroup-statbank
|
Rename pypi package to gisgroup-statbank
|
Python
|
mit
|
gisgroup/statbank-python
|
from setuptools import setup
setup(
name='statbank',
version='0.2.1',
description='Statbank API client library',
url='http://github.com/gisgroup/statbank-python',
author='Gis Group ApS',
author_email='valentin@gisgroup.dk, zacharias@gisgroup.dk',
license='MIT',
packages=['statbank'],
install_requires=[
'python-dateutil',
],
test_suite='tests',
)
Rename pypi package to gisgroup-statbank
|
from setuptools import setup
setup(
name='gisgroup-statbank',
version='0.0.1',
description='Statbank API client library',
url='http://github.com/gisgroup/statbank-python',
author='Gis Group ApS',
author_email='valentin@gisgroup.dk, zacharias@gisgroup.dk',
license='MIT',
packages=['statbank'],
install_requires=[
'python-dateutil',
],
test_suite='tests',
)
|
<commit_before>from setuptools import setup
setup(
name='statbank',
version='0.2.1',
description='Statbank API client library',
url='http://github.com/gisgroup/statbank-python',
author='Gis Group ApS',
author_email='valentin@gisgroup.dk, zacharias@gisgroup.dk',
license='MIT',
packages=['statbank'],
install_requires=[
'python-dateutil',
],
test_suite='tests',
)
<commit_msg>Rename pypi package to gisgroup-statbank<commit_after>
|
from setuptools import setup
setup(
name='gisgroup-statbank',
version='0.0.1',
description='Statbank API client library',
url='http://github.com/gisgroup/statbank-python',
author='Gis Group ApS',
author_email='valentin@gisgroup.dk, zacharias@gisgroup.dk',
license='MIT',
packages=['statbank'],
install_requires=[
'python-dateutil',
],
test_suite='tests',
)
|
from setuptools import setup
setup(
name='statbank',
version='0.2.1',
description='Statbank API client library',
url='http://github.com/gisgroup/statbank-python',
author='Gis Group ApS',
author_email='valentin@gisgroup.dk, zacharias@gisgroup.dk',
license='MIT',
packages=['statbank'],
install_requires=[
'python-dateutil',
],
test_suite='tests',
)
Rename pypi package to gisgroup-statbankfrom setuptools import setup
setup(
name='gisgroup-statbank',
version='0.0.1',
description='Statbank API client library',
url='http://github.com/gisgroup/statbank-python',
author='Gis Group ApS',
author_email='valentin@gisgroup.dk, zacharias@gisgroup.dk',
license='MIT',
packages=['statbank'],
install_requires=[
'python-dateutil',
],
test_suite='tests',
)
|
<commit_before>from setuptools import setup
setup(
name='statbank',
version='0.2.1',
description='Statbank API client library',
url='http://github.com/gisgroup/statbank-python',
author='Gis Group ApS',
author_email='valentin@gisgroup.dk, zacharias@gisgroup.dk',
license='MIT',
packages=['statbank'],
install_requires=[
'python-dateutil',
],
test_suite='tests',
)
<commit_msg>Rename pypi package to gisgroup-statbank<commit_after>from setuptools import setup
setup(
name='gisgroup-statbank',
version='0.0.1',
description='Statbank API client library',
url='http://github.com/gisgroup/statbank-python',
author='Gis Group ApS',
author_email='valentin@gisgroup.dk, zacharias@gisgroup.dk',
license='MIT',
packages=['statbank'],
install_requires=[
'python-dateutil',
],
test_suite='tests',
)
|
a76fe727f9d6a7b95da2c3307ee7317a6426bd67
|
simple_model/__init__.py
|
simple_model/__init__.py
|
from .builder import model_builder
from .models import DynamicModel, Model
__all__ = ('DynamicModel', 'Model', 'model_builder')
|
from .builder import model_builder
from .models import Model
__all__ = ('Model', 'model_builder')
|
Remove remaining links to DynamicModel
|
Remove remaining links to DynamicModel
|
Python
|
mit
|
lamenezes/simple-model
|
from .builder import model_builder
from .models import DynamicModel, Model
__all__ = ('DynamicModel', 'Model', 'model_builder')
Remove remaining links to DynamicModel
|
from .builder import model_builder
from .models import Model
__all__ = ('Model', 'model_builder')
|
<commit_before>from .builder import model_builder
from .models import DynamicModel, Model
__all__ = ('DynamicModel', 'Model', 'model_builder')
<commit_msg>Remove remaining links to DynamicModel<commit_after>
|
from .builder import model_builder
from .models import Model
__all__ = ('Model', 'model_builder')
|
from .builder import model_builder
from .models import DynamicModel, Model
__all__ = ('DynamicModel', 'Model', 'model_builder')
Remove remaining links to DynamicModelfrom .builder import model_builder
from .models import Model
__all__ = ('Model', 'model_builder')
|
<commit_before>from .builder import model_builder
from .models import DynamicModel, Model
__all__ = ('DynamicModel', 'Model', 'model_builder')
<commit_msg>Remove remaining links to DynamicModel<commit_after>from .builder import model_builder
from .models import Model
__all__ = ('Model', 'model_builder')
|
51337c5fa3fe21ccfadbc26f19aa9f2574663fdc
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='flask-reverse-proxy',
version='0.1.0.0',
packages=['flask_reverse_proxy'],
url='',
license='',
author='Wilberto Morales',
author_email='wilbertomorales777@gmail.com',
description=''
)
|
from distutils.core import setup
setup(
name='flask-reverse-proxy',
version='0.2.0.0',
packages=['flask_reverse_proxy'],
url='',
license='',
author='Wilberto Morales',
author_email='wilbertomorales777@gmail.com',
description=''
)
|
Update version after Apache configuration
|
Update version after Apache configuration
|
Python
|
mit
|
wilbertom/flask-reverse-proxy
|
from distutils.core import setup
setup(
name='flask-reverse-proxy',
version='0.1.0.0',
packages=['flask_reverse_proxy'],
url='',
license='',
author='Wilberto Morales',
author_email='wilbertomorales777@gmail.com',
description=''
)
Update version after Apache configuration
|
from distutils.core import setup
setup(
name='flask-reverse-proxy',
version='0.2.0.0',
packages=['flask_reverse_proxy'],
url='',
license='',
author='Wilberto Morales',
author_email='wilbertomorales777@gmail.com',
description=''
)
|
<commit_before>from distutils.core import setup
setup(
name='flask-reverse-proxy',
version='0.1.0.0',
packages=['flask_reverse_proxy'],
url='',
license='',
author='Wilberto Morales',
author_email='wilbertomorales777@gmail.com',
description=''
)
<commit_msg>Update version after Apache configuration<commit_after>
|
from distutils.core import setup
setup(
name='flask-reverse-proxy',
version='0.2.0.0',
packages=['flask_reverse_proxy'],
url='',
license='',
author='Wilberto Morales',
author_email='wilbertomorales777@gmail.com',
description=''
)
|
from distutils.core import setup
setup(
name='flask-reverse-proxy',
version='0.1.0.0',
packages=['flask_reverse_proxy'],
url='',
license='',
author='Wilberto Morales',
author_email='wilbertomorales777@gmail.com',
description=''
)
Update version after Apache configurationfrom distutils.core import setup
setup(
name='flask-reverse-proxy',
version='0.2.0.0',
packages=['flask_reverse_proxy'],
url='',
license='',
author='Wilberto Morales',
author_email='wilbertomorales777@gmail.com',
description=''
)
|
<commit_before>from distutils.core import setup
setup(
name='flask-reverse-proxy',
version='0.1.0.0',
packages=['flask_reverse_proxy'],
url='',
license='',
author='Wilberto Morales',
author_email='wilbertomorales777@gmail.com',
description=''
)
<commit_msg>Update version after Apache configuration<commit_after>from distutils.core import setup
setup(
name='flask-reverse-proxy',
version='0.2.0.0',
packages=['flask_reverse_proxy'],
url='',
license='',
author='Wilberto Morales',
author_email='wilbertomorales777@gmail.com',
description=''
)
|
8eb61667984cad09086f442ef299c582d0208a8f
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="fast_sparCC",
version="v0.1.5",
author="Michael Shaffer",
author_email="michael.shaffer@ucdenver.edu",
description="A fast command line interface to find correlations in biom tables with SparCC.",
license="BSD",
url="https://github.com/shafferm/fast_sparCC",
download_url="https://github.com/shafferm/fast_sparCC/archive/v0.1.5.tar.gz",
install_requires=["numpy", "scipy", "biom-format", "pandas"],
scripts=["scripts/fast_sparCC.py"],
packages=find_packages()
)
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="fast_sparCC",
version="v0.1.6",
author="Michael Shaffer",
author_email="michael.shaffer@ucdenver.edu",
description="A fast command line interface to find correlations in biom tables with SparCC.",
license="BSD",
url="https://github.com/shafferm/fast_sparCC",
download_url="https://github.com/shafferm/fast_sparCC/archive/v0.1.6.tar.gz",
install_requires=["numpy", "scipy", "biom-format", "pandas"],
scripts=["scripts/fast_sparCC.py"],
packages=find_packages()
)
|
Update to v0.1.6 to add in function moves
|
Update to v0.1.6 to add in function moves
|
Python
|
bsd-3-clause
|
shafferm/fast_sparCC
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="fast_sparCC",
version="v0.1.5",
author="Michael Shaffer",
author_email="michael.shaffer@ucdenver.edu",
description="A fast command line interface to find correlations in biom tables with SparCC.",
license="BSD",
url="https://github.com/shafferm/fast_sparCC",
download_url="https://github.com/shafferm/fast_sparCC/archive/v0.1.5.tar.gz",
install_requires=["numpy", "scipy", "biom-format", "pandas"],
scripts=["scripts/fast_sparCC.py"],
packages=find_packages()
)
Update to v0.1.6 to add in function moves
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="fast_sparCC",
version="v0.1.6",
author="Michael Shaffer",
author_email="michael.shaffer@ucdenver.edu",
description="A fast command line interface to find correlations in biom tables with SparCC.",
license="BSD",
url="https://github.com/shafferm/fast_sparCC",
download_url="https://github.com/shafferm/fast_sparCC/archive/v0.1.6.tar.gz",
install_requires=["numpy", "scipy", "biom-format", "pandas"],
scripts=["scripts/fast_sparCC.py"],
packages=find_packages()
)
|
<commit_before>from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="fast_sparCC",
version="v0.1.5",
author="Michael Shaffer",
author_email="michael.shaffer@ucdenver.edu",
description="A fast command line interface to find correlations in biom tables with SparCC.",
license="BSD",
url="https://github.com/shafferm/fast_sparCC",
download_url="https://github.com/shafferm/fast_sparCC/archive/v0.1.5.tar.gz",
install_requires=["numpy", "scipy", "biom-format", "pandas"],
scripts=["scripts/fast_sparCC.py"],
packages=find_packages()
)
<commit_msg>Update to v0.1.6 to add in function moves<commit_after>
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="fast_sparCC",
version="v0.1.6",
author="Michael Shaffer",
author_email="michael.shaffer@ucdenver.edu",
description="A fast command line interface to find correlations in biom tables with SparCC.",
license="BSD",
url="https://github.com/shafferm/fast_sparCC",
download_url="https://github.com/shafferm/fast_sparCC/archive/v0.1.6.tar.gz",
install_requires=["numpy", "scipy", "biom-format", "pandas"],
scripts=["scripts/fast_sparCC.py"],
packages=find_packages()
)
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="fast_sparCC",
version="v0.1.5",
author="Michael Shaffer",
author_email="michael.shaffer@ucdenver.edu",
description="A fast command line interface to find correlations in biom tables with SparCC.",
license="BSD",
url="https://github.com/shafferm/fast_sparCC",
download_url="https://github.com/shafferm/fast_sparCC/archive/v0.1.5.tar.gz",
install_requires=["numpy", "scipy", "biom-format", "pandas"],
scripts=["scripts/fast_sparCC.py"],
packages=find_packages()
)
Update to v0.1.6 to add in function movesfrom setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="fast_sparCC",
version="v0.1.6",
author="Michael Shaffer",
author_email="michael.shaffer@ucdenver.edu",
description="A fast command line interface to find correlations in biom tables with SparCC.",
license="BSD",
url="https://github.com/shafferm/fast_sparCC",
download_url="https://github.com/shafferm/fast_sparCC/archive/v0.1.6.tar.gz",
install_requires=["numpy", "scipy", "biom-format", "pandas"],
scripts=["scripts/fast_sparCC.py"],
packages=find_packages()
)
|
<commit_before>from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="fast_sparCC",
version="v0.1.5",
author="Michael Shaffer",
author_email="michael.shaffer@ucdenver.edu",
description="A fast command line interface to find correlations in biom tables with SparCC.",
license="BSD",
url="https://github.com/shafferm/fast_sparCC",
download_url="https://github.com/shafferm/fast_sparCC/archive/v0.1.5.tar.gz",
install_requires=["numpy", "scipy", "biom-format", "pandas"],
scripts=["scripts/fast_sparCC.py"],
packages=find_packages()
)
<commit_msg>Update to v0.1.6 to add in function moves<commit_after>from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="fast_sparCC",
version="v0.1.6",
author="Michael Shaffer",
author_email="michael.shaffer@ucdenver.edu",
description="A fast command line interface to find correlations in biom tables with SparCC.",
license="BSD",
url="https://github.com/shafferm/fast_sparCC",
download_url="https://github.com/shafferm/fast_sparCC/archive/v0.1.6.tar.gz",
install_requires=["numpy", "scipy", "biom-format", "pandas"],
scripts=["scripts/fast_sparCC.py"],
packages=find_packages()
)
|
3cfe9cd12e2ac57dd702ad208347e213b627b8be
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import flask_boost
entry_points = {
"console_scripts": [
"boost = flask_boost.cli:main",
]
}
with open("requirements.txt") as f:
requires = [l for l in f.read().splitlines() if l]
setup(
name='Flask-Boost',
version=flask_boost.__version__,
packages=find_packages(),
include_package_data=True,
description='Flask application generator for boosting your development.',
long_description=open('README.rst').read(),
url='https://github.com/hustlzp/Flask-Boost',
author='hustlzp',
author_email='hustlzp@gmail.com',
license='MIT',
keywords='flask sample generator',
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
from setuptools import setup, find_packages
import flask_boost
entry_points = {
"console_scripts": [
"boost = flask_boost.cli:main",
]
}
with open("requirements.txt") as f:
requires = [l for l in f.read().splitlines() if l]
setup(
name='Flask-Boost',
version=flask_boost.__version__,
packages=find_packages(),
include_package_data=True,
description='Flask application generator for boosting your development.',
long_description=open('README.rst').read(),
url='https://github.com/hustlzp/Flask-Boost',
author='hustlzp',
author_email='hustlzp@gmail.com',
license='MIT',
keywords='flask sample generator',
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
Change Development Status to Beta, add Python 3.4 support flag.
|
Change Development Status to Beta, add Python 3.4 support flag.
|
Python
|
mit
|
1045347128/Flask-Boost,hustlzp/Flask-Boost,1045347128/Flask-Boost,hustlzp/Flask-Boost,hustlzp/Flask-Boost,hustlzp/Flask-Boost,1045347128/Flask-Boost,1045347128/Flask-Boost
|
from setuptools import setup, find_packages
import flask_boost
entry_points = {
"console_scripts": [
"boost = flask_boost.cli:main",
]
}
with open("requirements.txt") as f:
requires = [l for l in f.read().splitlines() if l]
setup(
name='Flask-Boost',
version=flask_boost.__version__,
packages=find_packages(),
include_package_data=True,
description='Flask application generator for boosting your development.',
long_description=open('README.rst').read(),
url='https://github.com/hustlzp/Flask-Boost',
author='hustlzp',
author_email='hustlzp@gmail.com',
license='MIT',
keywords='flask sample generator',
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)Change Development Status to Beta, add Python 3.4 support flag.
|
from setuptools import setup, find_packages
import flask_boost
entry_points = {
"console_scripts": [
"boost = flask_boost.cli:main",
]
}
with open("requirements.txt") as f:
requires = [l for l in f.read().splitlines() if l]
setup(
name='Flask-Boost',
version=flask_boost.__version__,
packages=find_packages(),
include_package_data=True,
description='Flask application generator for boosting your development.',
long_description=open('README.rst').read(),
url='https://github.com/hustlzp/Flask-Boost',
author='hustlzp',
author_email='hustlzp@gmail.com',
license='MIT',
keywords='flask sample generator',
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
<commit_before>from setuptools import setup, find_packages
import flask_boost
entry_points = {
"console_scripts": [
"boost = flask_boost.cli:main",
]
}
with open("requirements.txt") as f:
requires = [l for l in f.read().splitlines() if l]
setup(
name='Flask-Boost',
version=flask_boost.__version__,
packages=find_packages(),
include_package_data=True,
description='Flask application generator for boosting your development.',
long_description=open('README.rst').read(),
url='https://github.com/hustlzp/Flask-Boost',
author='hustlzp',
author_email='hustlzp@gmail.com',
license='MIT',
keywords='flask sample generator',
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)<commit_msg>Change Development Status to Beta, add Python 3.4 support flag.<commit_after>
|
from setuptools import setup, find_packages
import flask_boost
entry_points = {
"console_scripts": [
"boost = flask_boost.cli:main",
]
}
with open("requirements.txt") as f:
requires = [l for l in f.read().splitlines() if l]
setup(
name='Flask-Boost',
version=flask_boost.__version__,
packages=find_packages(),
include_package_data=True,
description='Flask application generator for boosting your development.',
long_description=open('README.rst').read(),
url='https://github.com/hustlzp/Flask-Boost',
author='hustlzp',
author_email='hustlzp@gmail.com',
license='MIT',
keywords='flask sample generator',
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
from setuptools import setup, find_packages
import flask_boost
entry_points = {
"console_scripts": [
"boost = flask_boost.cli:main",
]
}
with open("requirements.txt") as f:
requires = [l for l in f.read().splitlines() if l]
setup(
name='Flask-Boost',
version=flask_boost.__version__,
packages=find_packages(),
include_package_data=True,
description='Flask application generator for boosting your development.',
long_description=open('README.rst').read(),
url='https://github.com/hustlzp/Flask-Boost',
author='hustlzp',
author_email='hustlzp@gmail.com',
license='MIT',
keywords='flask sample generator',
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)Change Development Status to Beta, add Python 3.4 support flag.from setuptools import setup, find_packages
import flask_boost
entry_points = {
"console_scripts": [
"boost = flask_boost.cli:main",
]
}
with open("requirements.txt") as f:
requires = [l for l in f.read().splitlines() if l]
setup(
name='Flask-Boost',
version=flask_boost.__version__,
packages=find_packages(),
include_package_data=True,
description='Flask application generator for boosting your development.',
long_description=open('README.rst').read(),
url='https://github.com/hustlzp/Flask-Boost',
author='hustlzp',
author_email='hustlzp@gmail.com',
license='MIT',
keywords='flask sample generator',
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
<commit_before>from setuptools import setup, find_packages
import flask_boost
entry_points = {
"console_scripts": [
"boost = flask_boost.cli:main",
]
}
with open("requirements.txt") as f:
requires = [l for l in f.read().splitlines() if l]
setup(
name='Flask-Boost',
version=flask_boost.__version__,
packages=find_packages(),
include_package_data=True,
description='Flask application generator for boosting your development.',
long_description=open('README.rst').read(),
url='https://github.com/hustlzp/Flask-Boost',
author='hustlzp',
author_email='hustlzp@gmail.com',
license='MIT',
keywords='flask sample generator',
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)<commit_msg>Change Development Status to Beta, add Python 3.4 support flag.<commit_after>from setuptools import setup, find_packages
import flask_boost
entry_points = {
"console_scripts": [
"boost = flask_boost.cli:main",
]
}
with open("requirements.txt") as f:
requires = [l for l in f.read().splitlines() if l]
setup(
name='Flask-Boost',
version=flask_boost.__version__,
packages=find_packages(),
include_package_data=True,
description='Flask application generator for boosting your development.',
long_description=open('README.rst').read(),
url='https://github.com/hustlzp/Flask-Boost',
author='hustlzp',
author_email='hustlzp@gmail.com',
license='MIT',
keywords='flask sample generator',
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
c4ab1ebcbc9d452972732ef5b15c0cf1b09bd8bc
|
changes/jobs/sync_repo.py
|
changes/jobs/sync_repo.py
|
from datetime import datetime
from changes.config import db, queue
from changes.models import Repository
def sync_repo(repo_id):
repo = Repository.query.get(repo_id)
if not repo:
return
vcs = repo.get_vcs()
if vcs is None:
return
repo.last_update_attempt = datetime.utcnow()
db.session.add(repo)
db.session.commit()
try:
if vcs.exists():
vcs.update()
else:
vcs.clone()
# TODO(dcramer): this doesnt scrape everything, and really we wouldn't
# want to do this all in a single job so we should split this into a
# backfill task
might_have_more = True
parent = None
while might_have_more:
might_have_more = False
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
if not created:
break
might_have_more = True
parent = commit.id
repo.last_update = datetime.utcnow()
db.session.add(repo)
db.session.commit()
queue.delay('sync_repo', kwargs={
'repo_id': repo_id
}, countdown=15)
except Exception as exc:
# should we actually use retry support here?
raise queue.retry('sync_repo', kwargs={
'repo_id': repo_id,
}, exc=exc, countdown=120)
|
from datetime import datetime
from flask import current_app
from changes.config import db, queue
from changes.models import Repository
def sync_repo(repo_id):
repo = Repository.query.get(repo_id)
if not repo:
return
vcs = repo.get_vcs()
if vcs is None:
return
repo.last_update_attempt = datetime.utcnow()
db.session.add(repo)
db.session.commit()
try:
if vcs.exists():
vcs.update()
else:
vcs.clone()
# TODO(dcramer): this doesnt scrape everything, and really we wouldn't
# want to do this all in a single job so we should split this into a
# backfill task
might_have_more = True
parent = None
while might_have_more:
might_have_more = False
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
if not created:
break
might_have_more = True
parent = commit.id
repo.last_update = datetime.utcnow()
db.session.add(repo)
db.session.commit()
queue.delay('sync_repo', kwargs={
'repo_id': repo_id
}, countdown=15)
except Exception as exc:
# should we actually use retry support here?
current_app.logger.exception('Failed to sync repository %s', repo_id)
raise queue.retry('sync_repo', kwargs={
'repo_id': repo_id,
}, exc=exc, countdown=120)
|
Use app logging instead of celery
|
Use app logging instead of celery
|
Python
|
apache-2.0
|
wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes
|
from datetime import datetime
from changes.config import db, queue
from changes.models import Repository
def sync_repo(repo_id):
repo = Repository.query.get(repo_id)
if not repo:
return
vcs = repo.get_vcs()
if vcs is None:
return
repo.last_update_attempt = datetime.utcnow()
db.session.add(repo)
db.session.commit()
try:
if vcs.exists():
vcs.update()
else:
vcs.clone()
# TODO(dcramer): this doesnt scrape everything, and really we wouldn't
# want to do this all in a single job so we should split this into a
# backfill task
might_have_more = True
parent = None
while might_have_more:
might_have_more = False
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
if not created:
break
might_have_more = True
parent = commit.id
repo.last_update = datetime.utcnow()
db.session.add(repo)
db.session.commit()
queue.delay('sync_repo', kwargs={
'repo_id': repo_id
}, countdown=15)
except Exception as exc:
# should we actually use retry support here?
raise queue.retry('sync_repo', kwargs={
'repo_id': repo_id,
}, exc=exc, countdown=120)
Use app logging instead of celery
|
from datetime import datetime
from flask import current_app
from changes.config import db, queue
from changes.models import Repository
def sync_repo(repo_id):
repo = Repository.query.get(repo_id)
if not repo:
return
vcs = repo.get_vcs()
if vcs is None:
return
repo.last_update_attempt = datetime.utcnow()
db.session.add(repo)
db.session.commit()
try:
if vcs.exists():
vcs.update()
else:
vcs.clone()
# TODO(dcramer): this doesnt scrape everything, and really we wouldn't
# want to do this all in a single job so we should split this into a
# backfill task
might_have_more = True
parent = None
while might_have_more:
might_have_more = False
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
if not created:
break
might_have_more = True
parent = commit.id
repo.last_update = datetime.utcnow()
db.session.add(repo)
db.session.commit()
queue.delay('sync_repo', kwargs={
'repo_id': repo_id
}, countdown=15)
except Exception as exc:
# should we actually use retry support here?
current_app.logger.exception('Failed to sync repository %s', repo_id)
raise queue.retry('sync_repo', kwargs={
'repo_id': repo_id,
}, exc=exc, countdown=120)
|
<commit_before>from datetime import datetime
from changes.config import db, queue
from changes.models import Repository
def sync_repo(repo_id):
repo = Repository.query.get(repo_id)
if not repo:
return
vcs = repo.get_vcs()
if vcs is None:
return
repo.last_update_attempt = datetime.utcnow()
db.session.add(repo)
db.session.commit()
try:
if vcs.exists():
vcs.update()
else:
vcs.clone()
# TODO(dcramer): this doesnt scrape everything, and really we wouldn't
# want to do this all in a single job so we should split this into a
# backfill task
might_have_more = True
parent = None
while might_have_more:
might_have_more = False
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
if not created:
break
might_have_more = True
parent = commit.id
repo.last_update = datetime.utcnow()
db.session.add(repo)
db.session.commit()
queue.delay('sync_repo', kwargs={
'repo_id': repo_id
}, countdown=15)
except Exception as exc:
# should we actually use retry support here?
raise queue.retry('sync_repo', kwargs={
'repo_id': repo_id,
}, exc=exc, countdown=120)
<commit_msg>Use app logging instead of celery<commit_after>
|
from datetime import datetime
from flask import current_app
from changes.config import db, queue
from changes.models import Repository
def sync_repo(repo_id):
repo = Repository.query.get(repo_id)
if not repo:
return
vcs = repo.get_vcs()
if vcs is None:
return
repo.last_update_attempt = datetime.utcnow()
db.session.add(repo)
db.session.commit()
try:
if vcs.exists():
vcs.update()
else:
vcs.clone()
# TODO(dcramer): this doesnt scrape everything, and really we wouldn't
# want to do this all in a single job so we should split this into a
# backfill task
might_have_more = True
parent = None
while might_have_more:
might_have_more = False
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
if not created:
break
might_have_more = True
parent = commit.id
repo.last_update = datetime.utcnow()
db.session.add(repo)
db.session.commit()
queue.delay('sync_repo', kwargs={
'repo_id': repo_id
}, countdown=15)
except Exception as exc:
# should we actually use retry support here?
current_app.logger.exception('Failed to sync repository %s', repo_id)
raise queue.retry('sync_repo', kwargs={
'repo_id': repo_id,
}, exc=exc, countdown=120)
|
from datetime import datetime
from changes.config import db, queue
from changes.models import Repository
def sync_repo(repo_id):
repo = Repository.query.get(repo_id)
if not repo:
return
vcs = repo.get_vcs()
if vcs is None:
return
repo.last_update_attempt = datetime.utcnow()
db.session.add(repo)
db.session.commit()
try:
if vcs.exists():
vcs.update()
else:
vcs.clone()
# TODO(dcramer): this doesnt scrape everything, and really we wouldn't
# want to do this all in a single job so we should split this into a
# backfill task
might_have_more = True
parent = None
while might_have_more:
might_have_more = False
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
if not created:
break
might_have_more = True
parent = commit.id
repo.last_update = datetime.utcnow()
db.session.add(repo)
db.session.commit()
queue.delay('sync_repo', kwargs={
'repo_id': repo_id
}, countdown=15)
except Exception as exc:
# should we actually use retry support here?
raise queue.retry('sync_repo', kwargs={
'repo_id': repo_id,
}, exc=exc, countdown=120)
Use app logging instead of celeryfrom datetime import datetime
from flask import current_app
from changes.config import db, queue
from changes.models import Repository
def sync_repo(repo_id):
repo = Repository.query.get(repo_id)
if not repo:
return
vcs = repo.get_vcs()
if vcs is None:
return
repo.last_update_attempt = datetime.utcnow()
db.session.add(repo)
db.session.commit()
try:
if vcs.exists():
vcs.update()
else:
vcs.clone()
# TODO(dcramer): this doesnt scrape everything, and really we wouldn't
# want to do this all in a single job so we should split this into a
# backfill task
might_have_more = True
parent = None
while might_have_more:
might_have_more = False
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
if not created:
break
might_have_more = True
parent = commit.id
repo.last_update = datetime.utcnow()
db.session.add(repo)
db.session.commit()
queue.delay('sync_repo', kwargs={
'repo_id': repo_id
}, countdown=15)
except Exception as exc:
# should we actually use retry support here?
current_app.logger.exception('Failed to sync repository %s', repo_id)
raise queue.retry('sync_repo', kwargs={
'repo_id': repo_id,
}, exc=exc, countdown=120)
|
<commit_before>from datetime import datetime
from changes.config import db, queue
from changes.models import Repository
def sync_repo(repo_id):
repo = Repository.query.get(repo_id)
if not repo:
return
vcs = repo.get_vcs()
if vcs is None:
return
repo.last_update_attempt = datetime.utcnow()
db.session.add(repo)
db.session.commit()
try:
if vcs.exists():
vcs.update()
else:
vcs.clone()
# TODO(dcramer): this doesnt scrape everything, and really we wouldn't
# want to do this all in a single job so we should split this into a
# backfill task
might_have_more = True
parent = None
while might_have_more:
might_have_more = False
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
if not created:
break
might_have_more = True
parent = commit.id
repo.last_update = datetime.utcnow()
db.session.add(repo)
db.session.commit()
queue.delay('sync_repo', kwargs={
'repo_id': repo_id
}, countdown=15)
except Exception as exc:
# should we actually use retry support here?
raise queue.retry('sync_repo', kwargs={
'repo_id': repo_id,
}, exc=exc, countdown=120)
<commit_msg>Use app logging instead of celery<commit_after>from datetime import datetime
from flask import current_app
from changes.config import db, queue
from changes.models import Repository
def sync_repo(repo_id):
repo = Repository.query.get(repo_id)
if not repo:
return
vcs = repo.get_vcs()
if vcs is None:
return
repo.last_update_attempt = datetime.utcnow()
db.session.add(repo)
db.session.commit()
try:
if vcs.exists():
vcs.update()
else:
vcs.clone()
# TODO(dcramer): this doesnt scrape everything, and really we wouldn't
# want to do this all in a single job so we should split this into a
# backfill task
might_have_more = True
parent = None
while might_have_more:
might_have_more = False
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
if not created:
break
might_have_more = True
parent = commit.id
repo.last_update = datetime.utcnow()
db.session.add(repo)
db.session.commit()
queue.delay('sync_repo', kwargs={
'repo_id': repo_id
}, countdown=15)
except Exception as exc:
# should we actually use retry support here?
current_app.logger.exception('Failed to sync repository %s', repo_id)
raise queue.retry('sync_repo', kwargs={
'repo_id': repo_id,
}, exc=exc, countdown=120)
|
5bef5472b55b36c1c9174ef861e92f057249ca9a
|
zou/app/models/preview_file.py
|
zou/app/models/preview_file.py
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class PreviewFile(db.Model, BaseMixin, SerializerMixin):
"""
Describes a file which is aimed at being reviewed. It is not a publication
neither a working file.
"""
name = db.Column(db.String(250))
revision = db.Column(db.Integer(), default=1)
description = db.Column(db.Text())
source = db.Column(db.String(40))
shotgun_id = db.Column(db.Integer, unique=True)
is_movie = db.Column(db.Boolean, default=False)
url = db.Column(db.String(600))
uploaded_movie_url = db.Column(db.String(600))
uploaded_movie_name = db.Column(db.String(150))
task_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("task.id"),
index=True
)
person_id = db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
source_file_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("output_file.id")
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"revision",
name="preview_uc"
),
)
def __repr__(self):
return "<PreviewFile %s>" % self.id
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class PreviewFile(db.Model, BaseMixin, SerializerMixin):
"""
Describes a file which is aimed at being reviewed. It is not a publication
neither a working file.
"""
name = db.Column(db.String(250))
revision = db.Column(db.Integer(), default=1)
description = db.Column(db.Text())
path = db.Column(db.String(400))
source = db.Column(db.String(40))
shotgun_id = db.Column(db.Integer, unique=True)
is_movie = db.Column(db.Boolean, default=False)
url = db.Column(db.String(600))
uploaded_movie_url = db.Column(db.String(600))
uploaded_movie_name = db.Column(db.String(150))
task_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("task.id"),
index=True
)
person_id = db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
source_file_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("output_file.id")
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"revision",
name="preview_uc"
),
)
def __repr__(self):
return "<PreviewFile %s>" % self.id
|
Add path field to preview file
|
Add path field to preview file
|
Python
|
agpl-3.0
|
cgwire/zou
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class PreviewFile(db.Model, BaseMixin, SerializerMixin):
"""
Describes a file which is aimed at being reviewed. It is not a publication
neither a working file.
"""
name = db.Column(db.String(250))
revision = db.Column(db.Integer(), default=1)
description = db.Column(db.Text())
source = db.Column(db.String(40))
shotgun_id = db.Column(db.Integer, unique=True)
is_movie = db.Column(db.Boolean, default=False)
url = db.Column(db.String(600))
uploaded_movie_url = db.Column(db.String(600))
uploaded_movie_name = db.Column(db.String(150))
task_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("task.id"),
index=True
)
person_id = db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
source_file_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("output_file.id")
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"revision",
name="preview_uc"
),
)
def __repr__(self):
return "<PreviewFile %s>" % self.id
Add path field to preview file
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class PreviewFile(db.Model, BaseMixin, SerializerMixin):
"""
Describes a file which is aimed at being reviewed. It is not a publication
neither a working file.
"""
name = db.Column(db.String(250))
revision = db.Column(db.Integer(), default=1)
description = db.Column(db.Text())
path = db.Column(db.String(400))
source = db.Column(db.String(40))
shotgun_id = db.Column(db.Integer, unique=True)
is_movie = db.Column(db.Boolean, default=False)
url = db.Column(db.String(600))
uploaded_movie_url = db.Column(db.String(600))
uploaded_movie_name = db.Column(db.String(150))
task_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("task.id"),
index=True
)
person_id = db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
source_file_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("output_file.id")
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"revision",
name="preview_uc"
),
)
def __repr__(self):
return "<PreviewFile %s>" % self.id
|
<commit_before>from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class PreviewFile(db.Model, BaseMixin, SerializerMixin):
"""
Describes a file which is aimed at being reviewed. It is not a publication
neither a working file.
"""
name = db.Column(db.String(250))
revision = db.Column(db.Integer(), default=1)
description = db.Column(db.Text())
source = db.Column(db.String(40))
shotgun_id = db.Column(db.Integer, unique=True)
is_movie = db.Column(db.Boolean, default=False)
url = db.Column(db.String(600))
uploaded_movie_url = db.Column(db.String(600))
uploaded_movie_name = db.Column(db.String(150))
task_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("task.id"),
index=True
)
person_id = db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
source_file_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("output_file.id")
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"revision",
name="preview_uc"
),
)
def __repr__(self):
return "<PreviewFile %s>" % self.id
<commit_msg>Add path field to preview file<commit_after>
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class PreviewFile(db.Model, BaseMixin, SerializerMixin):
"""
Describes a file which is aimed at being reviewed. It is not a publication
neither a working file.
"""
name = db.Column(db.String(250))
revision = db.Column(db.Integer(), default=1)
description = db.Column(db.Text())
path = db.Column(db.String(400))
source = db.Column(db.String(40))
shotgun_id = db.Column(db.Integer, unique=True)
is_movie = db.Column(db.Boolean, default=False)
url = db.Column(db.String(600))
uploaded_movie_url = db.Column(db.String(600))
uploaded_movie_name = db.Column(db.String(150))
task_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("task.id"),
index=True
)
person_id = db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
source_file_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("output_file.id")
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"revision",
name="preview_uc"
),
)
def __repr__(self):
return "<PreviewFile %s>" % self.id
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class PreviewFile(db.Model, BaseMixin, SerializerMixin):
"""
Describes a file which is aimed at being reviewed. It is not a publication
neither a working file.
"""
name = db.Column(db.String(250))
revision = db.Column(db.Integer(), default=1)
description = db.Column(db.Text())
source = db.Column(db.String(40))
shotgun_id = db.Column(db.Integer, unique=True)
is_movie = db.Column(db.Boolean, default=False)
url = db.Column(db.String(600))
uploaded_movie_url = db.Column(db.String(600))
uploaded_movie_name = db.Column(db.String(150))
task_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("task.id"),
index=True
)
person_id = db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
source_file_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("output_file.id")
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"revision",
name="preview_uc"
),
)
def __repr__(self):
return "<PreviewFile %s>" % self.id
Add path field to preview filefrom sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class PreviewFile(db.Model, BaseMixin, SerializerMixin):
"""
Describes a file which is aimed at being reviewed. It is not a publication
neither a working file.
"""
name = db.Column(db.String(250))
revision = db.Column(db.Integer(), default=1)
description = db.Column(db.Text())
path = db.Column(db.String(400))
source = db.Column(db.String(40))
shotgun_id = db.Column(db.Integer, unique=True)
is_movie = db.Column(db.Boolean, default=False)
url = db.Column(db.String(600))
uploaded_movie_url = db.Column(db.String(600))
uploaded_movie_name = db.Column(db.String(150))
task_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("task.id"),
index=True
)
person_id = db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
source_file_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("output_file.id")
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"revision",
name="preview_uc"
),
)
def __repr__(self):
return "<PreviewFile %s>" % self.id
|
<commit_before>from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class PreviewFile(db.Model, BaseMixin, SerializerMixin):
"""
Describes a file which is aimed at being reviewed. It is not a publication
neither a working file.
"""
name = db.Column(db.String(250))
revision = db.Column(db.Integer(), default=1)
description = db.Column(db.Text())
source = db.Column(db.String(40))
shotgun_id = db.Column(db.Integer, unique=True)
is_movie = db.Column(db.Boolean, default=False)
url = db.Column(db.String(600))
uploaded_movie_url = db.Column(db.String(600))
uploaded_movie_name = db.Column(db.String(150))
task_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("task.id"),
index=True
)
person_id = db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
source_file_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("output_file.id")
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"revision",
name="preview_uc"
),
)
def __repr__(self):
return "<PreviewFile %s>" % self.id
<commit_msg>Add path field to preview file<commit_after>from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class PreviewFile(db.Model, BaseMixin, SerializerMixin):
"""
Describes a file which is aimed at being reviewed. It is not a publication
neither a working file.
"""
name = db.Column(db.String(250))
revision = db.Column(db.Integer(), default=1)
description = db.Column(db.Text())
path = db.Column(db.String(400))
source = db.Column(db.String(40))
shotgun_id = db.Column(db.Integer, unique=True)
is_movie = db.Column(db.Boolean, default=False)
url = db.Column(db.String(600))
uploaded_movie_url = db.Column(db.String(600))
uploaded_movie_name = db.Column(db.String(150))
task_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("task.id"),
index=True
)
person_id = db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
source_file_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("output_file.id")
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"revision",
name="preview_uc"
),
)
def __repr__(self):
return "<PreviewFile %s>" % self.id
|
51e434dfb11aaa35a93b1ca83777b6fc10ce609c
|
setup.py
|
setup.py
|
import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(name='more.transaction',
version='0.8.dev0',
description="transaction integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath sqlalchemy zodb transaction',
license="BSD",
url="http://pypi.python.org/pypi/more.transaction",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath >= 0.15',
'transaction',
],
extras_require=dict(
test=[
'coverage',
'pytest >= 2.6.0',
'pytest-cov',
'webtest'
],
))
|
import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(
name='more.transaction',
version='0.8.dev0',
description="transaction integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath sqlalchemy zodb transaction',
license="BSD",
url="https://github.com/morepath/more.transaction",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Development Status :: 5 - Production/Stable'
],
install_requires=[
'setuptools',
'morepath >= 0.15',
'transaction',
],
extras_require=dict(
test=[
'coverage',
'pytest >= 2.6.0',
'pytest-cov',
'webtest'
],
))
|
Add classifiers and fix URL
|
Add classifiers and fix URL
|
Python
|
bsd-3-clause
|
morepath/more.transaction
|
import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(name='more.transaction',
version='0.8.dev0',
description="transaction integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath sqlalchemy zodb transaction',
license="BSD",
url="http://pypi.python.org/pypi/more.transaction",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath >= 0.15',
'transaction',
],
extras_require=dict(
test=[
'coverage',
'pytest >= 2.6.0',
'pytest-cov',
'webtest'
],
))
Add classifiers and fix URL
|
import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(
name='more.transaction',
version='0.8.dev0',
description="transaction integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath sqlalchemy zodb transaction',
license="BSD",
url="https://github.com/morepath/more.transaction",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Development Status :: 5 - Production/Stable'
],
install_requires=[
'setuptools',
'morepath >= 0.15',
'transaction',
],
extras_require=dict(
test=[
'coverage',
'pytest >= 2.6.0',
'pytest-cov',
'webtest'
],
))
|
<commit_before>import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(name='more.transaction',
version='0.8.dev0',
description="transaction integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath sqlalchemy zodb transaction',
license="BSD",
url="http://pypi.python.org/pypi/more.transaction",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath >= 0.15',
'transaction',
],
extras_require=dict(
test=[
'coverage',
'pytest >= 2.6.0',
'pytest-cov',
'webtest'
],
))
<commit_msg>Add classifiers and fix URL<commit_after>
|
import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(
name='more.transaction',
version='0.8.dev0',
description="transaction integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath sqlalchemy zodb transaction',
license="BSD",
url="https://github.com/morepath/more.transaction",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Development Status :: 5 - Production/Stable'
],
install_requires=[
'setuptools',
'morepath >= 0.15',
'transaction',
],
extras_require=dict(
test=[
'coverage',
'pytest >= 2.6.0',
'pytest-cov',
'webtest'
],
))
|
import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(name='more.transaction',
version='0.8.dev0',
description="transaction integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath sqlalchemy zodb transaction',
license="BSD",
url="http://pypi.python.org/pypi/more.transaction",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath >= 0.15',
'transaction',
],
extras_require=dict(
test=[
'coverage',
'pytest >= 2.6.0',
'pytest-cov',
'webtest'
],
))
Add classifiers and fix URLimport io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(
name='more.transaction',
version='0.8.dev0',
description="transaction integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath sqlalchemy zodb transaction',
license="BSD",
url="https://github.com/morepath/more.transaction",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Development Status :: 5 - Production/Stable'
],
install_requires=[
'setuptools',
'morepath >= 0.15',
'transaction',
],
extras_require=dict(
test=[
'coverage',
'pytest >= 2.6.0',
'pytest-cov',
'webtest'
],
))
|
<commit_before>import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(name='more.transaction',
version='0.8.dev0',
description="transaction integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath sqlalchemy zodb transaction',
license="BSD",
url="http://pypi.python.org/pypi/more.transaction",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath >= 0.15',
'transaction',
],
extras_require=dict(
test=[
'coverage',
'pytest >= 2.6.0',
'pytest-cov',
'webtest'
],
))
<commit_msg>Add classifiers and fix URL<commit_after>import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
setup(
name='more.transaction',
version='0.8.dev0',
description="transaction integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath sqlalchemy zodb transaction',
license="BSD",
url="https://github.com/morepath/more.transaction",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Development Status :: 5 - Production/Stable'
],
install_requires=[
'setuptools',
'morepath >= 0.15',
'transaction',
],
extras_require=dict(
test=[
'coverage',
'pytest >= 2.6.0',
'pytest-cov',
'webtest'
],
))
|
69e34aff3a25b33fa804ca97e327ad4f818f7d14
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='pyhaproxy',
version='0.2.2',
keywords=('haproxy', 'parse'),
description='A Python library to parse haproxy configuration file',
license='MIT License',
install_requires=[],
include_package_data=True,
package_data={
'pyhaproxy': ['*.peg'],
},
author='Joey',
author_email='majunjiev@gmail.com',
url='https://github.com/imjoey/pyhaproxy',
packages=find_packages(),
platforms='any',
)
|
from setuptools import setup, find_packages
setup(
name='pyhaproxy',
version='0.2.4',
keywords=('haproxy', 'parse'),
description='A Python library to parse haproxy configuration file',
license='MIT License',
install_requires=[],
include_package_data=True,
package_data={
'pyhaproxy': ['*.peg'],
},
author='Joey',
author_email='majunjiev@gmail.com',
url='https://github.com/imjoey/pyhaproxy',
packages=find_packages(),
platforms='any',
)
|
Add additional filestring param for parser.Parse method.
|
version2.4: Add additional filestring param for parser.Parse method.
|
Python
|
mit
|
imjoey/pyhaproxy
|
from setuptools import setup, find_packages
setup(
name='pyhaproxy',
version='0.2.2',
keywords=('haproxy', 'parse'),
description='A Python library to parse haproxy configuration file',
license='MIT License',
install_requires=[],
include_package_data=True,
package_data={
'pyhaproxy': ['*.peg'],
},
author='Joey',
author_email='majunjiev@gmail.com',
url='https://github.com/imjoey/pyhaproxy',
packages=find_packages(),
platforms='any',
)
version2.4: Add additional filestring param for parser.Parse method.
|
from setuptools import setup, find_packages
setup(
name='pyhaproxy',
version='0.2.4',
keywords=('haproxy', 'parse'),
description='A Python library to parse haproxy configuration file',
license='MIT License',
install_requires=[],
include_package_data=True,
package_data={
'pyhaproxy': ['*.peg'],
},
author='Joey',
author_email='majunjiev@gmail.com',
url='https://github.com/imjoey/pyhaproxy',
packages=find_packages(),
platforms='any',
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='pyhaproxy',
version='0.2.2',
keywords=('haproxy', 'parse'),
description='A Python library to parse haproxy configuration file',
license='MIT License',
install_requires=[],
include_package_data=True,
package_data={
'pyhaproxy': ['*.peg'],
},
author='Joey',
author_email='majunjiev@gmail.com',
url='https://github.com/imjoey/pyhaproxy',
packages=find_packages(),
platforms='any',
)
<commit_msg>version2.4: Add additional filestring param for parser.Parse method.<commit_after>
|
from setuptools import setup, find_packages
setup(
name='pyhaproxy',
version='0.2.4',
keywords=('haproxy', 'parse'),
description='A Python library to parse haproxy configuration file',
license='MIT License',
install_requires=[],
include_package_data=True,
package_data={
'pyhaproxy': ['*.peg'],
},
author='Joey',
author_email='majunjiev@gmail.com',
url='https://github.com/imjoey/pyhaproxy',
packages=find_packages(),
platforms='any',
)
|
from setuptools import setup, find_packages
setup(
name='pyhaproxy',
version='0.2.2',
keywords=('haproxy', 'parse'),
description='A Python library to parse haproxy configuration file',
license='MIT License',
install_requires=[],
include_package_data=True,
package_data={
'pyhaproxy': ['*.peg'],
},
author='Joey',
author_email='majunjiev@gmail.com',
url='https://github.com/imjoey/pyhaproxy',
packages=find_packages(),
platforms='any',
)
version2.4: Add additional filestring param for parser.Parse method.from setuptools import setup, find_packages
setup(
name='pyhaproxy',
version='0.2.4',
keywords=('haproxy', 'parse'),
description='A Python library to parse haproxy configuration file',
license='MIT License',
install_requires=[],
include_package_data=True,
package_data={
'pyhaproxy': ['*.peg'],
},
author='Joey',
author_email='majunjiev@gmail.com',
url='https://github.com/imjoey/pyhaproxy',
packages=find_packages(),
platforms='any',
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='pyhaproxy',
version='0.2.2',
keywords=('haproxy', 'parse'),
description='A Python library to parse haproxy configuration file',
license='MIT License',
install_requires=[],
include_package_data=True,
package_data={
'pyhaproxy': ['*.peg'],
},
author='Joey',
author_email='majunjiev@gmail.com',
url='https://github.com/imjoey/pyhaproxy',
packages=find_packages(),
platforms='any',
)
<commit_msg>version2.4: Add additional filestring param for parser.Parse method.<commit_after>from setuptools import setup, find_packages
setup(
name='pyhaproxy',
version='0.2.4',
keywords=('haproxy', 'parse'),
description='A Python library to parse haproxy configuration file',
license='MIT License',
install_requires=[],
include_package_data=True,
package_data={
'pyhaproxy': ['*.peg'],
},
author='Joey',
author_email='majunjiev@gmail.com',
url='https://github.com/imjoey/pyhaproxy',
packages=find_packages(),
platforms='any',
)
|
ba41dc9bff21558d1712fe06751f867806d8abd6
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='python_lemonway',
version='0.1.0',
author='Pierre Pigeau',
author_email='ppigeau@payplug.fr',
packages=['lemonway'],
url='',
license='LICENSE.txt',
description='',
long_description=open('README.rst').read(),
package_data={'lemonway': ['lemonway.wsdl']},
install_requires=[
"suds-jurko==0.6",
"lxml==3.3.5"
],
)
|
from distutils.core import setup
setup(
name='python_lemonway',
version='0.1.1',
author='Pierre Pigeau',
author_email='ppigeau@payplug.fr',
packages=['lemonway'],
url='',
license='LICENSE.txt',
description='',
long_description=open('README.rst').read(),
package_data={'lemonway': ['lemonway.wsdl']},
install_requires=[
"suds-jurko==0.6",
"lxml==3.3.5"
],
)
|
ADD - newversion of python_lemonway with improvements of MoneyIn
|
ADD - newversion of python_lemonway with improvements of MoneyIn
|
Python
|
mit
|
brightforme/python-lemonway
|
from distutils.core import setup
setup(
name='python_lemonway',
version='0.1.0',
author='Pierre Pigeau',
author_email='ppigeau@payplug.fr',
packages=['lemonway'],
url='',
license='LICENSE.txt',
description='',
long_description=open('README.rst').read(),
package_data={'lemonway': ['lemonway.wsdl']},
install_requires=[
"suds-jurko==0.6",
"lxml==3.3.5"
],
)
ADD - newversion of python_lemonway with improvements of MoneyIn
|
from distutils.core import setup
setup(
name='python_lemonway',
version='0.1.1',
author='Pierre Pigeau',
author_email='ppigeau@payplug.fr',
packages=['lemonway'],
url='',
license='LICENSE.txt',
description='',
long_description=open('README.rst').read(),
package_data={'lemonway': ['lemonway.wsdl']},
install_requires=[
"suds-jurko==0.6",
"lxml==3.3.5"
],
)
|
<commit_before>from distutils.core import setup
setup(
name='python_lemonway',
version='0.1.0',
author='Pierre Pigeau',
author_email='ppigeau@payplug.fr',
packages=['lemonway'],
url='',
license='LICENSE.txt',
description='',
long_description=open('README.rst').read(),
package_data={'lemonway': ['lemonway.wsdl']},
install_requires=[
"suds-jurko==0.6",
"lxml==3.3.5"
],
)
<commit_msg>ADD - newversion of python_lemonway with improvements of MoneyIn<commit_after>
|
from distutils.core import setup
setup(
name='python_lemonway',
version='0.1.1',
author='Pierre Pigeau',
author_email='ppigeau@payplug.fr',
packages=['lemonway'],
url='',
license='LICENSE.txt',
description='',
long_description=open('README.rst').read(),
package_data={'lemonway': ['lemonway.wsdl']},
install_requires=[
"suds-jurko==0.6",
"lxml==3.3.5"
],
)
|
from distutils.core import setup
setup(
name='python_lemonway',
version='0.1.0',
author='Pierre Pigeau',
author_email='ppigeau@payplug.fr',
packages=['lemonway'],
url='',
license='LICENSE.txt',
description='',
long_description=open('README.rst').read(),
package_data={'lemonway': ['lemonway.wsdl']},
install_requires=[
"suds-jurko==0.6",
"lxml==3.3.5"
],
)
ADD - newversion of python_lemonway with improvements of MoneyInfrom distutils.core import setup
setup(
name='python_lemonway',
version='0.1.1',
author='Pierre Pigeau',
author_email='ppigeau@payplug.fr',
packages=['lemonway'],
url='',
license='LICENSE.txt',
description='',
long_description=open('README.rst').read(),
package_data={'lemonway': ['lemonway.wsdl']},
install_requires=[
"suds-jurko==0.6",
"lxml==3.3.5"
],
)
|
<commit_before>from distutils.core import setup
setup(
name='python_lemonway',
version='0.1.0',
author='Pierre Pigeau',
author_email='ppigeau@payplug.fr',
packages=['lemonway'],
url='',
license='LICENSE.txt',
description='',
long_description=open('README.rst').read(),
package_data={'lemonway': ['lemonway.wsdl']},
install_requires=[
"suds-jurko==0.6",
"lxml==3.3.5"
],
)
<commit_msg>ADD - newversion of python_lemonway with improvements of MoneyIn<commit_after>from distutils.core import setup
setup(
name='python_lemonway',
version='0.1.1',
author='Pierre Pigeau',
author_email='ppigeau@payplug.fr',
packages=['lemonway'],
url='',
license='LICENSE.txt',
description='',
long_description=open('README.rst').read(),
package_data={'lemonway': ['lemonway.wsdl']},
install_requires=[
"suds-jurko==0.6",
"lxml==3.3.5"
],
)
|
c60e104777b9f7aed79974efd3fa77855e6c7c0a
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
"""
nose-notify
~~~~~~~~~~~
A nose plugin to display testsuite progress in the notify osd.
:copyright: 2010, Pascal Hartig <phartig@rdrei.net>
:license: BSD, see LICENSE for more details
"""
from setuptools import setup
from nosenotify import __version__
setup(
name="nose-notify",
version=__version__,
author="Pascal Hartig",
author_email="phartig@rdrei.de",
description="A nose plugin to display testsuite progress "
"in the notify osd",
url="https://github.com/passy/nose-notify",
packages=['nosenotify'],
package_data={'nosenotify': ['images/*']},
long_description=__doc__,
requires=['nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules"
],
entry_points={
'nose.plugins.0.10': [
'notify = nosenotify.plugin:NotifyPlugin'
]
}
)
|
# -*- coding: utf-8 -*-
"""
nose-notify
~~~~~~~~~~~
A nose plugin to display testsuite progress in the notify osd.
:copyright: 2010, Pascal Hartig <phartig@rdrei.net>
:license: BSD, see LICENSE for more details
"""
from setuptools import setup
from nosenotify import __version__
setup(
name="nose-notify",
version=__version__,
author="Pascal Hartig",
author_email="phartig@rdrei.de",
description="A nose plugin to display testsuite progress "
"in the notify osd",
url="https://github.com/passy/nose-notify",
packages=['nosenotify'],
package_data={'nosenotify': ['images/*']},
long_description=__doc__,
requires=['nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules"
],
entry_points={
'nose.plugins.0.10': [
'notify = nosenotify.plugin:NotifyPlugin'
]
}
)
|
Add Python 3 to supported platforms
|
Add Python 3 to supported platforms
|
Python
|
bsd-3-clause
|
passy/nose-notify
|
# -*- coding: utf-8 -*-
"""
nose-notify
~~~~~~~~~~~
A nose plugin to display testsuite progress in the notify osd.
:copyright: 2010, Pascal Hartig <phartig@rdrei.net>
:license: BSD, see LICENSE for more details
"""
from setuptools import setup
from nosenotify import __version__
setup(
name="nose-notify",
version=__version__,
author="Pascal Hartig",
author_email="phartig@rdrei.de",
description="A nose plugin to display testsuite progress "
"in the notify osd",
url="https://github.com/passy/nose-notify",
packages=['nosenotify'],
package_data={'nosenotify': ['images/*']},
long_description=__doc__,
requires=['nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules"
],
entry_points={
'nose.plugins.0.10': [
'notify = nosenotify.plugin:NotifyPlugin'
]
}
)
Add Python 3 to supported platforms
|
# -*- coding: utf-8 -*-
"""
nose-notify
~~~~~~~~~~~
A nose plugin to display testsuite progress in the notify osd.
:copyright: 2010, Pascal Hartig <phartig@rdrei.net>
:license: BSD, see LICENSE for more details
"""
from setuptools import setup
from nosenotify import __version__
setup(
name="nose-notify",
version=__version__,
author="Pascal Hartig",
author_email="phartig@rdrei.de",
description="A nose plugin to display testsuite progress "
"in the notify osd",
url="https://github.com/passy/nose-notify",
packages=['nosenotify'],
package_data={'nosenotify': ['images/*']},
long_description=__doc__,
requires=['nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules"
],
entry_points={
'nose.plugins.0.10': [
'notify = nosenotify.plugin:NotifyPlugin'
]
}
)
|
<commit_before># -*- coding: utf-8 -*-
"""
nose-notify
~~~~~~~~~~~
A nose plugin to display testsuite progress in the notify osd.
:copyright: 2010, Pascal Hartig <phartig@rdrei.net>
:license: BSD, see LICENSE for more details
"""
from setuptools import setup
from nosenotify import __version__
setup(
name="nose-notify",
version=__version__,
author="Pascal Hartig",
author_email="phartig@rdrei.de",
description="A nose plugin to display testsuite progress "
"in the notify osd",
url="https://github.com/passy/nose-notify",
packages=['nosenotify'],
package_data={'nosenotify': ['images/*']},
long_description=__doc__,
requires=['nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules"
],
entry_points={
'nose.plugins.0.10': [
'notify = nosenotify.plugin:NotifyPlugin'
]
}
)
<commit_msg>Add Python 3 to supported platforms<commit_after>
|
# -*- coding: utf-8 -*-
"""
nose-notify
~~~~~~~~~~~
A nose plugin to display testsuite progress in the notify osd.
:copyright: 2010, Pascal Hartig <phartig@rdrei.net>
:license: BSD, see LICENSE for more details
"""
from setuptools import setup
from nosenotify import __version__
setup(
name="nose-notify",
version=__version__,
author="Pascal Hartig",
author_email="phartig@rdrei.de",
description="A nose plugin to display testsuite progress "
"in the notify osd",
url="https://github.com/passy/nose-notify",
packages=['nosenotify'],
package_data={'nosenotify': ['images/*']},
long_description=__doc__,
requires=['nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules"
],
entry_points={
'nose.plugins.0.10': [
'notify = nosenotify.plugin:NotifyPlugin'
]
}
)
|
# -*- coding: utf-8 -*-
"""
nose-notify
~~~~~~~~~~~
A nose plugin to display testsuite progress in the notify osd.
:copyright: 2010, Pascal Hartig <phartig@rdrei.net>
:license: BSD, see LICENSE for more details
"""
from setuptools import setup
from nosenotify import __version__
setup(
name="nose-notify",
version=__version__,
author="Pascal Hartig",
author_email="phartig@rdrei.de",
description="A nose plugin to display testsuite progress "
"in the notify osd",
url="https://github.com/passy/nose-notify",
packages=['nosenotify'],
package_data={'nosenotify': ['images/*']},
long_description=__doc__,
requires=['nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules"
],
entry_points={
'nose.plugins.0.10': [
'notify = nosenotify.plugin:NotifyPlugin'
]
}
)
Add Python 3 to supported platforms# -*- coding: utf-8 -*-
"""
nose-notify
~~~~~~~~~~~
A nose plugin to display testsuite progress in the notify osd.
:copyright: 2010, Pascal Hartig <phartig@rdrei.net>
:license: BSD, see LICENSE for more details
"""
from setuptools import setup
from nosenotify import __version__
setup(
name="nose-notify",
version=__version__,
author="Pascal Hartig",
author_email="phartig@rdrei.de",
description="A nose plugin to display testsuite progress "
"in the notify osd",
url="https://github.com/passy/nose-notify",
packages=['nosenotify'],
package_data={'nosenotify': ['images/*']},
long_description=__doc__,
requires=['nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules"
],
entry_points={
'nose.plugins.0.10': [
'notify = nosenotify.plugin:NotifyPlugin'
]
}
)
|
<commit_before># -*- coding: utf-8 -*-
"""
nose-notify
~~~~~~~~~~~
A nose plugin to display testsuite progress in the notify osd.
:copyright: 2010, Pascal Hartig <phartig@rdrei.net>
:license: BSD, see LICENSE for more details
"""
from setuptools import setup
from nosenotify import __version__
setup(
name="nose-notify",
version=__version__,
author="Pascal Hartig",
author_email="phartig@rdrei.de",
description="A nose plugin to display testsuite progress "
"in the notify osd",
url="https://github.com/passy/nose-notify",
packages=['nosenotify'],
package_data={'nosenotify': ['images/*']},
long_description=__doc__,
requires=['nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules"
],
entry_points={
'nose.plugins.0.10': [
'notify = nosenotify.plugin:NotifyPlugin'
]
}
)
<commit_msg>Add Python 3 to supported platforms<commit_after># -*- coding: utf-8 -*-
"""
nose-notify
~~~~~~~~~~~
A nose plugin to display testsuite progress in the notify osd.
:copyright: 2010, Pascal Hartig <phartig@rdrei.net>
:license: BSD, see LICENSE for more details
"""
from setuptools import setup
from nosenotify import __version__
setup(
name="nose-notify",
version=__version__,
author="Pascal Hartig",
author_email="phartig@rdrei.de",
description="A nose plugin to display testsuite progress "
"in the notify osd",
url="https://github.com/passy/nose-notify",
packages=['nosenotify'],
package_data={'nosenotify': ['images/*']},
long_description=__doc__,
requires=['nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules"
],
entry_points={
'nose.plugins.0.10': [
'notify = nosenotify.plugin:NotifyPlugin'
]
}
)
|
c7e5e221f8ca333ecdf757747cbc7fbbaf1f860a
|
ipkg/utils.py
|
ipkg/utils.py
|
import os
import json
import logging
LOGGER = logging.getLogger(__name__)
class DictFile(dict):
"""A ``dict``, storable as a JSON file.
"""
def __init__(self, file_path):
super(DictFile, self).__init__()
self.__file_path = file_path
self.reload()
def reload(self):
if os.path.isfile(self.__file_path):
LOGGER.debug('Loading %s', self.__file_path)
with open(self.__file_path) as f:
self.update(json.load(f))
def save(self):
LOGGER.debug('Writing %s', self.__file_path)
with open(self.__file_path, 'w') as f:
json.dump(self, f, indent=4)
|
import os
import json
import logging
LOGGER = logging.getLogger(__name__)
class DictFile(dict):
"""A ``dict``, storable as a JSON file.
"""
def __init__(self, file_path):
super(DictFile, self).__init__()
self.__file_path = file_path
self.reload()
def reload(self):
if os.path.isfile(self.__file_path):
LOGGER.debug('Loading %s', self.__file_path)
with open(self.__file_path) as f:
self.update(json.load(f))
def clear(self):
"""Force the dictionary to be empty.
"""
if os.path.isfile(self.__file_path):
os.unlink(self.__file_path)
super(DictFile, self).clear()
def save(self):
LOGGER.debug('Writing %s', self.__file_path)
with open(self.__file_path, 'w') as f:
json.dump(self, f, indent=4)
|
Make DictFile remove its meta data file when calling clear()
|
Make DictFile remove its meta data file when calling clear()
|
Python
|
mit
|
pmuller/ipkg
|
import os
import json
import logging
LOGGER = logging.getLogger(__name__)
class DictFile(dict):
"""A ``dict``, storable as a JSON file.
"""
def __init__(self, file_path):
super(DictFile, self).__init__()
self.__file_path = file_path
self.reload()
def reload(self):
if os.path.isfile(self.__file_path):
LOGGER.debug('Loading %s', self.__file_path)
with open(self.__file_path) as f:
self.update(json.load(f))
def save(self):
LOGGER.debug('Writing %s', self.__file_path)
with open(self.__file_path, 'w') as f:
json.dump(self, f, indent=4)
Make DictFile remove its meta data file when calling clear()
|
import os
import json
import logging
LOGGER = logging.getLogger(__name__)
class DictFile(dict):
"""A ``dict``, storable as a JSON file.
"""
def __init__(self, file_path):
super(DictFile, self).__init__()
self.__file_path = file_path
self.reload()
def reload(self):
if os.path.isfile(self.__file_path):
LOGGER.debug('Loading %s', self.__file_path)
with open(self.__file_path) as f:
self.update(json.load(f))
def clear(self):
"""Force the dictionary to be empty.
"""
if os.path.isfile(self.__file_path):
os.unlink(self.__file_path)
super(DictFile, self).clear()
def save(self):
LOGGER.debug('Writing %s', self.__file_path)
with open(self.__file_path, 'w') as f:
json.dump(self, f, indent=4)
|
<commit_before>import os
import json
import logging
LOGGER = logging.getLogger(__name__)
class DictFile(dict):
"""A ``dict``, storable as a JSON file.
"""
def __init__(self, file_path):
super(DictFile, self).__init__()
self.__file_path = file_path
self.reload()
def reload(self):
if os.path.isfile(self.__file_path):
LOGGER.debug('Loading %s', self.__file_path)
with open(self.__file_path) as f:
self.update(json.load(f))
def save(self):
LOGGER.debug('Writing %s', self.__file_path)
with open(self.__file_path, 'w') as f:
json.dump(self, f, indent=4)
<commit_msg>Make DictFile remove its meta data file when calling clear()<commit_after>
|
import os
import json
import logging
LOGGER = logging.getLogger(__name__)
class DictFile(dict):
"""A ``dict``, storable as a JSON file.
"""
def __init__(self, file_path):
super(DictFile, self).__init__()
self.__file_path = file_path
self.reload()
def reload(self):
if os.path.isfile(self.__file_path):
LOGGER.debug('Loading %s', self.__file_path)
with open(self.__file_path) as f:
self.update(json.load(f))
def clear(self):
"""Force the dictionary to be empty.
"""
if os.path.isfile(self.__file_path):
os.unlink(self.__file_path)
super(DictFile, self).clear()
def save(self):
LOGGER.debug('Writing %s', self.__file_path)
with open(self.__file_path, 'w') as f:
json.dump(self, f, indent=4)
|
import os
import json
import logging
LOGGER = logging.getLogger(__name__)
class DictFile(dict):
"""A ``dict``, storable as a JSON file.
"""
def __init__(self, file_path):
super(DictFile, self).__init__()
self.__file_path = file_path
self.reload()
def reload(self):
if os.path.isfile(self.__file_path):
LOGGER.debug('Loading %s', self.__file_path)
with open(self.__file_path) as f:
self.update(json.load(f))
def save(self):
LOGGER.debug('Writing %s', self.__file_path)
with open(self.__file_path, 'w') as f:
json.dump(self, f, indent=4)
Make DictFile remove its meta data file when calling clear()import os
import json
import logging
LOGGER = logging.getLogger(__name__)
class DictFile(dict):
"""A ``dict``, storable as a JSON file.
"""
def __init__(self, file_path):
super(DictFile, self).__init__()
self.__file_path = file_path
self.reload()
def reload(self):
if os.path.isfile(self.__file_path):
LOGGER.debug('Loading %s', self.__file_path)
with open(self.__file_path) as f:
self.update(json.load(f))
def clear(self):
"""Force the dictionary to be empty.
"""
if os.path.isfile(self.__file_path):
os.unlink(self.__file_path)
super(DictFile, self).clear()
def save(self):
LOGGER.debug('Writing %s', self.__file_path)
with open(self.__file_path, 'w') as f:
json.dump(self, f, indent=4)
|
<commit_before>import os
import json
import logging
LOGGER = logging.getLogger(__name__)
class DictFile(dict):
"""A ``dict``, storable as a JSON file.
"""
def __init__(self, file_path):
super(DictFile, self).__init__()
self.__file_path = file_path
self.reload()
def reload(self):
if os.path.isfile(self.__file_path):
LOGGER.debug('Loading %s', self.__file_path)
with open(self.__file_path) as f:
self.update(json.load(f))
def save(self):
LOGGER.debug('Writing %s', self.__file_path)
with open(self.__file_path, 'w') as f:
json.dump(self, f, indent=4)
<commit_msg>Make DictFile remove its meta data file when calling clear()<commit_after>import os
import json
import logging
LOGGER = logging.getLogger(__name__)
class DictFile(dict):
"""A ``dict``, storable as a JSON file.
"""
def __init__(self, file_path):
super(DictFile, self).__init__()
self.__file_path = file_path
self.reload()
def reload(self):
if os.path.isfile(self.__file_path):
LOGGER.debug('Loading %s', self.__file_path)
with open(self.__file_path) as f:
self.update(json.load(f))
def clear(self):
"""Force the dictionary to be empty.
"""
if os.path.isfile(self.__file_path):
os.unlink(self.__file_path)
super(DictFile, self).clear()
def save(self):
LOGGER.debug('Writing %s', self.__file_path)
with open(self.__file_path, 'w') as f:
json.dump(self, f, indent=4)
|
c2503a459b64efaa02d611f27384c7a808c4d24d
|
setup.py
|
setup.py
|
from setuptools import setup
import platform
# Make the install_requires
target = platform.python_implementation()
if target == 'PyPy':
install_requires = ['psycopg2ct']
else:
install_requires = ['psycopg2']
setup(name='queries',
version='1.2.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinmroy@gmail.com",
url="https://github.com/gmr/queries",
install_requires=install_requires,
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.md']},
py_modules=['pgsql_wrapper'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Database',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
from setuptools import setup
import platform
# Make the install_requires
target = platform.python_implementation()
if target == 'PyPy':
install_requires = ['psycopg2ct']
else:
install_requires = ['psycopg2']
setup(name='queries',
version='1.2.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinmroy@gmail.com",
url="https://github.com/gmr/queries",
install_requires=install_requires,
extras_require={'tornado': 'tornado'},
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.md']},
py_modules=['pgsql_wrapper'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Database',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
Add extras_requires for tornado support
|
Add extras_requires for tornado support
|
Python
|
bsd-3-clause
|
gmr/queries,gmr/queries
|
from setuptools import setup
import platform
# Make the install_requires
target = platform.python_implementation()
if target == 'PyPy':
install_requires = ['psycopg2ct']
else:
install_requires = ['psycopg2']
setup(name='queries',
version='1.2.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinmroy@gmail.com",
url="https://github.com/gmr/queries",
install_requires=install_requires,
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.md']},
py_modules=['pgsql_wrapper'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Database',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
Add extras_requires for tornado support
|
from setuptools import setup
import platform
# Make the install_requires
target = platform.python_implementation()
if target == 'PyPy':
install_requires = ['psycopg2ct']
else:
install_requires = ['psycopg2']
setup(name='queries',
version='1.2.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinmroy@gmail.com",
url="https://github.com/gmr/queries",
install_requires=install_requires,
extras_require={'tornado': 'tornado'},
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.md']},
py_modules=['pgsql_wrapper'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Database',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
<commit_before>from setuptools import setup
import platform
# Make the install_requires
target = platform.python_implementation()
if target == 'PyPy':
install_requires = ['psycopg2ct']
else:
install_requires = ['psycopg2']
setup(name='queries',
version='1.2.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinmroy@gmail.com",
url="https://github.com/gmr/queries",
install_requires=install_requires,
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.md']},
py_modules=['pgsql_wrapper'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Database',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
<commit_msg>Add extras_requires for tornado support<commit_after>
|
from setuptools import setup
import platform
# Make the install_requires
target = platform.python_implementation()
if target == 'PyPy':
install_requires = ['psycopg2ct']
else:
install_requires = ['psycopg2']
setup(name='queries',
version='1.2.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinmroy@gmail.com",
url="https://github.com/gmr/queries",
install_requires=install_requires,
extras_require={'tornado': 'tornado'},
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.md']},
py_modules=['pgsql_wrapper'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Database',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
from setuptools import setup
import platform
# Make the install_requires
target = platform.python_implementation()
if target == 'PyPy':
install_requires = ['psycopg2ct']
else:
install_requires = ['psycopg2']
setup(name='queries',
version='1.2.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinmroy@gmail.com",
url="https://github.com/gmr/queries",
install_requires=install_requires,
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.md']},
py_modules=['pgsql_wrapper'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Database',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
Add extras_requires for tornado supportfrom setuptools import setup
import platform
# Make the install_requires
target = platform.python_implementation()
if target == 'PyPy':
install_requires = ['psycopg2ct']
else:
install_requires = ['psycopg2']
setup(name='queries',
version='1.2.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinmroy@gmail.com",
url="https://github.com/gmr/queries",
install_requires=install_requires,
extras_require={'tornado': 'tornado'},
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.md']},
py_modules=['pgsql_wrapper'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Database',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
<commit_before>from setuptools import setup
import platform
# Make the install_requires
target = platform.python_implementation()
if target == 'PyPy':
install_requires = ['psycopg2ct']
else:
install_requires = ['psycopg2']
setup(name='queries',
version='1.2.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinmroy@gmail.com",
url="https://github.com/gmr/queries",
install_requires=install_requires,
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.md']},
py_modules=['pgsql_wrapper'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Database',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
<commit_msg>Add extras_requires for tornado support<commit_after>from setuptools import setup
import platform
# Make the install_requires
target = platform.python_implementation()
if target == 'PyPy':
install_requires = ['psycopg2ct']
else:
install_requires = ['psycopg2']
setup(name='queries',
version='1.2.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinmroy@gmail.com",
url="https://github.com/gmr/queries",
install_requires=install_requires,
extras_require={'tornado': 'tornado'},
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.md']},
py_modules=['pgsql_wrapper'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Database',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.