commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
851579b14a34b8acc1977b2f4d2c991d8e5f5f2c | ledlight.py | ledlight.py | #!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.1
duration = 4
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
series.append((GPIO.input(pin_switch)) ^ 1)
sleep(period)
print "outputting"
print series
GPIO.cleanup()
| #!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.25
duration = 30
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
print (GPIO.input(pin_switch)) ^ 1
sleep(period)
print
GPIO.cleanup()
| Print out 0/1 values as we sense them real-time. | Print out 0/1 values as we sense them real-time.
| Python | mit | zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie | #!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.1
duration = 4
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
series.append((GPIO.input(pin_switch)) ^ 1)
sleep(period)
print "outputting"
print series
GPIO.cleanup()
Print out 0/1 values as we sense them real-time. | #!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.25
duration = 30
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
print (GPIO.input(pin_switch)) ^ 1
sleep(period)
print
GPIO.cleanup()
| <commit_before>#!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.1
duration = 4
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
series.append((GPIO.input(pin_switch)) ^ 1)
sleep(period)
print "outputting"
print series
GPIO.cleanup()
<commit_msg>Print out 0/1 values as we sense them real-time.<commit_after> | #!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.25
duration = 30
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
print (GPIO.input(pin_switch)) ^ 1
sleep(period)
print
GPIO.cleanup()
| #!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.1
duration = 4
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
series.append((GPIO.input(pin_switch)) ^ 1)
sleep(period)
print "outputting"
print series
GPIO.cleanup()
Print out 0/1 values as we sense them real-time.#!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.25
duration = 30
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
print (GPIO.input(pin_switch)) ^ 1
sleep(period)
print
GPIO.cleanup()
| <commit_before>#!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.1
duration = 4
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
series.append((GPIO.input(pin_switch)) ^ 1)
sleep(period)
print "outputting"
print series
GPIO.cleanup()
<commit_msg>Print out 0/1 values as we sense them real-time.<commit_after>#!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.25
duration = 30
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
print (GPIO.input(pin_switch)) ^ 1
sleep(period)
print
GPIO.cleanup()
|
d2e52377f90c81365bd0ff62c8bea95207b44328 | indra/sources/sofia/api.py | indra/sources/sofia/api.py | import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
| import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
try:
rel_sheet = book['Relations']
except Exception as e:
rel_sheet = book['Causal']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
| Handle Causal and Relations worksheets | Handle Causal and Relations worksheets
| Python | bsd-2-clause | pvtodorov/indra,bgyori/indra,sorgerlab/indra,sorgerlab/indra,johnbachman/indra,pvtodorov/indra,johnbachman/indra,johnbachman/belpy,bgyori/indra,johnbachman/belpy,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra | import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
Handle Causal and Relations worksheets | import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
try:
rel_sheet = book['Relations']
except Exception as e:
rel_sheet = book['Causal']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
| <commit_before>import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
<commit_msg>Handle Causal and Relations worksheets<commit_after> | import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
try:
rel_sheet = book['Relations']
except Exception as e:
rel_sheet = book['Causal']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
| import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
Handle Causal and Relations worksheetsimport openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
try:
rel_sheet = book['Relations']
except Exception as e:
rel_sheet = book['Causal']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
| <commit_before>import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
<commit_msg>Handle Causal and Relations worksheets<commit_after>import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
try:
rel_sheet = book['Relations']
except Exception as e:
rel_sheet = book['Causal']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
|
e2cad9831c3d3658e096c05ba45a9285744549dd | tnm/urls.py | tnm/urls.py | from django.conf.urls.defaults import patterns, include, url
from django.contrib.gis import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('')
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('',
(r'^nearby/$', direct_to_template, {'template': 'leaflet.html'}),
(r'^api/', include('api.urls')),
)
| from django.conf.urls.defaults import patterns, include, url
from django.contrib.gis import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('')
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('',
(r'^$', direct_to_template, {'template': 'leaflet.html'}),
(r'^api/', include('api.urls')),
)
| Move code to / from /nearby. Move to tnm.com. | Move code to / from /nearby. Move to tnm.com.
| Python | mit | chosak/TransitNearMe,MobilityLab/TransitNearMe,chosak/TransitNearMe,MobilityLab/TransitNearMe,chosak/TransitNearMe | from django.conf.urls.defaults import patterns, include, url
from django.contrib.gis import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('')
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('',
(r'^nearby/$', direct_to_template, {'template': 'leaflet.html'}),
(r'^api/', include('api.urls')),
)
Move code to / from /nearby. Move to tnm.com. | from django.conf.urls.defaults import patterns, include, url
from django.contrib.gis import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('')
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('',
(r'^$', direct_to_template, {'template': 'leaflet.html'}),
(r'^api/', include('api.urls')),
)
| <commit_before>from django.conf.urls.defaults import patterns, include, url
from django.contrib.gis import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('')
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('',
(r'^nearby/$', direct_to_template, {'template': 'leaflet.html'}),
(r'^api/', include('api.urls')),
)
<commit_msg>Move code to / from /nearby. Move to tnm.com.<commit_after> | from django.conf.urls.defaults import patterns, include, url
from django.contrib.gis import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('')
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('',
(r'^$', direct_to_template, {'template': 'leaflet.html'}),
(r'^api/', include('api.urls')),
)
| from django.conf.urls.defaults import patterns, include, url
from django.contrib.gis import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('')
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('',
(r'^nearby/$', direct_to_template, {'template': 'leaflet.html'}),
(r'^api/', include('api.urls')),
)
Move code to / from /nearby. Move to tnm.com.from django.conf.urls.defaults import patterns, include, url
from django.contrib.gis import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('')
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('',
(r'^$', direct_to_template, {'template': 'leaflet.html'}),
(r'^api/', include('api.urls')),
)
| <commit_before>from django.conf.urls.defaults import patterns, include, url
from django.contrib.gis import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('')
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('',
(r'^nearby/$', direct_to_template, {'template': 'leaflet.html'}),
(r'^api/', include('api.urls')),
)
<commit_msg>Move code to / from /nearby. Move to tnm.com.<commit_after>from django.conf.urls.defaults import patterns, include, url
from django.contrib.gis import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('')
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('',
(r'^$', direct_to_template, {'template': 'leaflet.html'}),
(r'^api/', include('api.urls')),
)
|
a555737e2d594a67078a15be9d5eb3c8524d0698 | app/models.py | app/models.py | from . import db
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
age = db.Column(db.Date())
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email) | from . import db
from werkzeug.security import generate_password_hash, check_password_hash
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
password_hash = db.Column(db.String(128))
birth_date = db.Column(db.Date())
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email) | Add password hash to Monkey model | Add password hash to Monkey model
| Python | mit | timzdevz/fm-flask-app | from . import db
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
age = db.Column(db.Date())
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email)Add password hash to Monkey model | from . import db
from werkzeug.security import generate_password_hash, check_password_hash
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
password_hash = db.Column(db.String(128))
birth_date = db.Column(db.Date())
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email) | <commit_before>from . import db
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
age = db.Column(db.Date())
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email)<commit_msg>Add password hash to Monkey model<commit_after> | from . import db
from werkzeug.security import generate_password_hash, check_password_hash
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
password_hash = db.Column(db.String(128))
birth_date = db.Column(db.Date())
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email) | from . import db
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
age = db.Column(db.Date())
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email)Add password hash to Monkey modelfrom . import db
from werkzeug.security import generate_password_hash, check_password_hash
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
password_hash = db.Column(db.String(128))
birth_date = db.Column(db.Date())
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email) | <commit_before>from . import db
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
age = db.Column(db.Date())
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email)<commit_msg>Add password hash to Monkey model<commit_after>from . import db
from werkzeug.security import generate_password_hash, check_password_hash
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
password_hash = db.Column(db.String(128))
birth_date = db.Column(db.Date())
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email) |
f333f29d4170527c985bc695cd7b8331041769d5 | eva/layers/out_channels.py | eva/layers/out_channels.py | from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, activation='sigmoid', border_mode='valid')(model)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs | from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, border_mode='valid')(model)
outs = Activation('sigmoid')(outs)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs | Make mono channel output activation more readable | Make mono channel output activation more readable
| Python | apache-2.0 | israelg99/eva | from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, activation='sigmoid', border_mode='valid')(model)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outsMake mono channel output activation more readable | from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, border_mode='valid')(model)
outs = Activation('sigmoid')(outs)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs | <commit_before>from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, activation='sigmoid', border_mode='valid')(model)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs<commit_msg>Make mono channel output activation more readable<commit_after> | from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, border_mode='valid')(model)
outs = Activation('sigmoid')(outs)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs | from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, activation='sigmoid', border_mode='valid')(model)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outsMake mono channel output activation more readablefrom keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, border_mode='valid')(model)
outs = Activation('sigmoid')(outs)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs | <commit_before>from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, activation='sigmoid', border_mode='valid')(model)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs<commit_msg>Make mono channel output activation more readable<commit_after>from keras.layers import Convolution2D, Reshape, Lambda, Activation
from eva.layers.masked_convolution2d import MaskedConvolution2D
class OutChannels(object):
def __init__(self, height, width, channels, masked=False, palette=256):
self.height = height
self.width = width
self.channels = channels
self.cxp = MaskedConvolution2D if masked else Convolution2D
self.palette = palette
def __call__(self, model):
if self.channels == 1:
outs = Convolution2D(1, 1, 1, border_mode='valid')(model)
outs = Activation('sigmoid')(outs)
else:
out = self.cxp(self.palette*self.channels, 1, 1, border_mode='valid', name='channels_mult_palette')(model)
out = Reshape((self.height, self.width, self.palette, self.channels), name='palette_channels')(out)
outs = [None] * self.channels
for i in range(self.channels):
outs[i] = Lambda(lambda x: x[:, :, :, :, i], name='channel'+str(i)+'_extract')(out)
outs[i] = Reshape((self.height * self.width, self.palette), name='hw_palette'+str(i))(outs[i])
outs[i] = Activation('softmax', name='channel'+str(i))(outs[i])
return outs |
a21d484cc1131b56d793e75fbb6ab1531205dae6 | joueur/base_game_object.py | joueur/base_game_object.py | from joueur.delta_mergeable import DeltaMergeable
# the base class that every game object within a game inherit from for Python
# manipulation that would be redundant via Creer
class BaseGameObject(DeltaMergeable):
def __init__(self):
DeltaMergeable.__init__(self)
def __str__(self):
return "{} #{}".format(self.game_object_name, self.id)
def __repr__(self):
return str(self)
| from joueur.delta_mergeable import DeltaMergeable
# the base class that every game object within a game inherit from for Python
# manipulation that would be redundant via Creer
class BaseGameObject(DeltaMergeable):
def __init__(self):
DeltaMergeable.__init__(self)
def __str__(self):
return "{} #{}".format(self.game_object_name, self.id)
def __repr__(self):
return str(self)
def __hash__(self):
# id will always be unique server side anyways,
# so it should be safe to hash on
return hash(self.id)
| Update BaseGameObject to be hashable | Update BaseGameObject to be hashable
| Python | mit | JacobFischer/Joueur.py,siggame/Joueur.py,siggame/Joueur.py,JacobFischer/Joueur.py | from joueur.delta_mergeable import DeltaMergeable
# the base class that every game object within a game inherit from for Python
# manipulation that would be redundant via Creer
class BaseGameObject(DeltaMergeable):
def __init__(self):
DeltaMergeable.__init__(self)
def __str__(self):
return "{} #{}".format(self.game_object_name, self.id)
def __repr__(self):
return str(self)
Update BaseGameObject to be hashable | from joueur.delta_mergeable import DeltaMergeable
# the base class that every game object within a game inherit from for Python
# manipulation that would be redundant via Creer
class BaseGameObject(DeltaMergeable):
def __init__(self):
DeltaMergeable.__init__(self)
def __str__(self):
return "{} #{}".format(self.game_object_name, self.id)
def __repr__(self):
return str(self)
def __hash__(self):
# id will always be unique server side anyways,
# so it should be safe to hash on
return hash(self.id)
| <commit_before>from joueur.delta_mergeable import DeltaMergeable
# the base class that every game object within a game inherit from for Python
# manipulation that would be redundant via Creer
class BaseGameObject(DeltaMergeable):
def __init__(self):
DeltaMergeable.__init__(self)
def __str__(self):
return "{} #{}".format(self.game_object_name, self.id)
def __repr__(self):
return str(self)
<commit_msg>Update BaseGameObject to be hashable<commit_after> | from joueur.delta_mergeable import DeltaMergeable
# the base class that every game object within a game inherit from for Python
# manipulation that would be redundant via Creer
class BaseGameObject(DeltaMergeable):
def __init__(self):
DeltaMergeable.__init__(self)
def __str__(self):
return "{} #{}".format(self.game_object_name, self.id)
def __repr__(self):
return str(self)
def __hash__(self):
# id will always be unique server side anyways,
# so it should be safe to hash on
return hash(self.id)
| from joueur.delta_mergeable import DeltaMergeable
# the base class that every game object within a game inherit from for Python
# manipulation that would be redundant via Creer
class BaseGameObject(DeltaMergeable):
def __init__(self):
DeltaMergeable.__init__(self)
def __str__(self):
return "{} #{}".format(self.game_object_name, self.id)
def __repr__(self):
return str(self)
Update BaseGameObject to be hashablefrom joueur.delta_mergeable import DeltaMergeable
# the base class that every game object within a game inherit from for Python
# manipulation that would be redundant via Creer
class BaseGameObject(DeltaMergeable):
def __init__(self):
DeltaMergeable.__init__(self)
def __str__(self):
return "{} #{}".format(self.game_object_name, self.id)
def __repr__(self):
return str(self)
def __hash__(self):
# id will always be unique server side anyways,
# so it should be safe to hash on
return hash(self.id)
| <commit_before>from joueur.delta_mergeable import DeltaMergeable
# the base class that every game object within a game inherit from for Python
# manipulation that would be redundant via Creer
class BaseGameObject(DeltaMergeable):
def __init__(self):
DeltaMergeable.__init__(self)
def __str__(self):
return "{} #{}".format(self.game_object_name, self.id)
def __repr__(self):
return str(self)
<commit_msg>Update BaseGameObject to be hashable<commit_after>from joueur.delta_mergeable import DeltaMergeable
# the base class that every game object within a game inherit from for Python
# manipulation that would be redundant via Creer
class BaseGameObject(DeltaMergeable):
def __init__(self):
DeltaMergeable.__init__(self)
def __str__(self):
return "{} #{}".format(self.game_object_name, self.id)
def __repr__(self):
return str(self)
def __hash__(self):
# id will always be unique server side anyways,
# so it should be safe to hash on
return hash(self.id)
|
aa9cb1bc1a04de4e4a4a787881123e2a60aaeb4e | docs/apps.py | docs/apps.py | import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
},
)
| import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
'timeout': 60.0,
},
)
| Increase the ES timeout to 1 minute. | Increase the ES timeout to 1 minute.
| Python | bsd-3-clause | rmoorman/djangoproject.com,hassanabidpk/djangoproject.com,relekang/djangoproject.com,django/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,hassanabidpk/djangoproject.com,gnarf/djangoproject.com,vxvinh1511/djangoproject.com,xavierdutreilh/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com,django/djangoproject.com,nanuxbe/django,rmoorman/djangoproject.com,relekang/djangoproject.com,alawnchen/djangoproject.com,rmoorman/djangoproject.com,rmoorman/djangoproject.com,django/djangoproject.com,vxvinh1511/djangoproject.com,khkaminska/djangoproject.com,nanuxbe/django,xavierdutreilh/djangoproject.com,hassanabidpk/djangoproject.com,xavierdutreilh/djangoproject.com,vxvinh1511/djangoproject.com,alawnchen/djangoproject.com,relekang/djangoproject.com,khkaminska/djangoproject.com,relekang/djangoproject.com,khkaminska/djangoproject.com,vxvinh1511/djangoproject.com,nanuxbe/django,xavierdutreilh/djangoproject.com,alawnchen/djangoproject.com,gnarf/djangoproject.com,nanuxbe/django,alawnchen/djangoproject.com,django/djangoproject.com,gnarf/djangoproject.com,gnarf/djangoproject.com | import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
},
)
Increase the ES timeout to 1 minute. | import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
'timeout': 60.0,
},
)
| <commit_before>import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
},
)
<commit_msg>Increase the ES timeout to 1 minute.<commit_after> | import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
'timeout': 60.0,
},
)
| import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
},
)
Increase the ES timeout to 1 minute.import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
'timeout': 60.0,
},
)
| <commit_before>import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
},
)
<commit_msg>Increase the ES timeout to 1 minute.<commit_after>import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
'timeout': 60.0,
},
)
|
2a2b11f78ee64cd54cdfb8d105606215c42dcaa5 | bot.py | bot.py |
import tweepy
from secrets import *
# standard for accessing Twitter API
auth = tweepy.OAuthHandler(C_KEY, C_SECRET)
auth.set_access_token(A_TOKEN, A_TOKEN_SECRET)
api = tweepy.API(auth)
| Update 0.0.1 - Added imports - Added standards for accessing Twitter API | Update 0.0.1
- Added imports
- Added standards for accessing Twitter API
| Python | mit | FXelix/space_facts_bot |
Update 0.0.1
- Added imports
- Added standards for accessing Twitter API |
import tweepy
from secrets import *
# standard for accessing Twitter API
auth = tweepy.OAuthHandler(C_KEY, C_SECRET)
auth.set_access_token(A_TOKEN, A_TOKEN_SECRET)
api = tweepy.API(auth)
| <commit_before>
<commit_msg>Update 0.0.1
- Added imports
- Added standards for accessing Twitter API<commit_after> |
import tweepy
from secrets import *
# standard for accessing Twitter API
auth = tweepy.OAuthHandler(C_KEY, C_SECRET)
auth.set_access_token(A_TOKEN, A_TOKEN_SECRET)
api = tweepy.API(auth)
|
Update 0.0.1
- Added imports
- Added standards for accessing Twitter API
import tweepy
from secrets import *
# standard for accessing Twitter API
auth = tweepy.OAuthHandler(C_KEY, C_SECRET)
auth.set_access_token(A_TOKEN, A_TOKEN_SECRET)
api = tweepy.API(auth)
| <commit_before>
<commit_msg>Update 0.0.1
- Added imports
- Added standards for accessing Twitter API<commit_after>
import tweepy
from secrets import *
# standard for accessing Twitter API
auth = tweepy.OAuthHandler(C_KEY, C_SECRET)
auth.set_access_token(A_TOKEN, A_TOKEN_SECRET)
api = tweepy.API(auth)
| |
9efa42ae172e240c01be9caed5bb942bc0459494 | tools/perf/benchmarks/session_restore.py | tools/perf/benchmarks/session_restore.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import session_restore
from telemetry import test
# crbug.com/325479: Disabling this test for now since it never ran before.
@test.Disabled('android', 'linux')
class SessionRestoreColdTypical25(test.Test):
tag = 'cold'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'cold': True,
'pageset_repeat': 5}
class SessionRestoreWarmTypical25(test.Test):
tag = 'warm'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'warm': True,
'pageset_repeat': 20}
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import session_restore
from telemetry import test
@test.Disabled('android') # crbug.com/325479
class SessionRestoreColdTypical25(test.Test):
tag = 'cold'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'cold': True,
'pageset_repeat': 5}
class SessionRestoreWarmTypical25(test.Test):
tag = 'warm'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'warm': True,
'pageset_repeat': 20}
| Enable session restore benchmark on linux. | [Telemetry] Enable session restore benchmark on linux.
I believe it was disabled because the clear_system_cache binary wasn't checked
into cloud storage yet. It is now.
BUG=325479
Review URL: https://codereview.chromium.org/311053006
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@275163 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | dushu1203/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,dednal/chromium.src,bright-sparks/chromium-spacewalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,krieger-od/nwjs_chromium.src,littlstar/chromium.src,dushu1203/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,markYoungH/chromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,Jonekee/chromium.src,ondra-novak/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,ondra-novak/chromium
.src,hgl888/chromium-crosswalk,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,ltilve/chromium,krieger-od/nwjs_chromium.src,jaruba/chromium.src,dednal/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,jaruba/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,krieger-od/nwjs_chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,littlstar/chromium.src,jaruba/chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,Chilledheart/chromium,Chilledheart/chromium,Jonekee/chromium.src,littlstar/chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,M4sse/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,littlstar/chromium.src,dushu1203/chromium.src,ltilve/chromium,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,littlstar/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,crosswalk-p
roject/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,ltilve/chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,ltilve/chromium,dushu1203/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,markYoungH/chromium.src,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,dednal/chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,Just-D/chromium-1,M4sse/chromium.sr
c,dednal/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import session_restore
from telemetry import test
# crbug.com/325479: Disabling this test for now since it never ran before.
@test.Disabled('android', 'linux')
class SessionRestoreColdTypical25(test.Test):
tag = 'cold'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'cold': True,
'pageset_repeat': 5}
class SessionRestoreWarmTypical25(test.Test):
tag = 'warm'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'warm': True,
'pageset_repeat': 20}
[Telemetry] Enable session restore benchmark on linux.
I believe it was disabled because the clear_system_cache binary wasn't checked
into cloud storage yet. It is now.
BUG=325479
Review URL: https://codereview.chromium.org/311053006
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@275163 0039d316-1c4b-4281-b951-d872f2087c98 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import session_restore
from telemetry import test
@test.Disabled('android') # crbug.com/325479
class SessionRestoreColdTypical25(test.Test):
tag = 'cold'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'cold': True,
'pageset_repeat': 5}
class SessionRestoreWarmTypical25(test.Test):
tag = 'warm'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'warm': True,
'pageset_repeat': 20}
| <commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import session_restore
from telemetry import test
# crbug.com/325479: Disabling this test for now since it never ran before.
@test.Disabled('android', 'linux')
class SessionRestoreColdTypical25(test.Test):
tag = 'cold'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'cold': True,
'pageset_repeat': 5}
class SessionRestoreWarmTypical25(test.Test):
tag = 'warm'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'warm': True,
'pageset_repeat': 20}
<commit_msg>[Telemetry] Enable session restore benchmark on linux.
I believe it was disabled because the clear_system_cache binary wasn't checked
into cloud storage yet. It is now.
BUG=325479
Review URL: https://codereview.chromium.org/311053006
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@275163 0039d316-1c4b-4281-b951-d872f2087c98<commit_after> | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import session_restore
from telemetry import test
@test.Disabled('android') # crbug.com/325479
class SessionRestoreColdTypical25(test.Test):
tag = 'cold'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'cold': True,
'pageset_repeat': 5}
class SessionRestoreWarmTypical25(test.Test):
tag = 'warm'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'warm': True,
'pageset_repeat': 20}
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import session_restore
from telemetry import test
# crbug.com/325479: Disabling this test for now since it never ran before.
@test.Disabled('android', 'linux')
class SessionRestoreColdTypical25(test.Test):
tag = 'cold'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'cold': True,
'pageset_repeat': 5}
class SessionRestoreWarmTypical25(test.Test):
tag = 'warm'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'warm': True,
'pageset_repeat': 20}
[Telemetry] Enable session restore benchmark on linux.
I believe it was disabled because the clear_system_cache binary wasn't checked
into cloud storage yet. It is now.
BUG=325479
Review URL: https://codereview.chromium.org/311053006
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@275163 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import session_restore
from telemetry import test
@test.Disabled('android') # crbug.com/325479
class SessionRestoreColdTypical25(test.Test):
tag = 'cold'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'cold': True,
'pageset_repeat': 5}
class SessionRestoreWarmTypical25(test.Test):
tag = 'warm'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'warm': True,
'pageset_repeat': 20}
| <commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import session_restore
from telemetry import test
# crbug.com/325479: Disabling this test for now since it never ran before.
@test.Disabled('android', 'linux')
class SessionRestoreColdTypical25(test.Test):
tag = 'cold'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'cold': True,
'pageset_repeat': 5}
class SessionRestoreWarmTypical25(test.Test):
tag = 'warm'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'warm': True,
'pageset_repeat': 20}
<commit_msg>[Telemetry] Enable session restore benchmark on linux.
I believe it was disabled because the clear_system_cache binary wasn't checked
into cloud storage yet. It is now.
BUG=325479
Review URL: https://codereview.chromium.org/311053006
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@275163 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import session_restore
from telemetry import test
@test.Disabled('android') # crbug.com/325479
class SessionRestoreColdTypical25(test.Test):
tag = 'cold'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'cold': True,
'pageset_repeat': 5}
class SessionRestoreWarmTypical25(test.Test):
tag = 'warm'
test = session_restore.SessionRestore
page_set = 'page_sets/typical_25.py'
options = {'warm': True,
'pageset_repeat': 20}
|
763d9f8ef45aff357e318d73cfd10512228d85f3 | src/zeit/content/article/edit/browser/tests/test_preview.py | src/zeit/content/article/edit/browser/tests/test_preview.py | # Copyright (c) 2012 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.article.testing
class Preview(zeit.content.article.testing.SeleniumTestCase):
def test_selected_tab_is_stored_across_reload(self):
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
selected_tab = 'css=#preview-tabs .ui-tabs-selected'
s.waitForElementPresent(selected_tab)
s.assertText(selected_tab, 'iPad')
s.click('css=#preview-tabs a:contains("Desktop")')
s.waitForText(selected_tab, 'Desktop')
s.refresh()
s.waitForElementPresent(selected_tab)
s.assertText(selected_tab, 'Desktop')
| # Copyright (c) 2012 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.article.testing
class Preview(zeit.content.article.testing.SeleniumTestCase):
def test_selected_tab_is_stored_across_reload(self):
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
selected_tab = 'css=#preview-tabs .ui-tabs-active'
s.waitForElementPresent(selected_tab)
s.waitForText(selected_tab, 'iPad')
s.click('css=#preview-tabs a:contains("Desktop")')
s.waitForText(selected_tab, 'Desktop')
s.refresh()
s.waitForElementPresent(selected_tab)
s.waitForText(selected_tab, 'Desktop')
| Update test to work with current jquery-ui | Update test to work with current jquery-ui
| Python | bsd-3-clause | ZeitOnline/zeit.content.article,ZeitOnline/zeit.content.article,ZeitOnline/zeit.content.article | # Copyright (c) 2012 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.article.testing
class Preview(zeit.content.article.testing.SeleniumTestCase):
def test_selected_tab_is_stored_across_reload(self):
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
selected_tab = 'css=#preview-tabs .ui-tabs-selected'
s.waitForElementPresent(selected_tab)
s.assertText(selected_tab, 'iPad')
s.click('css=#preview-tabs a:contains("Desktop")')
s.waitForText(selected_tab, 'Desktop')
s.refresh()
s.waitForElementPresent(selected_tab)
s.assertText(selected_tab, 'Desktop')
Update test to work with current jquery-ui | # Copyright (c) 2012 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.article.testing
class Preview(zeit.content.article.testing.SeleniumTestCase):
def test_selected_tab_is_stored_across_reload(self):
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
selected_tab = 'css=#preview-tabs .ui-tabs-active'
s.waitForElementPresent(selected_tab)
s.waitForText(selected_tab, 'iPad')
s.click('css=#preview-tabs a:contains("Desktop")')
s.waitForText(selected_tab, 'Desktop')
s.refresh()
s.waitForElementPresent(selected_tab)
s.waitForText(selected_tab, 'Desktop')
| <commit_before># Copyright (c) 2012 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.article.testing
class Preview(zeit.content.article.testing.SeleniumTestCase):
def test_selected_tab_is_stored_across_reload(self):
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
selected_tab = 'css=#preview-tabs .ui-tabs-selected'
s.waitForElementPresent(selected_tab)
s.assertText(selected_tab, 'iPad')
s.click('css=#preview-tabs a:contains("Desktop")')
s.waitForText(selected_tab, 'Desktop')
s.refresh()
s.waitForElementPresent(selected_tab)
s.assertText(selected_tab, 'Desktop')
<commit_msg>Update test to work with current jquery-ui<commit_after> | # Copyright (c) 2012 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.article.testing
class Preview(zeit.content.article.testing.SeleniumTestCase):
def test_selected_tab_is_stored_across_reload(self):
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
selected_tab = 'css=#preview-tabs .ui-tabs-active'
s.waitForElementPresent(selected_tab)
s.waitForText(selected_tab, 'iPad')
s.click('css=#preview-tabs a:contains("Desktop")')
s.waitForText(selected_tab, 'Desktop')
s.refresh()
s.waitForElementPresent(selected_tab)
s.waitForText(selected_tab, 'Desktop')
| # Copyright (c) 2012 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.article.testing
class Preview(zeit.content.article.testing.SeleniumTestCase):
def test_selected_tab_is_stored_across_reload(self):
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
selected_tab = 'css=#preview-tabs .ui-tabs-selected'
s.waitForElementPresent(selected_tab)
s.assertText(selected_tab, 'iPad')
s.click('css=#preview-tabs a:contains("Desktop")')
s.waitForText(selected_tab, 'Desktop')
s.refresh()
s.waitForElementPresent(selected_tab)
s.assertText(selected_tab, 'Desktop')
Update test to work with current jquery-ui# Copyright (c) 2012 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.article.testing
class Preview(zeit.content.article.testing.SeleniumTestCase):
def test_selected_tab_is_stored_across_reload(self):
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
selected_tab = 'css=#preview-tabs .ui-tabs-active'
s.waitForElementPresent(selected_tab)
s.waitForText(selected_tab, 'iPad')
s.click('css=#preview-tabs a:contains("Desktop")')
s.waitForText(selected_tab, 'Desktop')
s.refresh()
s.waitForElementPresent(selected_tab)
s.waitForText(selected_tab, 'Desktop')
| <commit_before># Copyright (c) 2012 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.article.testing
class Preview(zeit.content.article.testing.SeleniumTestCase):
def test_selected_tab_is_stored_across_reload(self):
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
selected_tab = 'css=#preview-tabs .ui-tabs-selected'
s.waitForElementPresent(selected_tab)
s.assertText(selected_tab, 'iPad')
s.click('css=#preview-tabs a:contains("Desktop")')
s.waitForText(selected_tab, 'Desktop')
s.refresh()
s.waitForElementPresent(selected_tab)
s.assertText(selected_tab, 'Desktop')
<commit_msg>Update test to work with current jquery-ui<commit_after># Copyright (c) 2012 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.article.testing
class Preview(zeit.content.article.testing.SeleniumTestCase):
def test_selected_tab_is_stored_across_reload(self):
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
selected_tab = 'css=#preview-tabs .ui-tabs-active'
s.waitForElementPresent(selected_tab)
s.waitForText(selected_tab, 'iPad')
s.click('css=#preview-tabs a:contains("Desktop")')
s.waitForText(selected_tab, 'Desktop')
s.refresh()
s.waitForElementPresent(selected_tab)
s.waitForText(selected_tab, 'Desktop')
|
23ceddaff1752797fe775df950f6e62769b285a6 | foyer/tests/test_plugin.py | foyer/tests/test_plugin.py | import pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_name', ['OPLSAA', 'TRAPPE_UA'])
def test_forcefields_exist(ff_name):
ff_name in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
| import pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_loader', ['load_OPLSAA', 'load_TRAPPE_UA'])
def test_forcefields_exist(ff_loader):
assert ff_loader in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
| Update test to properly assert loaders | Update test to properly assert loaders
| Python | mit | iModels/foyer,mosdef-hub/foyer,mosdef-hub/foyer,iModels/foyer | import pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_name', ['OPLSAA', 'TRAPPE_UA'])
def test_forcefields_exist(ff_name):
ff_name in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
Update test to properly assert loaders | import pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_loader', ['load_OPLSAA', 'load_TRAPPE_UA'])
def test_forcefields_exist(ff_loader):
assert ff_loader in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
| <commit_before>import pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_name', ['OPLSAA', 'TRAPPE_UA'])
def test_forcefields_exist(ff_name):
ff_name in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
<commit_msg>Update test to properly assert loaders<commit_after> | import pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_loader', ['load_OPLSAA', 'load_TRAPPE_UA'])
def test_forcefields_exist(ff_loader):
assert ff_loader in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
| import pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_name', ['OPLSAA', 'TRAPPE_UA'])
def test_forcefields_exist(ff_name):
ff_name in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
Update test to properly assert loadersimport pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_loader', ['load_OPLSAA', 'load_TRAPPE_UA'])
def test_forcefields_exist(ff_loader):
assert ff_loader in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
| <commit_before>import pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_name', ['OPLSAA', 'TRAPPE_UA'])
def test_forcefields_exist(ff_name):
ff_name in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
<commit_msg>Update test to properly assert loaders<commit_after>import pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_loader', ['load_OPLSAA', 'load_TRAPPE_UA'])
def test_forcefields_exist(ff_loader):
assert ff_loader in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
|
183aacf12405eec38ba8b2193f8f89904d415c4a | yagocd/resources/base.py | yagocd/resources/base.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
if __name__ == '__main__':
pass
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
def __str__(self):
return self.data.__str__()
def __repr__(self):
return self.data.__repr__()
if __name__ == '__main__':
pass
| Return internal data for string representation. | Return internal data for string representation.
| Python | isc | grundic/yagocd,grundic/yagocd | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
if __name__ == '__main__':
pass
Return internal data for string representation. | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
def __str__(self):
return self.data.__str__()
def __repr__(self):
return self.data.__repr__()
if __name__ == '__main__':
pass
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
if __name__ == '__main__':
pass
<commit_msg>Return internal data for string representation.<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
def __str__(self):
return self.data.__str__()
def __repr__(self):
return self.data.__repr__()
if __name__ == '__main__':
pass
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
if __name__ == '__main__':
pass
Return internal data for string representation.#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
def __str__(self):
return self.data.__str__()
def __repr__(self):
return self.data.__repr__()
if __name__ == '__main__':
pass
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
if __name__ == '__main__':
pass
<commit_msg>Return internal data for string representation.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from easydict import EasyDict
class Base(object):
def __init__(self, session, data):
self._session = session
self._data = EasyDict(data)
self.base_api = self._session.base_api()
@property
def data(self):
return self._data
def __str__(self):
return self.data.__str__()
def __repr__(self):
return self.data.__repr__()
if __name__ == '__main__':
pass
|
77fd12a850fbca0b3308e964e457f234d12d7c11 | src/wad.blog/wad/blog/utils.py | src/wad.blog/wad/blog/utils.py | from zope.component import getUtility, getMultiAdapter, ComponentLookupError
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = context.aq_inner
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager, manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is assignment.aq_base:
return context
except ComponentLookupError:
pass
parent = context.aq_parent
if parent is context:
return None
context = parent
| from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import aq_base
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
from zope.component import getUtility, getMultiAdapter, ComponentLookupError
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = aq_inner(context)
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager,
manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is aq_base(assignment):
return context
except ComponentLookupError:
pass
parent = aq_parent(context)
if parent is context:
return None
context = parent
| Fix portlet assignment context utility | Fix portlet assignment context utility
| Python | mit | potzenheimer/buildout.wad,potzenheimer/buildout.wad | from zope.component import getUtility, getMultiAdapter, ComponentLookupError
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = context.aq_inner
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager, manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is assignment.aq_base:
return context
except ComponentLookupError:
pass
parent = context.aq_parent
if parent is context:
return None
context = parent
Fix portlet assignment context utility | from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import aq_base
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
from zope.component import getUtility, getMultiAdapter, ComponentLookupError
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = aq_inner(context)
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager,
manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is aq_base(assignment):
return context
except ComponentLookupError:
pass
parent = aq_parent(context)
if parent is context:
return None
context = parent
| <commit_before>from zope.component import getUtility, getMultiAdapter, ComponentLookupError
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = context.aq_inner
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager, manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is assignment.aq_base:
return context
except ComponentLookupError:
pass
parent = context.aq_parent
if parent is context:
return None
context = parent
<commit_msg>Fix portlet assignment context utility<commit_after> | from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import aq_base
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
from zope.component import getUtility, getMultiAdapter, ComponentLookupError
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = aq_inner(context)
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager,
manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is aq_base(assignment):
return context
except ComponentLookupError:
pass
parent = aq_parent(context)
if parent is context:
return None
context = parent
| from zope.component import getUtility, getMultiAdapter, ComponentLookupError
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = context.aq_inner
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager, manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is assignment.aq_base:
return context
except ComponentLookupError:
pass
parent = context.aq_parent
if parent is context:
return None
context = parent
Fix portlet assignment context utilityfrom Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import aq_base
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
from zope.component import getUtility, getMultiAdapter, ComponentLookupError
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = aq_inner(context)
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager,
manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is aq_base(assignment):
return context
except ComponentLookupError:
pass
parent = aq_parent(context)
if parent is context:
return None
context = parent
| <commit_before>from zope.component import getUtility, getMultiAdapter, ComponentLookupError
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = context.aq_inner
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager, manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is assignment.aq_base:
return context
except ComponentLookupError:
pass
parent = context.aq_parent
if parent is context:
return None
context = parent
<commit_msg>Fix portlet assignment context utility<commit_after>from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import aq_base
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
from zope.component import getUtility, getMultiAdapter, ComponentLookupError
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = aq_inner(context)
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager,
manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is aq_base(assignment):
return context
except ComponentLookupError:
pass
parent = aq_parent(context)
if parent is context:
return None
context = parent
|
eea0a6bc56d69377519e5441074f32f5eb9fb01e | examples/storage/ext_flash_fatfs/example_test.py | examples/storage/ext_flash_fatfs/example_test.py | from __future__ import print_function
import os
import sys
try:
import IDF
except ImportError:
test_fw_path = os.getenv('TEST_FW_PATH')
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
@IDF.idf_example_test(env_tag='Example_ExtFlash')
def test_examples_storage_ext_flash_fatfs(env, extra_data):
dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs')
dut.start_app()
dut.expect('Initialized external Flash')
dut.expect('partition \'nvs\'')
dut.expect('partition \'storage\'')
dut.expect('File written')
dut.expect('Read from file: \'Written using ESP-IDF')
if __name__ == '__main__':
test_examples_storage_ext_flash_fatfs()
| from __future__ import print_function
import os
import sys
try:
import IDF
except ImportError:
test_fw_path = os.getenv('TEST_FW_PATH')
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
from IDF.IDFDUT import ESP32DUT
@IDF.idf_example_test(env_tag='Example_ExtFlash')
def test_examples_storage_ext_flash_fatfs(env, extra_data):
dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs', dut_class=ESP32DUT)
dut.start_app()
dut.expect('Initialized external Flash')
dut.expect('partition \'nvs\'')
dut.expect('partition \'storage\'')
dut.expect('File written')
dut.expect('Read from file: \'Written using ESP-IDF')
if __name__ == '__main__':
test_examples_storage_ext_flash_fatfs()
| Add ESP32 DUT class to ext_flash_fatfs example test | examples: Add ESP32 DUT class to ext_flash_fatfs example test
| Python | apache-2.0 | espressif/esp-idf,espressif/esp-idf,espressif/esp-idf,espressif/esp-idf | from __future__ import print_function
import os
import sys
try:
import IDF
except ImportError:
test_fw_path = os.getenv('TEST_FW_PATH')
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
@IDF.idf_example_test(env_tag='Example_ExtFlash')
def test_examples_storage_ext_flash_fatfs(env, extra_data):
dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs')
dut.start_app()
dut.expect('Initialized external Flash')
dut.expect('partition \'nvs\'')
dut.expect('partition \'storage\'')
dut.expect('File written')
dut.expect('Read from file: \'Written using ESP-IDF')
if __name__ == '__main__':
test_examples_storage_ext_flash_fatfs()
examples: Add ESP32 DUT class to ext_flash_fatfs example test | from __future__ import print_function
import os
import sys
try:
import IDF
except ImportError:
test_fw_path = os.getenv('TEST_FW_PATH')
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
from IDF.IDFDUT import ESP32DUT
@IDF.idf_example_test(env_tag='Example_ExtFlash')
def test_examples_storage_ext_flash_fatfs(env, extra_data):
dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs', dut_class=ESP32DUT)
dut.start_app()
dut.expect('Initialized external Flash')
dut.expect('partition \'nvs\'')
dut.expect('partition \'storage\'')
dut.expect('File written')
dut.expect('Read from file: \'Written using ESP-IDF')
if __name__ == '__main__':
test_examples_storage_ext_flash_fatfs()
| <commit_before>from __future__ import print_function
import os
import sys
try:
import IDF
except ImportError:
test_fw_path = os.getenv('TEST_FW_PATH')
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
@IDF.idf_example_test(env_tag='Example_ExtFlash')
def test_examples_storage_ext_flash_fatfs(env, extra_data):
dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs')
dut.start_app()
dut.expect('Initialized external Flash')
dut.expect('partition \'nvs\'')
dut.expect('partition \'storage\'')
dut.expect('File written')
dut.expect('Read from file: \'Written using ESP-IDF')
if __name__ == '__main__':
test_examples_storage_ext_flash_fatfs()
<commit_msg>examples: Add ESP32 DUT class to ext_flash_fatfs example test<commit_after> | from __future__ import print_function
import os
import sys
try:
import IDF
except ImportError:
test_fw_path = os.getenv('TEST_FW_PATH')
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
from IDF.IDFDUT import ESP32DUT
@IDF.idf_example_test(env_tag='Example_ExtFlash')
def test_examples_storage_ext_flash_fatfs(env, extra_data):
dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs', dut_class=ESP32DUT)
dut.start_app()
dut.expect('Initialized external Flash')
dut.expect('partition \'nvs\'')
dut.expect('partition \'storage\'')
dut.expect('File written')
dut.expect('Read from file: \'Written using ESP-IDF')
if __name__ == '__main__':
test_examples_storage_ext_flash_fatfs()
| from __future__ import print_function
import os
import sys
try:
import IDF
except ImportError:
test_fw_path = os.getenv('TEST_FW_PATH')
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
@IDF.idf_example_test(env_tag='Example_ExtFlash')
def test_examples_storage_ext_flash_fatfs(env, extra_data):
dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs')
dut.start_app()
dut.expect('Initialized external Flash')
dut.expect('partition \'nvs\'')
dut.expect('partition \'storage\'')
dut.expect('File written')
dut.expect('Read from file: \'Written using ESP-IDF')
if __name__ == '__main__':
test_examples_storage_ext_flash_fatfs()
examples: Add ESP32 DUT class to ext_flash_fatfs example testfrom __future__ import print_function
import os
import sys
try:
import IDF
except ImportError:
test_fw_path = os.getenv('TEST_FW_PATH')
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
from IDF.IDFDUT import ESP32DUT
@IDF.idf_example_test(env_tag='Example_ExtFlash')
def test_examples_storage_ext_flash_fatfs(env, extra_data):
dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs', dut_class=ESP32DUT)
dut.start_app()
dut.expect('Initialized external Flash')
dut.expect('partition \'nvs\'')
dut.expect('partition \'storage\'')
dut.expect('File written')
dut.expect('Read from file: \'Written using ESP-IDF')
if __name__ == '__main__':
test_examples_storage_ext_flash_fatfs()
| <commit_before>from __future__ import print_function
import os
import sys
try:
import IDF
except ImportError:
test_fw_path = os.getenv('TEST_FW_PATH')
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
@IDF.idf_example_test(env_tag='Example_ExtFlash')
def test_examples_storage_ext_flash_fatfs(env, extra_data):
dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs')
dut.start_app()
dut.expect('Initialized external Flash')
dut.expect('partition \'nvs\'')
dut.expect('partition \'storage\'')
dut.expect('File written')
dut.expect('Read from file: \'Written using ESP-IDF')
if __name__ == '__main__':
test_examples_storage_ext_flash_fatfs()
<commit_msg>examples: Add ESP32 DUT class to ext_flash_fatfs example test<commit_after>from __future__ import print_function
import os
import sys
try:
import IDF
except ImportError:
test_fw_path = os.getenv('TEST_FW_PATH')
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
from IDF.IDFDUT import ESP32DUT
@IDF.idf_example_test(env_tag='Example_ExtFlash')
def test_examples_storage_ext_flash_fatfs(env, extra_data):
dut = env.get_dut('ext_flash_fatfs', 'examples/storage/ext_flash_fatfs', dut_class=ESP32DUT)
dut.start_app()
dut.expect('Initialized external Flash')
dut.expect('partition \'nvs\'')
dut.expect('partition \'storage\'')
dut.expect('File written')
dut.expect('Read from file: \'Written using ESP-IDF')
if __name__ == '__main__':
test_examples_storage_ext_flash_fatfs()
|
5a310285c6e528555136a95221b628827d04cb81 | l10n_br_base/__init__.py | l10n_br_base/__init__.py | # Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import models
from . import tests
| # Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import models
from . import tests
from odoo.addons import account
from odoo import api, SUPERUSER_ID
# Install Simple Chart of Account Template for Brazilian Companies
_auto_install_l10n_original = account._auto_install_l10n
def _auto_install_l10n_br_simple(cr, registry):
env = api.Environment(cr, SUPERUSER_ID, {})
country_code = env.user.company_id.country_id.code
if country_code and country_code.upper() == "BR":
module_ids = env["ir.module.module"].search(
[("name", "in", ("l10n_br_simple",)), ("state", "=", "uninstalled")]
)
module_ids.sudo().button_install()
else:
_auto_install_l10n_original(cr, registry)
account._auto_install_l10n = _auto_install_l10n_br_simple
| Define l10n_br_simple as default COA for brazilian companies | Define l10n_br_simple as default COA for brazilian companies
| Python | agpl-3.0 | akretion/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil | # Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import models
from . import tests
Define l10n_br_simple as default COA for brazilian companies | # Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import models
from . import tests
from odoo.addons import account
from odoo import api, SUPERUSER_ID
# Install Simple Chart of Account Template for Brazilian Companies
_auto_install_l10n_original = account._auto_install_l10n
def _auto_install_l10n_br_simple(cr, registry):
env = api.Environment(cr, SUPERUSER_ID, {})
country_code = env.user.company_id.country_id.code
if country_code and country_code.upper() == "BR":
module_ids = env["ir.module.module"].search(
[("name", "in", ("l10n_br_simple",)), ("state", "=", "uninstalled")]
)
module_ids.sudo().button_install()
else:
_auto_install_l10n_original(cr, registry)
account._auto_install_l10n = _auto_install_l10n_br_simple
| <commit_before># Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import models
from . import tests
<commit_msg>Define l10n_br_simple as default COA for brazilian companies<commit_after> | # Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import models
from . import tests
from odoo.addons import account
from odoo import api, SUPERUSER_ID
# Install Simple Chart of Account Template for Brazilian Companies
_auto_install_l10n_original = account._auto_install_l10n
def _auto_install_l10n_br_simple(cr, registry):
env = api.Environment(cr, SUPERUSER_ID, {})
country_code = env.user.company_id.country_id.code
if country_code and country_code.upper() == "BR":
module_ids = env["ir.module.module"].search(
[("name", "in", ("l10n_br_simple",)), ("state", "=", "uninstalled")]
)
module_ids.sudo().button_install()
else:
_auto_install_l10n_original(cr, registry)
account._auto_install_l10n = _auto_install_l10n_br_simple
| # Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import models
from . import tests
Define l10n_br_simple as default COA for brazilian companies# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import models
from . import tests
from odoo.addons import account
from odoo import api, SUPERUSER_ID
# Install Simple Chart of Account Template for Brazilian Companies
_auto_install_l10n_original = account._auto_install_l10n
def _auto_install_l10n_br_simple(cr, registry):
env = api.Environment(cr, SUPERUSER_ID, {})
country_code = env.user.company_id.country_id.code
if country_code and country_code.upper() == "BR":
module_ids = env["ir.module.module"].search(
[("name", "in", ("l10n_br_simple",)), ("state", "=", "uninstalled")]
)
module_ids.sudo().button_install()
else:
_auto_install_l10n_original(cr, registry)
account._auto_install_l10n = _auto_install_l10n_br_simple
| <commit_before># Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import models
from . import tests
<commit_msg>Define l10n_br_simple as default COA for brazilian companies<commit_after># Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import models
from . import tests
from odoo.addons import account
from odoo import api, SUPERUSER_ID
# Install Simple Chart of Account Template for Brazilian Companies
_auto_install_l10n_original = account._auto_install_l10n
def _auto_install_l10n_br_simple(cr, registry):
env = api.Environment(cr, SUPERUSER_ID, {})
country_code = env.user.company_id.country_id.code
if country_code and country_code.upper() == "BR":
module_ids = env["ir.module.module"].search(
[("name", "in", ("l10n_br_simple",)), ("state", "=", "uninstalled")]
)
module_ids.sudo().button_install()
else:
_auto_install_l10n_original(cr, registry)
account._auto_install_l10n = _auto_install_l10n_br_simple
|
08797de13a88bc742d905f2067df533a1a319c83 | yawf/revision/models.py | yawf/revision/models.py | from django.db import models
from django.contrib.contenttypes import generic
class RevisionModelMixin(models.Model):
class Meta:
abstract = True
_has_revision_support = True
revision = models.PositiveIntegerField(default=0,
db_index=True, editable=False)
versions = generic.GenericRelation('reversion.Version')
def save(self, *args, **kwargs):
self.revision += 1
super(RevisionModelMixin, self).save(*args, **kwargs)
| from django.db import models
class RevisionModelMixin(models.Model):
class Meta:
abstract = True
_has_revision_support = True
revision = models.PositiveIntegerField(default=0,
db_index=True, editable=False)
def save(self, *args, **kwargs):
self.revision += 1
super(RevisionModelMixin, self).save(*args, **kwargs)
| Remove generic relation to reversion.Version from RevisionModelMixin | Remove generic relation to reversion.Version from RevisionModelMixin
| Python | mit | freevoid/yawf | from django.db import models
from django.contrib.contenttypes import generic
class RevisionModelMixin(models.Model):
class Meta:
abstract = True
_has_revision_support = True
revision = models.PositiveIntegerField(default=0,
db_index=True, editable=False)
versions = generic.GenericRelation('reversion.Version')
def save(self, *args, **kwargs):
self.revision += 1
super(RevisionModelMixin, self).save(*args, **kwargs)
Remove generic relation to reversion.Version from RevisionModelMixin | from django.db import models
class RevisionModelMixin(models.Model):
class Meta:
abstract = True
_has_revision_support = True
revision = models.PositiveIntegerField(default=0,
db_index=True, editable=False)
def save(self, *args, **kwargs):
self.revision += 1
super(RevisionModelMixin, self).save(*args, **kwargs)
| <commit_before>from django.db import models
from django.contrib.contenttypes import generic
class RevisionModelMixin(models.Model):
class Meta:
abstract = True
_has_revision_support = True
revision = models.PositiveIntegerField(default=0,
db_index=True, editable=False)
versions = generic.GenericRelation('reversion.Version')
def save(self, *args, **kwargs):
self.revision += 1
super(RevisionModelMixin, self).save(*args, **kwargs)
<commit_msg>Remove generic relation to reversion.Version from RevisionModelMixin<commit_after> | from django.db import models
class RevisionModelMixin(models.Model):
class Meta:
abstract = True
_has_revision_support = True
revision = models.PositiveIntegerField(default=0,
db_index=True, editable=False)
def save(self, *args, **kwargs):
self.revision += 1
super(RevisionModelMixin, self).save(*args, **kwargs)
| from django.db import models
from django.contrib.contenttypes import generic
class RevisionModelMixin(models.Model):
class Meta:
abstract = True
_has_revision_support = True
revision = models.PositiveIntegerField(default=0,
db_index=True, editable=False)
versions = generic.GenericRelation('reversion.Version')
def save(self, *args, **kwargs):
self.revision += 1
super(RevisionModelMixin, self).save(*args, **kwargs)
Remove generic relation to reversion.Version from RevisionModelMixinfrom django.db import models
class RevisionModelMixin(models.Model):
class Meta:
abstract = True
_has_revision_support = True
revision = models.PositiveIntegerField(default=0,
db_index=True, editable=False)
def save(self, *args, **kwargs):
self.revision += 1
super(RevisionModelMixin, self).save(*args, **kwargs)
| <commit_before>from django.db import models
from django.contrib.contenttypes import generic
class RevisionModelMixin(models.Model):
class Meta:
abstract = True
_has_revision_support = True
revision = models.PositiveIntegerField(default=0,
db_index=True, editable=False)
versions = generic.GenericRelation('reversion.Version')
def save(self, *args, **kwargs):
self.revision += 1
super(RevisionModelMixin, self).save(*args, **kwargs)
<commit_msg>Remove generic relation to reversion.Version from RevisionModelMixin<commit_after>from django.db import models
class RevisionModelMixin(models.Model):
class Meta:
abstract = True
_has_revision_support = True
revision = models.PositiveIntegerField(default=0,
db_index=True, editable=False)
def save(self, *args, **kwargs):
self.revision += 1
super(RevisionModelMixin, self).save(*args, **kwargs)
|
adb33e7240029bf842213ab8157ae872fe2298fd | subiquity/models/installpath.py | subiquity/models/installpath.py | # Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
log = logging.getLogger("subiquity.models.installpath")
class InstallpathModel(object):
""" Model representing install options
List of install paths in the form of:
('UI Text seen by user', <signal name>, <callback function string>)
"""
# TODO: Re-enable once available
install_paths = [
(_('Install Ubuntu'), 'installpath:install-ubuntu'),
# ('Install MAAS Region Server', 'installpath:maas-region-server'),
# ('Install MAAS Cluster Server', 'installpath:maas-cluster-server'),
# ('Test installation media', 'installpath:test-media'),
# ('Test machine memory', 'installpath:test-memory')
]
def get_menu(self):
return self.install_paths
| # Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
log = logging.getLogger("subiquity.models.installpath")
class InstallpathModel(object):
""" Model representing install options
List of install paths in the form of:
('UI Text seen by user', <signal name>, <callback function string>)
"""
def _refresh_install_paths(self):
# TODO: Re-enable once available
self.install_paths = [
(_('Install Ubuntu'), 'installpath:install-ubuntu'),
# ('Install MAAS Region Server', 'installpath:maas-region-server'),
# ('Install MAAS Cluster Server', 'installpath:maas-cluster-server'),
# ('Test installation media', 'installpath:test-media'),
# ('Test machine memory', 'installpath:test-memory')
]
def get_menu(self):
self._refresh_install_paths()
return self.install_paths
| Fix dynamic translation of install_path labels. | Fix dynamic translation of install_path labels.
| Python | agpl-3.0 | CanonicalLtd/subiquity,CanonicalLtd/subiquity | # Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
log = logging.getLogger("subiquity.models.installpath")
class InstallpathModel(object):
""" Model representing install options
List of install paths in the form of:
('UI Text seen by user', <signal name>, <callback function string>)
"""
# TODO: Re-enable once available
install_paths = [
(_('Install Ubuntu'), 'installpath:install-ubuntu'),
# ('Install MAAS Region Server', 'installpath:maas-region-server'),
# ('Install MAAS Cluster Server', 'installpath:maas-cluster-server'),
# ('Test installation media', 'installpath:test-media'),
# ('Test machine memory', 'installpath:test-memory')
]
def get_menu(self):
return self.install_paths
Fix dynamic translation of install_path labels. | # Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
log = logging.getLogger("subiquity.models.installpath")
class InstallpathModel(object):
""" Model representing install options
List of install paths in the form of:
('UI Text seen by user', <signal name>, <callback function string>)
"""
def _refresh_install_paths(self):
# TODO: Re-enable once available
self.install_paths = [
(_('Install Ubuntu'), 'installpath:install-ubuntu'),
# ('Install MAAS Region Server', 'installpath:maas-region-server'),
# ('Install MAAS Cluster Server', 'installpath:maas-cluster-server'),
# ('Test installation media', 'installpath:test-media'),
# ('Test machine memory', 'installpath:test-memory')
]
def get_menu(self):
self._refresh_install_paths()
return self.install_paths
| <commit_before># Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
log = logging.getLogger("subiquity.models.installpath")
class InstallpathModel(object):
""" Model representing install options
List of install paths in the form of:
('UI Text seen by user', <signal name>, <callback function string>)
"""
# TODO: Re-enable once available
install_paths = [
(_('Install Ubuntu'), 'installpath:install-ubuntu'),
# ('Install MAAS Region Server', 'installpath:maas-region-server'),
# ('Install MAAS Cluster Server', 'installpath:maas-cluster-server'),
# ('Test installation media', 'installpath:test-media'),
# ('Test machine memory', 'installpath:test-memory')
]
def get_menu(self):
return self.install_paths
<commit_msg>Fix dynamic translation of install_path labels.<commit_after> | # Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
log = logging.getLogger("subiquity.models.installpath")
class InstallpathModel(object):
""" Model representing install options
List of install paths in the form of:
('UI Text seen by user', <signal name>, <callback function string>)
"""
def _refresh_install_paths(self):
# TODO: Re-enable once available
self.install_paths = [
(_('Install Ubuntu'), 'installpath:install-ubuntu'),
# ('Install MAAS Region Server', 'installpath:maas-region-server'),
# ('Install MAAS Cluster Server', 'installpath:maas-cluster-server'),
# ('Test installation media', 'installpath:test-media'),
# ('Test machine memory', 'installpath:test-memory')
]
def get_menu(self):
self._refresh_install_paths()
return self.install_paths
| # Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
log = logging.getLogger("subiquity.models.installpath")
class InstallpathModel(object):
""" Model representing install options
List of install paths in the form of:
('UI Text seen by user', <signal name>, <callback function string>)
"""
# TODO: Re-enable once available
install_paths = [
(_('Install Ubuntu'), 'installpath:install-ubuntu'),
# ('Install MAAS Region Server', 'installpath:maas-region-server'),
# ('Install MAAS Cluster Server', 'installpath:maas-cluster-server'),
# ('Test installation media', 'installpath:test-media'),
# ('Test machine memory', 'installpath:test-memory')
]
def get_menu(self):
return self.install_paths
Fix dynamic translation of install_path labels.# Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
log = logging.getLogger("subiquity.models.installpath")
class InstallpathModel(object):
""" Model representing install options
List of install paths in the form of:
('UI Text seen by user', <signal name>, <callback function string>)
"""
def _refresh_install_paths(self):
# TODO: Re-enable once available
self.install_paths = [
(_('Install Ubuntu'), 'installpath:install-ubuntu'),
# ('Install MAAS Region Server', 'installpath:maas-region-server'),
# ('Install MAAS Cluster Server', 'installpath:maas-cluster-server'),
# ('Test installation media', 'installpath:test-media'),
# ('Test machine memory', 'installpath:test-memory')
]
def get_menu(self):
self._refresh_install_paths()
return self.install_paths
| <commit_before># Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
log = logging.getLogger("subiquity.models.installpath")
class InstallpathModel(object):
""" Model representing install options
List of install paths in the form of:
('UI Text seen by user', <signal name>, <callback function string>)
"""
# TODO: Re-enable once available
install_paths = [
(_('Install Ubuntu'), 'installpath:install-ubuntu'),
# ('Install MAAS Region Server', 'installpath:maas-region-server'),
# ('Install MAAS Cluster Server', 'installpath:maas-cluster-server'),
# ('Test installation media', 'installpath:test-media'),
# ('Test machine memory', 'installpath:test-memory')
]
def get_menu(self):
return self.install_paths
<commit_msg>Fix dynamic translation of install_path labels.<commit_after># Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
log = logging.getLogger("subiquity.models.installpath")
class InstallpathModel(object):
""" Model representing install options
List of install paths in the form of:
('UI Text seen by user', <signal name>, <callback function string>)
"""
def _refresh_install_paths(self):
# TODO: Re-enable once available
self.install_paths = [
(_('Install Ubuntu'), 'installpath:install-ubuntu'),
# ('Install MAAS Region Server', 'installpath:maas-region-server'),
# ('Install MAAS Cluster Server', 'installpath:maas-cluster-server'),
# ('Test installation media', 'installpath:test-media'),
# ('Test machine memory', 'installpath:test-memory')
]
def get_menu(self):
self._refresh_install_paths()
return self.install_paths
|
cd09a040270eb3cf1b1966e76382e9e92f4323a8 | soco/__init__.py | soco/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <rsonawalla@gmail.com>'
__version__ = '0.6'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import SonosDiscovery, SoCo
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <rsonawalla@gmail.com>'
__version__ = '0.6'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import SonosDiscovery, SoCo
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
| Fix for logger configuration errors | Fix for logger configuration errors
| Python | mit | dajobe/SoCo,flavio/SoCo,KennethNielsen/SoCo,lawrenceakka/SoCo,lawrenceakka/SoCo,bwhaley/SoCo,dundeemt/SoCo,dsully/SoCo,dsully/SoCo,TrondKjeldas/SoCo,bwhaley/SoCo,simonalpha/SoCo,petteraas/SoCo,flavio/SoCo,xxdede/SoCo,SoCo/SoCo,fgend31/SoCo,TrondKjeldas/SoCo,SoCo/SoCo,xxdede/SoCo,dundeemt/SoCo,petteraas/SoCo,intfrr/SoCo,oyvindmal/SocoWebService,oyvindmal/SocoWebService,fgend31/SoCo,simonalpha/SoCo,TrondKjeldas/SoCo,jlmcgehee21/SoCo,fxstein/SoCo,meska/SoCo,jlmcgehee21/SoCo,dajobe/SoCo,intfrr/SoCo,DPH/SoCo,bwhaley/SoCo,DPH/SoCo,petteraas/SoCo,fxstein/SoCo,xxdede/SoCo,KennethNielsen/SoCo,meska/SoCo | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <rsonawalla@gmail.com>'
__version__ = '0.6'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import SonosDiscovery, SoCo
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
Fix for logger configuration errors | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <rsonawalla@gmail.com>'
__version__ = '0.6'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import SonosDiscovery, SoCo
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <rsonawalla@gmail.com>'
__version__ = '0.6'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import SonosDiscovery, SoCo
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
<commit_msg>Fix for logger configuration errors<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <rsonawalla@gmail.com>'
__version__ = '0.6'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import SonosDiscovery, SoCo
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <rsonawalla@gmail.com>'
__version__ = '0.6'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import SonosDiscovery, SoCo
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
Fix for logger configuration errors# -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <rsonawalla@gmail.com>'
__version__ = '0.6'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import SonosDiscovery, SoCo
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <rsonawalla@gmail.com>'
__version__ = '0.6'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import SonosDiscovery, SoCo
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
<commit_msg>Fix for logger configuration errors<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" SoCo (Sonos Controller) is a simple library to control Sonos speakers """
# Will be parsed by setup.py to determine package metadata
__author__ = 'Rahim Sonawalla <rsonawalla@gmail.com>'
__version__ = '0.6'
__website__ = 'https://github.com/SoCo/SoCo'
__license__ = 'MIT License'
from .core import SonosDiscovery, SoCo
from .exceptions import SoCoException, UnknownSoCoException
__all__ = ['SonosDiscovery', 'SoCo', 'SoCoException', 'UnknownSoCoException']
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
46c1378f345c8290fa34fc7f756ef6fafa8e2aa8 | lucid/modelzoo/aligned_activations.py | lucid/modelzoo/aligned_activations.py | # Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from lucid.misc.io.sanitizing import sanitize
from lucid.misc.io import load
import numpy as np
PATH_TEMPLATE = "gs://modelzoo/aligned-activations/{}/{}-{:05d}-of-01000.npy"
PAGE_SIZE = 10000
NUMBER_OF_AVAILABLE_SAMPLES = 100000
assert NUMBER_OF_AVAILABLE_SAMPLES % PAGE_SIZE == 0
NUMBER_OF_PAGES = NUMBER_OF_AVAILABLE_SAMPLES // PAGE_SIZE
def get_aligned_activations(layer):
activation_paths = [
PATH_TEMPLATE.format(sanitize(layer.model_class.name), sanitize(layer.name), page)
for page in range(NUMBER_OF_PAGES)
]
print(activation_paths)
activations = [load(path) for path in activation_paths]
return np.vstack(activations)
| # Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from lucid.misc.io.sanitizing import sanitize
from lucid.misc.io import load
import numpy as np
PATH_TEMPLATE = "gs://modelzoo/aligned-activations/{}/{}-{:05d}-of-01000.npy"
PAGE_SIZE = 10000
NUMBER_OF_AVAILABLE_SAMPLES = 100000
assert NUMBER_OF_AVAILABLE_SAMPLES % PAGE_SIZE == 0
NUMBER_OF_PAGES = NUMBER_OF_AVAILABLE_SAMPLES // PAGE_SIZE
def get_aligned_activations(layer):
activation_paths = [
PATH_TEMPLATE.format(sanitize(layer.model_class.name), sanitize(layer.name), page)
for page in range(NUMBER_OF_PAGES)
]
activations = [load(path) for path in activation_paths]
return np.vstack(activations)
| Remove stray print statement O_o | Remove stray print statement O_o
| Python | apache-2.0 | tensorflow/lucid,tensorflow/lucid,tensorflow/lucid,tensorflow/lucid | # Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from lucid.misc.io.sanitizing import sanitize
from lucid.misc.io import load
import numpy as np
PATH_TEMPLATE = "gs://modelzoo/aligned-activations/{}/{}-{:05d}-of-01000.npy"
PAGE_SIZE = 10000
NUMBER_OF_AVAILABLE_SAMPLES = 100000
assert NUMBER_OF_AVAILABLE_SAMPLES % PAGE_SIZE == 0
NUMBER_OF_PAGES = NUMBER_OF_AVAILABLE_SAMPLES // PAGE_SIZE
def get_aligned_activations(layer):
activation_paths = [
PATH_TEMPLATE.format(sanitize(layer.model_class.name), sanitize(layer.name), page)
for page in range(NUMBER_OF_PAGES)
]
print(activation_paths)
activations = [load(path) for path in activation_paths]
return np.vstack(activations)
Remove stray print statement O_o | # Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from lucid.misc.io.sanitizing import sanitize
from lucid.misc.io import load
import numpy as np
PATH_TEMPLATE = "gs://modelzoo/aligned-activations/{}/{}-{:05d}-of-01000.npy"
PAGE_SIZE = 10000
NUMBER_OF_AVAILABLE_SAMPLES = 100000
assert NUMBER_OF_AVAILABLE_SAMPLES % PAGE_SIZE == 0
NUMBER_OF_PAGES = NUMBER_OF_AVAILABLE_SAMPLES // PAGE_SIZE
def get_aligned_activations(layer):
activation_paths = [
PATH_TEMPLATE.format(sanitize(layer.model_class.name), sanitize(layer.name), page)
for page in range(NUMBER_OF_PAGES)
]
activations = [load(path) for path in activation_paths]
return np.vstack(activations)
| <commit_before># Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from lucid.misc.io.sanitizing import sanitize
from lucid.misc.io import load
import numpy as np
PATH_TEMPLATE = "gs://modelzoo/aligned-activations/{}/{}-{:05d}-of-01000.npy"
PAGE_SIZE = 10000
NUMBER_OF_AVAILABLE_SAMPLES = 100000
assert NUMBER_OF_AVAILABLE_SAMPLES % PAGE_SIZE == 0
NUMBER_OF_PAGES = NUMBER_OF_AVAILABLE_SAMPLES // PAGE_SIZE
def get_aligned_activations(layer):
activation_paths = [
PATH_TEMPLATE.format(sanitize(layer.model_class.name), sanitize(layer.name), page)
for page in range(NUMBER_OF_PAGES)
]
print(activation_paths)
activations = [load(path) for path in activation_paths]
return np.vstack(activations)
<commit_msg>Remove stray print statement O_o<commit_after> | # Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from lucid.misc.io.sanitizing import sanitize
from lucid.misc.io import load
import numpy as np
PATH_TEMPLATE = "gs://modelzoo/aligned-activations/{}/{}-{:05d}-of-01000.npy"
PAGE_SIZE = 10000
NUMBER_OF_AVAILABLE_SAMPLES = 100000
assert NUMBER_OF_AVAILABLE_SAMPLES % PAGE_SIZE == 0
NUMBER_OF_PAGES = NUMBER_OF_AVAILABLE_SAMPLES // PAGE_SIZE
def get_aligned_activations(layer):
activation_paths = [
PATH_TEMPLATE.format(sanitize(layer.model_class.name), sanitize(layer.name), page)
for page in range(NUMBER_OF_PAGES)
]
activations = [load(path) for path in activation_paths]
return np.vstack(activations)
| # Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from lucid.misc.io.sanitizing import sanitize
from lucid.misc.io import load
import numpy as np
PATH_TEMPLATE = "gs://modelzoo/aligned-activations/{}/{}-{:05d}-of-01000.npy"
PAGE_SIZE = 10000
NUMBER_OF_AVAILABLE_SAMPLES = 100000
assert NUMBER_OF_AVAILABLE_SAMPLES % PAGE_SIZE == 0
NUMBER_OF_PAGES = NUMBER_OF_AVAILABLE_SAMPLES // PAGE_SIZE
def get_aligned_activations(layer):
activation_paths = [
PATH_TEMPLATE.format(sanitize(layer.model_class.name), sanitize(layer.name), page)
for page in range(NUMBER_OF_PAGES)
]
print(activation_paths)
activations = [load(path) for path in activation_paths]
return np.vstack(activations)
Remove stray print statement O_o# Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from lucid.misc.io.sanitizing import sanitize
from lucid.misc.io import load
import numpy as np
PATH_TEMPLATE = "gs://modelzoo/aligned-activations/{}/{}-{:05d}-of-01000.npy"
PAGE_SIZE = 10000
NUMBER_OF_AVAILABLE_SAMPLES = 100000
assert NUMBER_OF_AVAILABLE_SAMPLES % PAGE_SIZE == 0
NUMBER_OF_PAGES = NUMBER_OF_AVAILABLE_SAMPLES // PAGE_SIZE
def get_aligned_activations(layer):
activation_paths = [
PATH_TEMPLATE.format(sanitize(layer.model_class.name), sanitize(layer.name), page)
for page in range(NUMBER_OF_PAGES)
]
activations = [load(path) for path in activation_paths]
return np.vstack(activations)
| <commit_before># Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from lucid.misc.io.sanitizing import sanitize
from lucid.misc.io import load
import numpy as np
PATH_TEMPLATE = "gs://modelzoo/aligned-activations/{}/{}-{:05d}-of-01000.npy"
PAGE_SIZE = 10000
NUMBER_OF_AVAILABLE_SAMPLES = 100000
assert NUMBER_OF_AVAILABLE_SAMPLES % PAGE_SIZE == 0
NUMBER_OF_PAGES = NUMBER_OF_AVAILABLE_SAMPLES // PAGE_SIZE
def get_aligned_activations(layer):
activation_paths = [
PATH_TEMPLATE.format(sanitize(layer.model_class.name), sanitize(layer.name), page)
for page in range(NUMBER_OF_PAGES)
]
print(activation_paths)
activations = [load(path) for path in activation_paths]
return np.vstack(activations)
<commit_msg>Remove stray print statement O_o<commit_after># Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from lucid.misc.io.sanitizing import sanitize
from lucid.misc.io import load
import numpy as np
PATH_TEMPLATE = "gs://modelzoo/aligned-activations/{}/{}-{:05d}-of-01000.npy"
PAGE_SIZE = 10000
NUMBER_OF_AVAILABLE_SAMPLES = 100000
assert NUMBER_OF_AVAILABLE_SAMPLES % PAGE_SIZE == 0
NUMBER_OF_PAGES = NUMBER_OF_AVAILABLE_SAMPLES // PAGE_SIZE
def get_aligned_activations(layer):
activation_paths = [
PATH_TEMPLATE.format(sanitize(layer.model_class.name), sanitize(layer.name), page)
for page in range(NUMBER_OF_PAGES)
]
activations = [load(path) for path in activation_paths]
return np.vstack(activations)
|
1e5a956eb289b8333ecf3c3cc00f51295f37870a | api_tests/institutions/views/test_institution_users_list.py | api_tests/institutions/views/test_institution_users_list.py | from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from osf_tests.factories import InstitutionFactory, UserFactory
from api.base.settings.defaults import API_BASE
class TestInstitutionUsersList(ApiTestCase):
def setUp(self):
super(TestInstitutionUsersList, self).setUp()
self.institution = InstitutionFactory()
self.user1 = UserFactory()
self.user1.affiliated_institutions.add(self.institution)
self.user1.save()
self.user2 = UserFactory()
self.user2.affiliated_institutions.add(self.institution)
self.user2.save()
self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id)
def test_return_all_users(self):
res = self.app.get(self.institution_user_url)
assert_equal(res.status_code, 200)
ids = [each['id'] for each in res.json['data']]
assert_equal(len(res.json['data']), 2)
assert_in(self.user1._id, ids)
assert_in(self.user2._id, ids)
| import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
InstitutionFactory,
UserFactory,
)
@pytest.mark.django_db
class TestInstitutionUsersList:
@pytest.fixture()
def institution(self):
return InstitutionFactory()
@pytest.fixture()
def user_one(self, institution):
user_one = UserFactory()
user_one.affiliated_institutions.add(institution)
user_one.save()
return user_one
@pytest.fixture()
def user_two(self, institution):
user_two = UserFactory()
user_two.affiliated_institutions.add(institution)
user_two.save()
return user_two
@pytest.fixture()
def url_institution_user(self, institution):
return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id)
def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user):
res = app.get(url_institution_user)
assert res.status_code == 200
ids = [each['id'] for each in res.json['data']]
assert len(res.json['data']) == 2
assert user_one._id in ids
assert user_two._id in ids
| Convert institutions users to pytest | Convert institutions users to pytest
| Python | apache-2.0 | cslzchen/osf.io,chennan47/osf.io,crcresearch/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,icereval/osf.io,crcresearch/osf.io,cslzchen/osf.io,sloria/osf.io,felliott/osf.io,binoculars/osf.io,laurenrevere/osf.io,mfraezz/osf.io,felliott/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,crcresearch/osf.io,adlius/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,erinspace/osf.io,baylee-d/osf.io,erinspace/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,sloria/osf.io,mfraezz/osf.io,chrisseto/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,leb2dg/osf.io,mattclark/osf.io,TomBaxter/osf.io,TomBaxter/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,leb2dg/osf.io,felliott/osf.io,Johnetordoff/osf.io,chennan47/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,adlius/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,laurenrevere/osf.io,icereval/osf.io,aaxelb/osf.io,adlius/osf.io,aaxelb/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,chrisseto/osf.io,caneruguz/osf.io,pattisdr/osf.io,adlius/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,baylee-d/osf.io,aaxelb/osf.io,mfraezz/osf.io,saradbowman/osf.io,sloria/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,chennan47/osf.io,TomBaxter/osf.io,binoculars/osf.io,mfraezz/osf.io,felliott/osf.io,mattclark/osf.io | from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from osf_tests.factories import InstitutionFactory, UserFactory
from api.base.settings.defaults import API_BASE
class TestInstitutionUsersList(ApiTestCase):
def setUp(self):
super(TestInstitutionUsersList, self).setUp()
self.institution = InstitutionFactory()
self.user1 = UserFactory()
self.user1.affiliated_institutions.add(self.institution)
self.user1.save()
self.user2 = UserFactory()
self.user2.affiliated_institutions.add(self.institution)
self.user2.save()
self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id)
def test_return_all_users(self):
res = self.app.get(self.institution_user_url)
assert_equal(res.status_code, 200)
ids = [each['id'] for each in res.json['data']]
assert_equal(len(res.json['data']), 2)
assert_in(self.user1._id, ids)
assert_in(self.user2._id, ids)
Convert institutions users to pytest | import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
InstitutionFactory,
UserFactory,
)
@pytest.mark.django_db
class TestInstitutionUsersList:
@pytest.fixture()
def institution(self):
return InstitutionFactory()
@pytest.fixture()
def user_one(self, institution):
user_one = UserFactory()
user_one.affiliated_institutions.add(institution)
user_one.save()
return user_one
@pytest.fixture()
def user_two(self, institution):
user_two = UserFactory()
user_two.affiliated_institutions.add(institution)
user_two.save()
return user_two
@pytest.fixture()
def url_institution_user(self, institution):
return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id)
def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user):
res = app.get(url_institution_user)
assert res.status_code == 200
ids = [each['id'] for each in res.json['data']]
assert len(res.json['data']) == 2
assert user_one._id in ids
assert user_two._id in ids
| <commit_before>from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from osf_tests.factories import InstitutionFactory, UserFactory
from api.base.settings.defaults import API_BASE
class TestInstitutionUsersList(ApiTestCase):
def setUp(self):
super(TestInstitutionUsersList, self).setUp()
self.institution = InstitutionFactory()
self.user1 = UserFactory()
self.user1.affiliated_institutions.add(self.institution)
self.user1.save()
self.user2 = UserFactory()
self.user2.affiliated_institutions.add(self.institution)
self.user2.save()
self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id)
def test_return_all_users(self):
res = self.app.get(self.institution_user_url)
assert_equal(res.status_code, 200)
ids = [each['id'] for each in res.json['data']]
assert_equal(len(res.json['data']), 2)
assert_in(self.user1._id, ids)
assert_in(self.user2._id, ids)
<commit_msg>Convert institutions users to pytest<commit_after> | import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
InstitutionFactory,
UserFactory,
)
@pytest.mark.django_db
class TestInstitutionUsersList:
@pytest.fixture()
def institution(self):
return InstitutionFactory()
@pytest.fixture()
def user_one(self, institution):
user_one = UserFactory()
user_one.affiliated_institutions.add(institution)
user_one.save()
return user_one
@pytest.fixture()
def user_two(self, institution):
user_two = UserFactory()
user_two.affiliated_institutions.add(institution)
user_two.save()
return user_two
@pytest.fixture()
def url_institution_user(self, institution):
return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id)
def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user):
res = app.get(url_institution_user)
assert res.status_code == 200
ids = [each['id'] for each in res.json['data']]
assert len(res.json['data']) == 2
assert user_one._id in ids
assert user_two._id in ids
| from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from osf_tests.factories import InstitutionFactory, UserFactory
from api.base.settings.defaults import API_BASE
class TestInstitutionUsersList(ApiTestCase):
def setUp(self):
super(TestInstitutionUsersList, self).setUp()
self.institution = InstitutionFactory()
self.user1 = UserFactory()
self.user1.affiliated_institutions.add(self.institution)
self.user1.save()
self.user2 = UserFactory()
self.user2.affiliated_institutions.add(self.institution)
self.user2.save()
self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id)
def test_return_all_users(self):
res = self.app.get(self.institution_user_url)
assert_equal(res.status_code, 200)
ids = [each['id'] for each in res.json['data']]
assert_equal(len(res.json['data']), 2)
assert_in(self.user1._id, ids)
assert_in(self.user2._id, ids)
Convert institutions users to pytestimport pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
InstitutionFactory,
UserFactory,
)
@pytest.mark.django_db
class TestInstitutionUsersList:
@pytest.fixture()
def institution(self):
return InstitutionFactory()
@pytest.fixture()
def user_one(self, institution):
user_one = UserFactory()
user_one.affiliated_institutions.add(institution)
user_one.save()
return user_one
@pytest.fixture()
def user_two(self, institution):
user_two = UserFactory()
user_two.affiliated_institutions.add(institution)
user_two.save()
return user_two
@pytest.fixture()
def url_institution_user(self, institution):
return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id)
def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user):
res = app.get(url_institution_user)
assert res.status_code == 200
ids = [each['id'] for each in res.json['data']]
assert len(res.json['data']) == 2
assert user_one._id in ids
assert user_two._id in ids
| <commit_before>from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from osf_tests.factories import InstitutionFactory, UserFactory
from api.base.settings.defaults import API_BASE
class TestInstitutionUsersList(ApiTestCase):
def setUp(self):
super(TestInstitutionUsersList, self).setUp()
self.institution = InstitutionFactory()
self.user1 = UserFactory()
self.user1.affiliated_institutions.add(self.institution)
self.user1.save()
self.user2 = UserFactory()
self.user2.affiliated_institutions.add(self.institution)
self.user2.save()
self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id)
def test_return_all_users(self):
res = self.app.get(self.institution_user_url)
assert_equal(res.status_code, 200)
ids = [each['id'] for each in res.json['data']]
assert_equal(len(res.json['data']), 2)
assert_in(self.user1._id, ids)
assert_in(self.user2._id, ids)
<commit_msg>Convert institutions users to pytest<commit_after>import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
InstitutionFactory,
UserFactory,
)
@pytest.mark.django_db
class TestInstitutionUsersList:
@pytest.fixture()
def institution(self):
return InstitutionFactory()
@pytest.fixture()
def user_one(self, institution):
user_one = UserFactory()
user_one.affiliated_institutions.add(institution)
user_one.save()
return user_one
@pytest.fixture()
def user_two(self, institution):
user_two = UserFactory()
user_two.affiliated_institutions.add(institution)
user_two.save()
return user_two
@pytest.fixture()
def url_institution_user(self, institution):
return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id)
def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user):
res = app.get(url_institution_user)
assert res.status_code == 200
ids = [each['id'] for each in res.json['data']]
assert len(res.json['data']) == 2
assert user_one._id in ids
assert user_two._id in ids
|
b4627ab3448ab70db44d8e9af3310a0755eeca64 | mediacloud/mediawords/db/schema/version.py | mediacloud/mediawords/db/schema/version.py | import re
from mediawords.util.perl import decode_string_from_bytes_if_needed
class SchemaVersionFromLinesException(Exception):
pass
def schema_version_from_lines(sql: str) -> int:
"""Utility function to determine a database schema version from a bunch of SQL commands."""
sql = decode_string_from_bytes_if_needed(sql)
matches = re.search(r'[+\-]*\s*MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := (\d+?);', sql)
if matches is None:
raise SchemaVersionFromLinesException("Unable to parse the database schema version number")
schema_version = int(matches.group(1))
if schema_version == 0:
raise SchemaVersionFromLinesException("Invalid schema version")
return schema_version
| import re
from mediawords.util.perl import decode_string_from_bytes_if_needed
class McSchemaVersionFromLinesException(Exception):
pass
def schema_version_from_lines(sql: str) -> int:
"""Utility function to determine a database schema version from a bunch of SQL commands."""
sql = decode_string_from_bytes_if_needed(sql)
matches = re.search(r'[+\-]*\s*MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := (\d+?);', sql)
if matches is None:
raise McSchemaVersionFromLinesException("Unable to parse the database schema version number")
schema_version = int(matches.group(1))
if schema_version == 0:
raise McSchemaVersionFromLinesException("Invalid schema version")
return schema_version
| Prepend “Mc” to exception name | Prepend “Mc” to exception name
| Python | agpl-3.0 | berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud | import re
from mediawords.util.perl import decode_string_from_bytes_if_needed
class SchemaVersionFromLinesException(Exception):
pass
def schema_version_from_lines(sql: str) -> int:
"""Utility function to determine a database schema version from a bunch of SQL commands."""
sql = decode_string_from_bytes_if_needed(sql)
matches = re.search(r'[+\-]*\s*MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := (\d+?);', sql)
if matches is None:
raise SchemaVersionFromLinesException("Unable to parse the database schema version number")
schema_version = int(matches.group(1))
if schema_version == 0:
raise SchemaVersionFromLinesException("Invalid schema version")
return schema_version
Prepend “Mc” to exception name | import re
from mediawords.util.perl import decode_string_from_bytes_if_needed
class McSchemaVersionFromLinesException(Exception):
pass
def schema_version_from_lines(sql: str) -> int:
"""Utility function to determine a database schema version from a bunch of SQL commands."""
sql = decode_string_from_bytes_if_needed(sql)
matches = re.search(r'[+\-]*\s*MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := (\d+?);', sql)
if matches is None:
raise McSchemaVersionFromLinesException("Unable to parse the database schema version number")
schema_version = int(matches.group(1))
if schema_version == 0:
raise McSchemaVersionFromLinesException("Invalid schema version")
return schema_version
| <commit_before>import re
from mediawords.util.perl import decode_string_from_bytes_if_needed
class SchemaVersionFromLinesException(Exception):
pass
def schema_version_from_lines(sql: str) -> int:
"""Utility function to determine a database schema version from a bunch of SQL commands."""
sql = decode_string_from_bytes_if_needed(sql)
matches = re.search(r'[+\-]*\s*MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := (\d+?);', sql)
if matches is None:
raise SchemaVersionFromLinesException("Unable to parse the database schema version number")
schema_version = int(matches.group(1))
if schema_version == 0:
raise SchemaVersionFromLinesException("Invalid schema version")
return schema_version
<commit_msg>Prepend “Mc” to exception name<commit_after> | import re
from mediawords.util.perl import decode_string_from_bytes_if_needed
class McSchemaVersionFromLinesException(Exception):
pass
def schema_version_from_lines(sql: str) -> int:
"""Utility function to determine a database schema version from a bunch of SQL commands."""
sql = decode_string_from_bytes_if_needed(sql)
matches = re.search(r'[+\-]*\s*MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := (\d+?);', sql)
if matches is None:
raise McSchemaVersionFromLinesException("Unable to parse the database schema version number")
schema_version = int(matches.group(1))
if schema_version == 0:
raise McSchemaVersionFromLinesException("Invalid schema version")
return schema_version
| import re
from mediawords.util.perl import decode_string_from_bytes_if_needed
class SchemaVersionFromLinesException(Exception):
pass
def schema_version_from_lines(sql: str) -> int:
"""Utility function to determine a database schema version from a bunch of SQL commands."""
sql = decode_string_from_bytes_if_needed(sql)
matches = re.search(r'[+\-]*\s*MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := (\d+?);', sql)
if matches is None:
raise SchemaVersionFromLinesException("Unable to parse the database schema version number")
schema_version = int(matches.group(1))
if schema_version == 0:
raise SchemaVersionFromLinesException("Invalid schema version")
return schema_version
Prepend “Mc” to exception nameimport re
from mediawords.util.perl import decode_string_from_bytes_if_needed
class McSchemaVersionFromLinesException(Exception):
pass
def schema_version_from_lines(sql: str) -> int:
"""Utility function to determine a database schema version from a bunch of SQL commands."""
sql = decode_string_from_bytes_if_needed(sql)
matches = re.search(r'[+\-]*\s*MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := (\d+?);', sql)
if matches is None:
raise McSchemaVersionFromLinesException("Unable to parse the database schema version number")
schema_version = int(matches.group(1))
if schema_version == 0:
raise McSchemaVersionFromLinesException("Invalid schema version")
return schema_version
| <commit_before>import re
from mediawords.util.perl import decode_string_from_bytes_if_needed
class SchemaVersionFromLinesException(Exception):
pass
def schema_version_from_lines(sql: str) -> int:
"""Utility function to determine a database schema version from a bunch of SQL commands."""
sql = decode_string_from_bytes_if_needed(sql)
matches = re.search(r'[+\-]*\s*MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := (\d+?);', sql)
if matches is None:
raise SchemaVersionFromLinesException("Unable to parse the database schema version number")
schema_version = int(matches.group(1))
if schema_version == 0:
raise SchemaVersionFromLinesException("Invalid schema version")
return schema_version
<commit_msg>Prepend “Mc” to exception name<commit_after>import re
from mediawords.util.perl import decode_string_from_bytes_if_needed
class McSchemaVersionFromLinesException(Exception):
pass
def schema_version_from_lines(sql: str) -> int:
"""Utility function to determine a database schema version from a bunch of SQL commands."""
sql = decode_string_from_bytes_if_needed(sql)
matches = re.search(r'[+\-]*\s*MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := (\d+?);', sql)
if matches is None:
raise McSchemaVersionFromLinesException("Unable to parse the database schema version number")
schema_version = int(matches.group(1))
if schema_version == 0:
raise McSchemaVersionFromLinesException("Invalid schema version")
return schema_version
|
97fa1de8a22ff8fd9fd80a39328ec57be672575d | mlabdata.py | mlabdata.py | import collections
"""All of the datatypes that get passed around inside Signal Searcher."""
InternetData = collections.namedtuple(
'InternetData',
['key', 'table', 'time', 'upload', 'download', 'rtt', 'samples'])
# Everything below this line is temporary and should be deleted when the
# migration away from the spike is complete.
# pylint disable=missing-docstring, no-self-use
# Deprecated --- DO NOT USE
MlabDataEntry = collections.namedtuple(
'MlabDataEntry', ['time', 'upload_speed', 'download_speed', 'min_latency'])
ProblemTuple = collections.namedtuple('ProblemTuple', [
'key', 'table', 'start_date', 'end_date', 'severity', 'test_count',
'description'
])
class Problem(ProblemTuple):
def to_url(self):
return 'http://127.0.0.1'
| """All of the datatypes that get passed around inside Signal Searcher."""
import collections
InternetData = collections.namedtuple(
'InternetData',
['key', 'table', 'time', 'upload', 'download', 'rtt', 'samples'])
# Everything below this line is temporary and should be deleted when the
# migration away from the spike is complete.
# pylint disable=missing-docstring, no-self-use
# Deprecated --- DO NOT USE
MlabDataEntry = collections.namedtuple(
'MlabDataEntry', ['time', 'upload_speed', 'download_speed', 'min_latency'])
ProblemTuple = collections.namedtuple('ProblemTuple', [
'key', 'table', 'start_date', 'end_date', 'severity', 'test_count',
'description'
])
class Problem(ProblemTuple):
def to_url(self):
return 'http://127.0.0.1'
| Make the string be an actual docstring | Make the string be an actual docstring
| Python | apache-2.0 | m-lab/signal-searcher | import collections
"""All of the datatypes that get passed around inside Signal Searcher."""
InternetData = collections.namedtuple(
'InternetData',
['key', 'table', 'time', 'upload', 'download', 'rtt', 'samples'])
# Everything below this line is temporary and should be deleted when the
# migration away from the spike is complete.
# pylint disable=missing-docstring, no-self-use
# Deprecated --- DO NOT USE
MlabDataEntry = collections.namedtuple(
'MlabDataEntry', ['time', 'upload_speed', 'download_speed', 'min_latency'])
ProblemTuple = collections.namedtuple('ProblemTuple', [
'key', 'table', 'start_date', 'end_date', 'severity', 'test_count',
'description'
])
class Problem(ProblemTuple):
def to_url(self):
return 'http://127.0.0.1'
Make the string be an actual docstring | """All of the datatypes that get passed around inside Signal Searcher."""
import collections
InternetData = collections.namedtuple(
'InternetData',
['key', 'table', 'time', 'upload', 'download', 'rtt', 'samples'])
# Everything below this line is temporary and should be deleted when the
# migration away from the spike is complete.
# pylint disable=missing-docstring, no-self-use
# Deprecated --- DO NOT USE
MlabDataEntry = collections.namedtuple(
'MlabDataEntry', ['time', 'upload_speed', 'download_speed', 'min_latency'])
ProblemTuple = collections.namedtuple('ProblemTuple', [
'key', 'table', 'start_date', 'end_date', 'severity', 'test_count',
'description'
])
class Problem(ProblemTuple):
def to_url(self):
return 'http://127.0.0.1'
| <commit_before>import collections
"""All of the datatypes that get passed around inside Signal Searcher."""
InternetData = collections.namedtuple(
'InternetData',
['key', 'table', 'time', 'upload', 'download', 'rtt', 'samples'])
# Everything below this line is temporary and should be deleted when the
# migration away from the spike is complete.
# pylint disable=missing-docstring, no-self-use
# Deprecated --- DO NOT USE
MlabDataEntry = collections.namedtuple(
'MlabDataEntry', ['time', 'upload_speed', 'download_speed', 'min_latency'])
ProblemTuple = collections.namedtuple('ProblemTuple', [
'key', 'table', 'start_date', 'end_date', 'severity', 'test_count',
'description'
])
class Problem(ProblemTuple):
def to_url(self):
return 'http://127.0.0.1'
<commit_msg>Make the string be an actual docstring<commit_after> | """All of the datatypes that get passed around inside Signal Searcher."""
import collections
InternetData = collections.namedtuple(
'InternetData',
['key', 'table', 'time', 'upload', 'download', 'rtt', 'samples'])
# Everything below this line is temporary and should be deleted when the
# migration away from the spike is complete.
# pylint disable=missing-docstring, no-self-use
# Deprecated --- DO NOT USE
MlabDataEntry = collections.namedtuple(
'MlabDataEntry', ['time', 'upload_speed', 'download_speed', 'min_latency'])
ProblemTuple = collections.namedtuple('ProblemTuple', [
'key', 'table', 'start_date', 'end_date', 'severity', 'test_count',
'description'
])
class Problem(ProblemTuple):
def to_url(self):
return 'http://127.0.0.1'
| import collections
"""All of the datatypes that get passed around inside Signal Searcher."""
InternetData = collections.namedtuple(
'InternetData',
['key', 'table', 'time', 'upload', 'download', 'rtt', 'samples'])
# Everything below this line is temporary and should be deleted when the
# migration away from the spike is complete.
# pylint disable=missing-docstring, no-self-use
# Deprecated --- DO NOT USE
MlabDataEntry = collections.namedtuple(
'MlabDataEntry', ['time', 'upload_speed', 'download_speed', 'min_latency'])
ProblemTuple = collections.namedtuple('ProblemTuple', [
'key', 'table', 'start_date', 'end_date', 'severity', 'test_count',
'description'
])
class Problem(ProblemTuple):
def to_url(self):
return 'http://127.0.0.1'
Make the string be an actual docstring"""All of the datatypes that get passed around inside Signal Searcher."""
import collections
InternetData = collections.namedtuple(
'InternetData',
['key', 'table', 'time', 'upload', 'download', 'rtt', 'samples'])
# Everything below this line is temporary and should be deleted when the
# migration away from the spike is complete.
# pylint disable=missing-docstring, no-self-use
# Deprecated --- DO NOT USE
MlabDataEntry = collections.namedtuple(
'MlabDataEntry', ['time', 'upload_speed', 'download_speed', 'min_latency'])
ProblemTuple = collections.namedtuple('ProblemTuple', [
'key', 'table', 'start_date', 'end_date', 'severity', 'test_count',
'description'
])
class Problem(ProblemTuple):
def to_url(self):
return 'http://127.0.0.1'
| <commit_before>import collections
"""All of the datatypes that get passed around inside Signal Searcher."""
InternetData = collections.namedtuple(
'InternetData',
['key', 'table', 'time', 'upload', 'download', 'rtt', 'samples'])
# Everything below this line is temporary and should be deleted when the
# migration away from the spike is complete.
# pylint disable=missing-docstring, no-self-use
# Deprecated --- DO NOT USE
MlabDataEntry = collections.namedtuple(
'MlabDataEntry', ['time', 'upload_speed', 'download_speed', 'min_latency'])
ProblemTuple = collections.namedtuple('ProblemTuple', [
'key', 'table', 'start_date', 'end_date', 'severity', 'test_count',
'description'
])
class Problem(ProblemTuple):
def to_url(self):
return 'http://127.0.0.1'
<commit_msg>Make the string be an actual docstring<commit_after>"""All of the datatypes that get passed around inside Signal Searcher."""
import collections
InternetData = collections.namedtuple(
'InternetData',
['key', 'table', 'time', 'upload', 'download', 'rtt', 'samples'])
# Everything below this line is temporary and should be deleted when the
# migration away from the spike is complete.
# pylint disable=missing-docstring, no-self-use
# Deprecated --- DO NOT USE
MlabDataEntry = collections.namedtuple(
'MlabDataEntry', ['time', 'upload_speed', 'download_speed', 'min_latency'])
ProblemTuple = collections.namedtuple('ProblemTuple', [
'key', 'table', 'start_date', 'end_date', 'severity', 'test_count',
'description'
])
class Problem(ProblemTuple):
def to_url(self):
return 'http://127.0.0.1'
|
ca31ecaf79e42cacc023277aa163af8887a360ad | mlog/log.py | mlog/log.py | import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
f = open(fname, 'a')
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
f.close()
| import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
with open(fname, 'a') as f:
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
| Use with statement when writing to a file | Use with statement when writing to a file
| Python | agpl-3.0 | fajran/mlog | import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
f = open(fname, 'a')
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
f.close()
Use with statement when writing to a file | import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
with open(fname, 'a') as f:
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
| <commit_before>import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
f = open(fname, 'a')
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
f.close()
<commit_msg>Use with statement when writing to a file<commit_after> | import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
with open(fname, 'a') as f:
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
| import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
f = open(fname, 'a')
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
f.close()
Use with statement when writing to a fileimport gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
with open(fname, 'a') as f:
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
| <commit_before>import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
f = open(fname, 'a')
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
f.close()
<commit_msg>Use with statement when writing to a file<commit_after>import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
with open(fname, 'a') as f:
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
|
d1e5cce57da49bd93950004b1a4e8766b525106a | backend/unpp_api/apps/common/tests/test_views.py | backend/unpp_api/apps/common/tests/test_views.py | from django.urls import reverse
from agency.roles import AgencyRole
from common.tests.base import BaseAPITestCase
from rest_framework import status
class TestGeneralConfigAPIView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
url = reverse('config:general-config')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class TestAPISwaggerView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
response = self.client.get('/api/doc/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
| from django.core.management import call_command
from django.test import TestCase
from django.urls import reverse
from agency.roles import AgencyRole
from common.tests.base import BaseAPITestCase
from rest_framework import status
class TestGeneralConfigAPIView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
url = reverse('config:general-config')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class TestAPISwaggerView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
response = self.client.get('/api/doc/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
class MigrationTestCase(TestCase):
def test_all_changes_migrated(self):
"""
Fail if there are changes in the models not reflected in migrations
"""
call_command('makemigrations', check=True, dry_run=True)
| Add testin for unmigrated changes in models | Add testin for unmigrated changes in models
| Python | apache-2.0 | unicef/un-partner-portal,unicef/un-partner-portal,unicef/un-partner-portal,unicef/un-partner-portal | from django.urls import reverse
from agency.roles import AgencyRole
from common.tests.base import BaseAPITestCase
from rest_framework import status
class TestGeneralConfigAPIView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
url = reverse('config:general-config')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class TestAPISwaggerView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
response = self.client.get('/api/doc/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
Add testin for unmigrated changes in models | from django.core.management import call_command
from django.test import TestCase
from django.urls import reverse
from agency.roles import AgencyRole
from common.tests.base import BaseAPITestCase
from rest_framework import status
class TestGeneralConfigAPIView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
url = reverse('config:general-config')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class TestAPISwaggerView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
response = self.client.get('/api/doc/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
class MigrationTestCase(TestCase):
def test_all_changes_migrated(self):
"""
Fail if there are changes in the models not reflected in migrations
"""
call_command('makemigrations', check=True, dry_run=True)
| <commit_before>from django.urls import reverse
from agency.roles import AgencyRole
from common.tests.base import BaseAPITestCase
from rest_framework import status
class TestGeneralConfigAPIView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
url = reverse('config:general-config')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class TestAPISwaggerView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
response = self.client.get('/api/doc/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
<commit_msg>Add testin for unmigrated changes in models<commit_after> | from django.core.management import call_command
from django.test import TestCase
from django.urls import reverse
from agency.roles import AgencyRole
from common.tests.base import BaseAPITestCase
from rest_framework import status
class TestGeneralConfigAPIView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
url = reverse('config:general-config')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class TestAPISwaggerView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
response = self.client.get('/api/doc/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
class MigrationTestCase(TestCase):
def test_all_changes_migrated(self):
"""
Fail if there are changes in the models not reflected in migrations
"""
call_command('makemigrations', check=True, dry_run=True)
| from django.urls import reverse
from agency.roles import AgencyRole
from common.tests.base import BaseAPITestCase
from rest_framework import status
class TestGeneralConfigAPIView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
url = reverse('config:general-config')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class TestAPISwaggerView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
response = self.client.get('/api/doc/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
Add testin for unmigrated changes in modelsfrom django.core.management import call_command
from django.test import TestCase
from django.urls import reverse
from agency.roles import AgencyRole
from common.tests.base import BaseAPITestCase
from rest_framework import status
class TestGeneralConfigAPIView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
url = reverse('config:general-config')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class TestAPISwaggerView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
response = self.client.get('/api/doc/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
class MigrationTestCase(TestCase):
def test_all_changes_migrated(self):
"""
Fail if there are changes in the models not reflected in migrations
"""
call_command('makemigrations', check=True, dry_run=True)
| <commit_before>from django.urls import reverse
from agency.roles import AgencyRole
from common.tests.base import BaseAPITestCase
from rest_framework import status
class TestGeneralConfigAPIView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
url = reverse('config:general-config')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class TestAPISwaggerView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
response = self.client.get('/api/doc/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
<commit_msg>Add testin for unmigrated changes in models<commit_after>from django.core.management import call_command
from django.test import TestCase
from django.urls import reverse
from agency.roles import AgencyRole
from common.tests.base import BaseAPITestCase
from rest_framework import status
class TestGeneralConfigAPIView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
url = reverse('config:general-config')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class TestAPISwaggerView(BaseAPITestCase):
user_type = BaseAPITestCase.USER_AGENCY
agency_role = AgencyRole.ADMINISTRATOR
def test_view(self):
response = self.client.get('/api/doc/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
class MigrationTestCase(TestCase):
def test_all_changes_migrated(self):
"""
Fail if there are changes in the models not reflected in migrations
"""
call_command('makemigrations', check=True, dry_run=True)
|
df385ac3c06018a2d151ead1e07293166ff92614 | erpnext/patches/v11_0/move_leave_approvers_from_employee.py | erpnext/patches/v11_0/move_leave_approvers_from_employee.py | import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert() | import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
if not frappe.db.table_exists("Employee Leave Approver"):
return
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert() | Check if table exists else return | Check if table exists else return
| Python | agpl-3.0 | gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext | import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert()Check if table exists else return | import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
if not frappe.db.table_exists("Employee Leave Approver"):
return
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert() | <commit_before>import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert()<commit_msg>Check if table exists else return<commit_after> | import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
if not frappe.db.table_exists("Employee Leave Approver"):
return
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert() | import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert()Check if table exists else returnimport frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
if not frappe.db.table_exists("Employee Leave Approver"):
return
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert() | <commit_before>import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert()<commit_msg>Check if table exists else return<commit_after>import frappe
from frappe import _
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "department_approver")
frappe.reload_doc("hr", "doctype", "employee")
frappe.reload_doc("hr", "doctype", "department")
if frappe.db.has_column('Department', 'leave_approver'):
rename_field('Department', "leave_approver", "leave_approvers")
if frappe.db.has_column('Department', 'expense_approver'):
rename_field('Department', "expense_approver", "expense_approvers")
if not frappe.db.table_exists("Employee Leave Approver"):
return
approvers = frappe.db.sql("""select distinct app.leave_approver, emp.department from
`tabEmployee Leave Approver` app, `tabEmployee` emp
where app.parenttype = 'Employee'
and emp.name = app.parent
""", as_dict=True)
for record in approvers:
if record.department:
department = frappe.get_doc("Department", record.department)
if not department:
return
if not len(department.leave_approvers):
department.append("leave_approvers",{
"approver": record.leave_approver
}).db_insert() |
a50ff464da06a92edd23d743fecdfa65aa40bedd | test_swift.py | test_swift.py | #!/usr/bin/env python3
from os.path import dirname, realpath
import sys
from test import JAVA_SUITES, INTERPRETERS, run_suites, run_suite
import test
test.REPO_DIR = dirname(realpath(__file__))
SWIFT_SUITES = JAVA_SUITES
def java_to_swift_interpreter(interpreter):
if interpreter.language == 'java':
interpreter.language = 'swift'
interpreter.args = ['.build/debug/slox']
return interpreter
INTERPRETERS = {name: java_to_swift_interpreter(interpreter) for (name, interpreter) in INTERPRETERS.items()}
def main():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print('Usage: test.py <interpreter> [filter]')
sys.exit(1)
if len(sys.argv) == 3:
filter_path = sys.argv[2]
if sys.argv[1] == 'all':
run_suites(sorted(INTERPRETERS.keys()))
elif sys.argv[1] == 'c':
run_suites(C_SUITES)
elif sys.argv[1] == 'swift':
run_suites(SWIFT_SUITES)
elif sys.argv[1] not in INTERPRETERS:
print('Unknown interpreter "{}"'.format(sys.argv[1]))
sys.exit(1)
else:
if not run_suite(sys.argv[1]):
sys.exit(1)
if __name__ == '__main__':
main() | #!/usr/bin/env python3
from os.path import dirname, realpath
import sys
from test import JAVA_SUITES, INTERPRETERS, run_suites, run_suite
import test
test.REPO_DIR = dirname(realpath(__file__))
SWIFT_SUITES = JAVA_SUITES
def java_to_swift_interpreter(interpreter):
if interpreter.language == 'java':
# interpreter.language = 'swift'
interpreter.args = ['.build/debug/slox']
return interpreter
INTERPRETERS = {name: java_to_swift_interpreter(interpreter) for (name, interpreter) in INTERPRETERS.items()}
def main():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print('Usage: test.py <interpreter> [filter]')
sys.exit(1)
if len(sys.argv) == 3:
filter_path = sys.argv[2]
if sys.argv[1] == 'all':
run_suites(sorted(INTERPRETERS.keys()))
elif sys.argv[1] == 'c':
run_suites(C_SUITES)
elif sys.argv[1] == 'swift':
run_suites(SWIFT_SUITES)
elif sys.argv[1] not in INTERPRETERS:
print('Unknown interpreter "{}"'.format(sys.argv[1]))
sys.exit(1)
else:
if not run_suite(sys.argv[1]):
sys.exit(1)
if __name__ == '__main__':
main() | Fix small bug in swift test runner | Fix small bug in swift test runner
| Python | mit | hashemi/slox,hashemi/slox | #!/usr/bin/env python3
from os.path import dirname, realpath
import sys
from test import JAVA_SUITES, INTERPRETERS, run_suites, run_suite
import test
test.REPO_DIR = dirname(realpath(__file__))
SWIFT_SUITES = JAVA_SUITES
def java_to_swift_interpreter(interpreter):
if interpreter.language == 'java':
interpreter.language = 'swift'
interpreter.args = ['.build/debug/slox']
return interpreter
INTERPRETERS = {name: java_to_swift_interpreter(interpreter) for (name, interpreter) in INTERPRETERS.items()}
def main():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print('Usage: test.py <interpreter> [filter]')
sys.exit(1)
if len(sys.argv) == 3:
filter_path = sys.argv[2]
if sys.argv[1] == 'all':
run_suites(sorted(INTERPRETERS.keys()))
elif sys.argv[1] == 'c':
run_suites(C_SUITES)
elif sys.argv[1] == 'swift':
run_suites(SWIFT_SUITES)
elif sys.argv[1] not in INTERPRETERS:
print('Unknown interpreter "{}"'.format(sys.argv[1]))
sys.exit(1)
else:
if not run_suite(sys.argv[1]):
sys.exit(1)
if __name__ == '__main__':
main()Fix small bug in swift test runner | #!/usr/bin/env python3
from os.path import dirname, realpath
import sys
from test import JAVA_SUITES, INTERPRETERS, run_suites, run_suite
import test
test.REPO_DIR = dirname(realpath(__file__))
SWIFT_SUITES = JAVA_SUITES
def java_to_swift_interpreter(interpreter):
if interpreter.language == 'java':
# interpreter.language = 'swift'
interpreter.args = ['.build/debug/slox']
return interpreter
INTERPRETERS = {name: java_to_swift_interpreter(interpreter) for (name, interpreter) in INTERPRETERS.items()}
def main():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print('Usage: test.py <interpreter> [filter]')
sys.exit(1)
if len(sys.argv) == 3:
filter_path = sys.argv[2]
if sys.argv[1] == 'all':
run_suites(sorted(INTERPRETERS.keys()))
elif sys.argv[1] == 'c':
run_suites(C_SUITES)
elif sys.argv[1] == 'swift':
run_suites(SWIFT_SUITES)
elif sys.argv[1] not in INTERPRETERS:
print('Unknown interpreter "{}"'.format(sys.argv[1]))
sys.exit(1)
else:
if not run_suite(sys.argv[1]):
sys.exit(1)
if __name__ == '__main__':
main() | <commit_before>#!/usr/bin/env python3
from os.path import dirname, realpath
import sys
from test import JAVA_SUITES, INTERPRETERS, run_suites, run_suite
import test
test.REPO_DIR = dirname(realpath(__file__))
SWIFT_SUITES = JAVA_SUITES
def java_to_swift_interpreter(interpreter):
if interpreter.language == 'java':
interpreter.language = 'swift'
interpreter.args = ['.build/debug/slox']
return interpreter
INTERPRETERS = {name: java_to_swift_interpreter(interpreter) for (name, interpreter) in INTERPRETERS.items()}
def main():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print('Usage: test.py <interpreter> [filter]')
sys.exit(1)
if len(sys.argv) == 3:
filter_path = sys.argv[2]
if sys.argv[1] == 'all':
run_suites(sorted(INTERPRETERS.keys()))
elif sys.argv[1] == 'c':
run_suites(C_SUITES)
elif sys.argv[1] == 'swift':
run_suites(SWIFT_SUITES)
elif sys.argv[1] not in INTERPRETERS:
print('Unknown interpreter "{}"'.format(sys.argv[1]))
sys.exit(1)
else:
if not run_suite(sys.argv[1]):
sys.exit(1)
if __name__ == '__main__':
main()<commit_msg>Fix small bug in swift test runner<commit_after> | #!/usr/bin/env python3
from os.path import dirname, realpath
import sys
from test import JAVA_SUITES, INTERPRETERS, run_suites, run_suite
import test
test.REPO_DIR = dirname(realpath(__file__))
SWIFT_SUITES = JAVA_SUITES
def java_to_swift_interpreter(interpreter):
if interpreter.language == 'java':
# interpreter.language = 'swift'
interpreter.args = ['.build/debug/slox']
return interpreter
INTERPRETERS = {name: java_to_swift_interpreter(interpreter) for (name, interpreter) in INTERPRETERS.items()}
def main():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print('Usage: test.py <interpreter> [filter]')
sys.exit(1)
if len(sys.argv) == 3:
filter_path = sys.argv[2]
if sys.argv[1] == 'all':
run_suites(sorted(INTERPRETERS.keys()))
elif sys.argv[1] == 'c':
run_suites(C_SUITES)
elif sys.argv[1] == 'swift':
run_suites(SWIFT_SUITES)
elif sys.argv[1] not in INTERPRETERS:
print('Unknown interpreter "{}"'.format(sys.argv[1]))
sys.exit(1)
else:
if not run_suite(sys.argv[1]):
sys.exit(1)
if __name__ == '__main__':
main() | #!/usr/bin/env python3
from os.path import dirname, realpath
import sys
from test import JAVA_SUITES, INTERPRETERS, run_suites, run_suite
import test
test.REPO_DIR = dirname(realpath(__file__))
SWIFT_SUITES = JAVA_SUITES
def java_to_swift_interpreter(interpreter):
if interpreter.language == 'java':
interpreter.language = 'swift'
interpreter.args = ['.build/debug/slox']
return interpreter
INTERPRETERS = {name: java_to_swift_interpreter(interpreter) for (name, interpreter) in INTERPRETERS.items()}
def main():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print('Usage: test.py <interpreter> [filter]')
sys.exit(1)
if len(sys.argv) == 3:
filter_path = sys.argv[2]
if sys.argv[1] == 'all':
run_suites(sorted(INTERPRETERS.keys()))
elif sys.argv[1] == 'c':
run_suites(C_SUITES)
elif sys.argv[1] == 'swift':
run_suites(SWIFT_SUITES)
elif sys.argv[1] not in INTERPRETERS:
print('Unknown interpreter "{}"'.format(sys.argv[1]))
sys.exit(1)
else:
if not run_suite(sys.argv[1]):
sys.exit(1)
if __name__ == '__main__':
main()Fix small bug in swift test runner#!/usr/bin/env python3
from os.path import dirname, realpath
import sys
from test import JAVA_SUITES, INTERPRETERS, run_suites, run_suite
import test
test.REPO_DIR = dirname(realpath(__file__))
SWIFT_SUITES = JAVA_SUITES
def java_to_swift_interpreter(interpreter):
if interpreter.language == 'java':
# interpreter.language = 'swift'
interpreter.args = ['.build/debug/slox']
return interpreter
INTERPRETERS = {name: java_to_swift_interpreter(interpreter) for (name, interpreter) in INTERPRETERS.items()}
def main():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print('Usage: test.py <interpreter> [filter]')
sys.exit(1)
if len(sys.argv) == 3:
filter_path = sys.argv[2]
if sys.argv[1] == 'all':
run_suites(sorted(INTERPRETERS.keys()))
elif sys.argv[1] == 'c':
run_suites(C_SUITES)
elif sys.argv[1] == 'swift':
run_suites(SWIFT_SUITES)
elif sys.argv[1] not in INTERPRETERS:
print('Unknown interpreter "{}"'.format(sys.argv[1]))
sys.exit(1)
else:
if not run_suite(sys.argv[1]):
sys.exit(1)
if __name__ == '__main__':
main() | <commit_before>#!/usr/bin/env python3
from os.path import dirname, realpath
import sys
from test import JAVA_SUITES, INTERPRETERS, run_suites, run_suite
import test
test.REPO_DIR = dirname(realpath(__file__))
SWIFT_SUITES = JAVA_SUITES
def java_to_swift_interpreter(interpreter):
if interpreter.language == 'java':
interpreter.language = 'swift'
interpreter.args = ['.build/debug/slox']
return interpreter
INTERPRETERS = {name: java_to_swift_interpreter(interpreter) for (name, interpreter) in INTERPRETERS.items()}
def main():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print('Usage: test.py <interpreter> [filter]')
sys.exit(1)
if len(sys.argv) == 3:
filter_path = sys.argv[2]
if sys.argv[1] == 'all':
run_suites(sorted(INTERPRETERS.keys()))
elif sys.argv[1] == 'c':
run_suites(C_SUITES)
elif sys.argv[1] == 'swift':
run_suites(SWIFT_SUITES)
elif sys.argv[1] not in INTERPRETERS:
print('Unknown interpreter "{}"'.format(sys.argv[1]))
sys.exit(1)
else:
if not run_suite(sys.argv[1]):
sys.exit(1)
if __name__ == '__main__':
main()<commit_msg>Fix small bug in swift test runner<commit_after>#!/usr/bin/env python3
from os.path import dirname, realpath
import sys
from test import JAVA_SUITES, INTERPRETERS, run_suites, run_suite
import test
test.REPO_DIR = dirname(realpath(__file__))
SWIFT_SUITES = JAVA_SUITES
def java_to_swift_interpreter(interpreter):
if interpreter.language == 'java':
# interpreter.language = 'swift'
interpreter.args = ['.build/debug/slox']
return interpreter
INTERPRETERS = {name: java_to_swift_interpreter(interpreter) for (name, interpreter) in INTERPRETERS.items()}
def main():
if len(sys.argv) < 2 or len(sys.argv) > 3:
print('Usage: test.py <interpreter> [filter]')
sys.exit(1)
if len(sys.argv) == 3:
filter_path = sys.argv[2]
if sys.argv[1] == 'all':
run_suites(sorted(INTERPRETERS.keys()))
elif sys.argv[1] == 'c':
run_suites(C_SUITES)
elif sys.argv[1] == 'swift':
run_suites(SWIFT_SUITES)
elif sys.argv[1] not in INTERPRETERS:
print('Unknown interpreter "{}"'.format(sys.argv[1]))
sys.exit(1)
else:
if not run_suite(sys.argv[1]):
sys.exit(1)
if __name__ == '__main__':
main() |
8f484f4e79d50f71c0a593429f3e2dad0db56fff | microcosm_flask/matchers.py | microcosm_flask/matchers.py | """
Hamcrest matching support for JSON responses.
"""
from json import dumps, loads
from hamcrest.core.base_matcher import BaseMatcher
def prettify(value):
return dumps(
value,
sort_keys=True,
indent=4,
separators=(',', ': '),
)
class JSON(object):
"""
Dictionary wrapper with JSON pretty-printing for Hamcrest's description.
"""
def __init__(self, dct):
self.dct = dct
def __getitem__(self, key):
return self.dct[key]
def describe_to(self, description):
description.append(prettify(self.dct))
def json_for(value):
if not isinstance(value, (dict, list)):
value = loads(value)
return JSON(value)
class JSONMatcher(BaseMatcher):
"""
Hamcrest matcher of a JSON encoded resource.
Subclasses must define `_matcher` and invoke `assert_that` within a request
context to ensure that Flask's `url_for` can be resolved.
Example:
with graph.app.test_request_context():
assert_that(json(response.data), matches_myresource(expected))
"""
def __init__(self, resource):
self.resource = resource
self.schema = self.schema_class()
self.expected = self.schema.dump(self.resource).data
@property
def schema_class(self):
raise NotImplementedError
def describe_to(self, description):
description.append_text("expected {}".format(prettify(self.expected)))
| """
Hamcrest matching support for JSON responses.
"""
from json import dumps, loads
from hamcrest.core.base_matcher import BaseMatcher
def prettify(value):
return dumps(
value,
sort_keys=True,
indent=4,
separators=(',', ': '),
)
class JSON(object):
"""
Dictionary wrapper with JSON pretty-printing for Hamcrest's description.
"""
def __init__(self, dct):
self.dct = dct
def __getitem__(self, key):
return self.dct[key]
def get(self, key):
return self.dct.get(key)
def describe_to(self, description):
description.append(prettify(self.dct))
def json_for(value):
if not isinstance(value, (dict, list)):
value = loads(value)
return JSON(value)
class JSONMatcher(BaseMatcher):
"""
Hamcrest matcher of a JSON encoded resource.
Subclasses must define `_matcher` and invoke `assert_that` within a request
context to ensure that Flask's `url_for` can be resolved.
Example:
with graph.app.test_request_context():
assert_that(json(response.data), matches_myresource(expected))
"""
def __init__(self, resource):
self.resource = resource
self.schema = self.schema_class()
self.expected = self.schema.dump(self.resource).data
@property
def schema_class(self):
raise NotImplementedError
def describe_to(self, description):
description.append_text("expected {}".format(prettify(self.expected)))
| Add get operation to JSON wrapper | Add get operation to JSON wrapper
| Python | apache-2.0 | globality-corp/microcosm-flask,globality-corp/microcosm-flask | """
Hamcrest matching support for JSON responses.
"""
from json import dumps, loads
from hamcrest.core.base_matcher import BaseMatcher
def prettify(value):
return dumps(
value,
sort_keys=True,
indent=4,
separators=(',', ': '),
)
class JSON(object):
"""
Dictionary wrapper with JSON pretty-printing for Hamcrest's description.
"""
def __init__(self, dct):
self.dct = dct
def __getitem__(self, key):
return self.dct[key]
def describe_to(self, description):
description.append(prettify(self.dct))
def json_for(value):
if not isinstance(value, (dict, list)):
value = loads(value)
return JSON(value)
class JSONMatcher(BaseMatcher):
"""
Hamcrest matcher of a JSON encoded resource.
Subclasses must define `_matcher` and invoke `assert_that` within a request
context to ensure that Flask's `url_for` can be resolved.
Example:
with graph.app.test_request_context():
assert_that(json(response.data), matches_myresource(expected))
"""
def __init__(self, resource):
self.resource = resource
self.schema = self.schema_class()
self.expected = self.schema.dump(self.resource).data
@property
def schema_class(self):
raise NotImplementedError
def describe_to(self, description):
description.append_text("expected {}".format(prettify(self.expected)))
Add get operation to JSON wrapper | """
Hamcrest matching support for JSON responses.
"""
from json import dumps, loads
from hamcrest.core.base_matcher import BaseMatcher
def prettify(value):
return dumps(
value,
sort_keys=True,
indent=4,
separators=(',', ': '),
)
class JSON(object):
"""
Dictionary wrapper with JSON pretty-printing for Hamcrest's description.
"""
def __init__(self, dct):
self.dct = dct
def __getitem__(self, key):
return self.dct[key]
def get(self, key):
return self.dct.get(key)
def describe_to(self, description):
description.append(prettify(self.dct))
def json_for(value):
if not isinstance(value, (dict, list)):
value = loads(value)
return JSON(value)
class JSONMatcher(BaseMatcher):
"""
Hamcrest matcher of a JSON encoded resource.
Subclasses must define `_matcher` and invoke `assert_that` within a request
context to ensure that Flask's `url_for` can be resolved.
Example:
with graph.app.test_request_context():
assert_that(json(response.data), matches_myresource(expected))
"""
def __init__(self, resource):
self.resource = resource
self.schema = self.schema_class()
self.expected = self.schema.dump(self.resource).data
@property
def schema_class(self):
raise NotImplementedError
def describe_to(self, description):
description.append_text("expected {}".format(prettify(self.expected)))
| <commit_before>"""
Hamcrest matching support for JSON responses.
"""
from json import dumps, loads
from hamcrest.core.base_matcher import BaseMatcher
def prettify(value):
return dumps(
value,
sort_keys=True,
indent=4,
separators=(',', ': '),
)
class JSON(object):
"""
Dictionary wrapper with JSON pretty-printing for Hamcrest's description.
"""
def __init__(self, dct):
self.dct = dct
def __getitem__(self, key):
return self.dct[key]
def describe_to(self, description):
description.append(prettify(self.dct))
def json_for(value):
if not isinstance(value, (dict, list)):
value = loads(value)
return JSON(value)
class JSONMatcher(BaseMatcher):
"""
Hamcrest matcher of a JSON encoded resource.
Subclasses must define `_matcher` and invoke `assert_that` within a request
context to ensure that Flask's `url_for` can be resolved.
Example:
with graph.app.test_request_context():
assert_that(json(response.data), matches_myresource(expected))
"""
def __init__(self, resource):
self.resource = resource
self.schema = self.schema_class()
self.expected = self.schema.dump(self.resource).data
@property
def schema_class(self):
raise NotImplementedError
def describe_to(self, description):
description.append_text("expected {}".format(prettify(self.expected)))
<commit_msg>Add get operation to JSON wrapper<commit_after> | """
Hamcrest matching support for JSON responses.
"""
from json import dumps, loads
from hamcrest.core.base_matcher import BaseMatcher
def prettify(value):
return dumps(
value,
sort_keys=True,
indent=4,
separators=(',', ': '),
)
class JSON(object):
"""
Dictionary wrapper with JSON pretty-printing for Hamcrest's description.
"""
def __init__(self, dct):
self.dct = dct
def __getitem__(self, key):
return self.dct[key]
def get(self, key):
return self.dct.get(key)
def describe_to(self, description):
description.append(prettify(self.dct))
def json_for(value):
if not isinstance(value, (dict, list)):
value = loads(value)
return JSON(value)
class JSONMatcher(BaseMatcher):
"""
Hamcrest matcher of a JSON encoded resource.
Subclasses must define `_matcher` and invoke `assert_that` within a request
context to ensure that Flask's `url_for` can be resolved.
Example:
with graph.app.test_request_context():
assert_that(json(response.data), matches_myresource(expected))
"""
def __init__(self, resource):
self.resource = resource
self.schema = self.schema_class()
self.expected = self.schema.dump(self.resource).data
@property
def schema_class(self):
raise NotImplementedError
def describe_to(self, description):
description.append_text("expected {}".format(prettify(self.expected)))
| """
Hamcrest matching support for JSON responses.
"""
from json import dumps, loads
from hamcrest.core.base_matcher import BaseMatcher
def prettify(value):
return dumps(
value,
sort_keys=True,
indent=4,
separators=(',', ': '),
)
class JSON(object):
"""
Dictionary wrapper with JSON pretty-printing for Hamcrest's description.
"""
def __init__(self, dct):
self.dct = dct
def __getitem__(self, key):
return self.dct[key]
def describe_to(self, description):
description.append(prettify(self.dct))
def json_for(value):
if not isinstance(value, (dict, list)):
value = loads(value)
return JSON(value)
class JSONMatcher(BaseMatcher):
"""
Hamcrest matcher of a JSON encoded resource.
Subclasses must define `_matcher` and invoke `assert_that` within a request
context to ensure that Flask's `url_for` can be resolved.
Example:
with graph.app.test_request_context():
assert_that(json(response.data), matches_myresource(expected))
"""
def __init__(self, resource):
self.resource = resource
self.schema = self.schema_class()
self.expected = self.schema.dump(self.resource).data
@property
def schema_class(self):
raise NotImplementedError
def describe_to(self, description):
description.append_text("expected {}".format(prettify(self.expected)))
Add get operation to JSON wrapper"""
Hamcrest matching support for JSON responses.
"""
from json import dumps, loads
from hamcrest.core.base_matcher import BaseMatcher
def prettify(value):
return dumps(
value,
sort_keys=True,
indent=4,
separators=(',', ': '),
)
class JSON(object):
"""
Dictionary wrapper with JSON pretty-printing for Hamcrest's description.
"""
def __init__(self, dct):
self.dct = dct
def __getitem__(self, key):
return self.dct[key]
def get(self, key):
return self.dct.get(key)
def describe_to(self, description):
description.append(prettify(self.dct))
def json_for(value):
if not isinstance(value, (dict, list)):
value = loads(value)
return JSON(value)
class JSONMatcher(BaseMatcher):
"""
Hamcrest matcher of a JSON encoded resource.
Subclasses must define `_matcher` and invoke `assert_that` within a request
context to ensure that Flask's `url_for` can be resolved.
Example:
with graph.app.test_request_context():
assert_that(json(response.data), matches_myresource(expected))
"""
def __init__(self, resource):
self.resource = resource
self.schema = self.schema_class()
self.expected = self.schema.dump(self.resource).data
@property
def schema_class(self):
raise NotImplementedError
def describe_to(self, description):
description.append_text("expected {}".format(prettify(self.expected)))
| <commit_before>"""
Hamcrest matching support for JSON responses.
"""
from json import dumps, loads
from hamcrest.core.base_matcher import BaseMatcher
def prettify(value):
return dumps(
value,
sort_keys=True,
indent=4,
separators=(',', ': '),
)
class JSON(object):
"""
Dictionary wrapper with JSON pretty-printing for Hamcrest's description.
"""
def __init__(self, dct):
self.dct = dct
def __getitem__(self, key):
return self.dct[key]
def describe_to(self, description):
description.append(prettify(self.dct))
def json_for(value):
if not isinstance(value, (dict, list)):
value = loads(value)
return JSON(value)
class JSONMatcher(BaseMatcher):
"""
Hamcrest matcher of a JSON encoded resource.
Subclasses must define `_matcher` and invoke `assert_that` within a request
context to ensure that Flask's `url_for` can be resolved.
Example:
with graph.app.test_request_context():
assert_that(json(response.data), matches_myresource(expected))
"""
def __init__(self, resource):
self.resource = resource
self.schema = self.schema_class()
self.expected = self.schema.dump(self.resource).data
@property
def schema_class(self):
raise NotImplementedError
def describe_to(self, description):
description.append_text("expected {}".format(prettify(self.expected)))
<commit_msg>Add get operation to JSON wrapper<commit_after>"""
Hamcrest matching support for JSON responses.
"""
from json import dumps, loads
from hamcrest.core.base_matcher import BaseMatcher
def prettify(value):
return dumps(
value,
sort_keys=True,
indent=4,
separators=(',', ': '),
)
class JSON(object):
"""
Dictionary wrapper with JSON pretty-printing for Hamcrest's description.
"""
def __init__(self, dct):
self.dct = dct
def __getitem__(self, key):
return self.dct[key]
def get(self, key):
return self.dct.get(key)
def describe_to(self, description):
description.append(prettify(self.dct))
def json_for(value):
if not isinstance(value, (dict, list)):
value = loads(value)
return JSON(value)
class JSONMatcher(BaseMatcher):
"""
Hamcrest matcher of a JSON encoded resource.
Subclasses must define `_matcher` and invoke `assert_that` within a request
context to ensure that Flask's `url_for` can be resolved.
Example:
with graph.app.test_request_context():
assert_that(json(response.data), matches_myresource(expected))
"""
def __init__(self, resource):
self.resource = resource
self.schema = self.schema_class()
self.expected = self.schema.dump(self.resource).data
@property
def schema_class(self):
raise NotImplementedError
def describe_to(self, description):
description.append_text("expected {}".format(prettify(self.expected)))
|
a326f2daad6817f426099518da77bc241fd9b51e | bibpy/doi/__init__.py | bibpy/doi/__init__.py | # -*- coding: utf-8 -*-
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def download(doi, source='http://dx.doi.org/{0}', raw=False):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
return contents if raw else bibpy.read_string(contents).entries[0]
finally:
handle.close()
| # -*- coding: utf-8 -*-
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def retrieve(doi, source='http://dx.doi.org/{0}', raw=False, **options):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
if raw:
return contents
else:
return bibpy.read_string(contents, **options).entries[0]
finally:
handle.close()
| Rename doi function, add keyword options | Rename doi function, add keyword options
| Python | mit | MisanthropicBit/bibpy,MisanthropicBit/bibpy | # -*- coding: utf-8 -*-
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def download(doi, source='http://dx.doi.org/{0}', raw=False):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
return contents if raw else bibpy.read_string(contents).entries[0]
finally:
handle.close()
Rename doi function, add keyword options | # -*- coding: utf-8 -*-
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def retrieve(doi, source='http://dx.doi.org/{0}', raw=False, **options):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
if raw:
return contents
else:
return bibpy.read_string(contents, **options).entries[0]
finally:
handle.close()
| <commit_before># -*- coding: utf-8 -*-
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def download(doi, source='http://dx.doi.org/{0}', raw=False):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
return contents if raw else bibpy.read_string(contents).entries[0]
finally:
handle.close()
<commit_msg>Rename doi function, add keyword options<commit_after> | # -*- coding: utf-8 -*-
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def retrieve(doi, source='http://dx.doi.org/{0}', raw=False, **options):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
if raw:
return contents
else:
return bibpy.read_string(contents, **options).entries[0]
finally:
handle.close()
| # -*- coding: utf-8 -*-
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def download(doi, source='http://dx.doi.org/{0}', raw=False):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
return contents if raw else bibpy.read_string(contents).entries[0]
finally:
handle.close()
Rename doi function, add keyword options# -*- coding: utf-8 -*-
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def retrieve(doi, source='http://dx.doi.org/{0}', raw=False, **options):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
if raw:
return contents
else:
return bibpy.read_string(contents, **options).entries[0]
finally:
handle.close()
| <commit_before># -*- coding: utf-8 -*-
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def download(doi, source='http://dx.doi.org/{0}', raw=False):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
return contents if raw else bibpy.read_string(contents).entries[0]
finally:
handle.close()
<commit_msg>Rename doi function, add keyword options<commit_after># -*- coding: utf-8 -*-
"""Tools for downloading bibtex files from digital object identifiers."""
import bibpy
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
def retrieve(doi, source='http://dx.doi.org/{0}', raw=False, **options):
"""Download a bibtex file specified by a digital object identifier.
The source is a URL containing a single format specifier which is where the
requested doi should appear.
By default, the bibtex string from the doi is parsed by bibpy. Specify
raw=True to get the raw bibtex string instead.
"""
req = Request(source.format(doi))
req.add_header('accept', 'application/x-bibtex')
try:
handle = urlopen(req)
contents = handle.read()
if raw:
return contents
else:
return bibpy.read_string(contents, **options).entries[0]
finally:
handle.close()
|
8cceb96ae2d8352107dc2e03b336e84e9f2bdfb3 | partner_feeds/templatetags/partner_feed_tags.py | partner_feeds/templatetags/partner_feed_tags.py | from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
partner = Partner.objects.get(name=name)
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners | from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
try:
partner = Partner.objects.get(name=name)
except Partner.DoesNotExist:
continue
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners | Make Django template tag forgiving of nonexistent partners. | Make Django template tag forgiving of nonexistent partners.
| Python | bsd-2-clause | theatlantic/django-partner-feeds | from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
partner = Partner.objects.get(name=name)
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partnersMake Django template tag forgiving of nonexistent partners. | from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
try:
partner = Partner.objects.get(name=name)
except Partner.DoesNotExist:
continue
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners | <commit_before>from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
partner = Partner.objects.get(name=name)
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners<commit_msg>Make Django template tag forgiving of nonexistent partners.<commit_after> | from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
try:
partner = Partner.objects.get(name=name)
except Partner.DoesNotExist:
continue
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners | from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
partner = Partner.objects.get(name=name)
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partnersMake Django template tag forgiving of nonexistent partners.from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
try:
partner = Partner.objects.get(name=name)
except Partner.DoesNotExist:
continue
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners | <commit_before>from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
partner = Partner.objects.get(name=name)
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners<commit_msg>Make Django template tag forgiving of nonexistent partners.<commit_after>from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
try:
partner = Partner.objects.get(name=name)
except Partner.DoesNotExist:
continue
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners |
f0bcc52a73d4efcc498e9c6d180552188af2359d | IPython/html/kernelspecs/handlers.py | IPython/html/kernelspecs/handlers.py | from tornado import web
from ..base.handlers import IPythonHandler
from ..services.kernelspecs.handlers import kernel_name_regex
class KernelSpecResourceHandler(web.StaticFileHandler, IPythonHandler):
SUPPORTED_METHODS = ('GET', 'HEAD')
def initialize(self):
web.StaticFileHandler.initialize(self, path='')
@web.authenticated
def get(self, kernel_name, path, include_body=True):
ksm = self.kernel_spec_manager
try:
self.root = ksm.get_kernel_spec(kernel_name).resource_dir
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.log.debug("Serving kernel resource from: %s", self.root)
return web.StaticFileHandler.get(self, path, include_body=include_body)
@web.authenticated
def head(self, kernel_name, path):
self.get(kernel_name, path, include_body=False)
default_handlers = [
(r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
] | from tornado import web
from ..base.handlers import IPythonHandler
from ..services.kernelspecs.handlers import kernel_name_regex
class KernelSpecResourceHandler(web.StaticFileHandler, IPythonHandler):
SUPPORTED_METHODS = ('GET', 'HEAD')
def initialize(self):
web.StaticFileHandler.initialize(self, path='')
@web.authenticated
def get(self, kernel_name, path, include_body=True):
ksm = self.kernel_spec_manager
try:
self.root = ksm.get_kernel_spec(kernel_name).resource_dir
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.log.debug("Serving kernel resource from: %s", self.root)
return web.StaticFileHandler.get(self, path, include_body=include_body)
@web.authenticated
def head(self, kernel_name, path):
return self.get(kernel_name, path, include_body=False)
default_handlers = [
(r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
] | Fix HEAD requests for kernelspec resources | Fix HEAD requests for kernelspec resources
Closes gh-7237
Closes gh-7258
StaticFileHandler.get() is a coroutine. When Tornado calls a handler
method, it uses the return value to determine whether or not it's a
coroutine. So when head() calls get(), it needs to pass the return value
on for Tornado to handle it properly.
| Python | bsd-3-clause | ipython/ipython,ipython/ipython | from tornado import web
from ..base.handlers import IPythonHandler
from ..services.kernelspecs.handlers import kernel_name_regex
class KernelSpecResourceHandler(web.StaticFileHandler, IPythonHandler):
SUPPORTED_METHODS = ('GET', 'HEAD')
def initialize(self):
web.StaticFileHandler.initialize(self, path='')
@web.authenticated
def get(self, kernel_name, path, include_body=True):
ksm = self.kernel_spec_manager
try:
self.root = ksm.get_kernel_spec(kernel_name).resource_dir
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.log.debug("Serving kernel resource from: %s", self.root)
return web.StaticFileHandler.get(self, path, include_body=include_body)
@web.authenticated
def head(self, kernel_name, path):
self.get(kernel_name, path, include_body=False)
default_handlers = [
(r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
]Fix HEAD requests for kernelspec resources
Closes gh-7237
Closes gh-7258
StaticFileHandler.get() is a coroutine. When Tornado calls a handler
method, it uses the return value to determine whether or not it's a
coroutine. So when head() calls get(), it needs to pass the return value
on for Tornado to handle it properly. | from tornado import web
from ..base.handlers import IPythonHandler
from ..services.kernelspecs.handlers import kernel_name_regex
class KernelSpecResourceHandler(web.StaticFileHandler, IPythonHandler):
SUPPORTED_METHODS = ('GET', 'HEAD')
def initialize(self):
web.StaticFileHandler.initialize(self, path='')
@web.authenticated
def get(self, kernel_name, path, include_body=True):
ksm = self.kernel_spec_manager
try:
self.root = ksm.get_kernel_spec(kernel_name).resource_dir
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.log.debug("Serving kernel resource from: %s", self.root)
return web.StaticFileHandler.get(self, path, include_body=include_body)
@web.authenticated
def head(self, kernel_name, path):
return self.get(kernel_name, path, include_body=False)
default_handlers = [
(r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
] | <commit_before>from tornado import web
from ..base.handlers import IPythonHandler
from ..services.kernelspecs.handlers import kernel_name_regex
class KernelSpecResourceHandler(web.StaticFileHandler, IPythonHandler):
SUPPORTED_METHODS = ('GET', 'HEAD')
def initialize(self):
web.StaticFileHandler.initialize(self, path='')
@web.authenticated
def get(self, kernel_name, path, include_body=True):
ksm = self.kernel_spec_manager
try:
self.root = ksm.get_kernel_spec(kernel_name).resource_dir
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.log.debug("Serving kernel resource from: %s", self.root)
return web.StaticFileHandler.get(self, path, include_body=include_body)
@web.authenticated
def head(self, kernel_name, path):
self.get(kernel_name, path, include_body=False)
default_handlers = [
(r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
]<commit_msg>Fix HEAD requests for kernelspec resources
Closes gh-7237
Closes gh-7258
StaticFileHandler.get() is a coroutine. When Tornado calls a handler
method, it uses the return value to determine whether or not it's a
coroutine. So when head() calls get(), it needs to pass the return value
on for Tornado to handle it properly.<commit_after> | from tornado import web
from ..base.handlers import IPythonHandler
from ..services.kernelspecs.handlers import kernel_name_regex
class KernelSpecResourceHandler(web.StaticFileHandler, IPythonHandler):
SUPPORTED_METHODS = ('GET', 'HEAD')
def initialize(self):
web.StaticFileHandler.initialize(self, path='')
@web.authenticated
def get(self, kernel_name, path, include_body=True):
ksm = self.kernel_spec_manager
try:
self.root = ksm.get_kernel_spec(kernel_name).resource_dir
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.log.debug("Serving kernel resource from: %s", self.root)
return web.StaticFileHandler.get(self, path, include_body=include_body)
@web.authenticated
def head(self, kernel_name, path):
return self.get(kernel_name, path, include_body=False)
default_handlers = [
(r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
] | from tornado import web
from ..base.handlers import IPythonHandler
from ..services.kernelspecs.handlers import kernel_name_regex
class KernelSpecResourceHandler(web.StaticFileHandler, IPythonHandler):
SUPPORTED_METHODS = ('GET', 'HEAD')
def initialize(self):
web.StaticFileHandler.initialize(self, path='')
@web.authenticated
def get(self, kernel_name, path, include_body=True):
ksm = self.kernel_spec_manager
try:
self.root = ksm.get_kernel_spec(kernel_name).resource_dir
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.log.debug("Serving kernel resource from: %s", self.root)
return web.StaticFileHandler.get(self, path, include_body=include_body)
@web.authenticated
def head(self, kernel_name, path):
self.get(kernel_name, path, include_body=False)
default_handlers = [
(r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
]Fix HEAD requests for kernelspec resources
Closes gh-7237
Closes gh-7258
StaticFileHandler.get() is a coroutine. When Tornado calls a handler
method, it uses the return value to determine whether or not it's a
coroutine. So when head() calls get(), it needs to pass the return value
on for Tornado to handle it properly.from tornado import web
from ..base.handlers import IPythonHandler
from ..services.kernelspecs.handlers import kernel_name_regex
class KernelSpecResourceHandler(web.StaticFileHandler, IPythonHandler):
SUPPORTED_METHODS = ('GET', 'HEAD')
def initialize(self):
web.StaticFileHandler.initialize(self, path='')
@web.authenticated
def get(self, kernel_name, path, include_body=True):
ksm = self.kernel_spec_manager
try:
self.root = ksm.get_kernel_spec(kernel_name).resource_dir
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.log.debug("Serving kernel resource from: %s", self.root)
return web.StaticFileHandler.get(self, path, include_body=include_body)
@web.authenticated
def head(self, kernel_name, path):
return self.get(kernel_name, path, include_body=False)
default_handlers = [
(r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
] | <commit_before>from tornado import web
from ..base.handlers import IPythonHandler
from ..services.kernelspecs.handlers import kernel_name_regex
class KernelSpecResourceHandler(web.StaticFileHandler, IPythonHandler):
SUPPORTED_METHODS = ('GET', 'HEAD')
def initialize(self):
web.StaticFileHandler.initialize(self, path='')
@web.authenticated
def get(self, kernel_name, path, include_body=True):
ksm = self.kernel_spec_manager
try:
self.root = ksm.get_kernel_spec(kernel_name).resource_dir
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.log.debug("Serving kernel resource from: %s", self.root)
return web.StaticFileHandler.get(self, path, include_body=include_body)
@web.authenticated
def head(self, kernel_name, path):
self.get(kernel_name, path, include_body=False)
default_handlers = [
(r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
]<commit_msg>Fix HEAD requests for kernelspec resources
Closes gh-7237
Closes gh-7258
StaticFileHandler.get() is a coroutine. When Tornado calls a handler
method, it uses the return value to determine whether or not it's a
coroutine. So when head() calls get(), it needs to pass the return value
on for Tornado to handle it properly.<commit_after>from tornado import web
from ..base.handlers import IPythonHandler
from ..services.kernelspecs.handlers import kernel_name_regex
class KernelSpecResourceHandler(web.StaticFileHandler, IPythonHandler):
SUPPORTED_METHODS = ('GET', 'HEAD')
def initialize(self):
web.StaticFileHandler.initialize(self, path='')
@web.authenticated
def get(self, kernel_name, path, include_body=True):
ksm = self.kernel_spec_manager
try:
self.root = ksm.get_kernel_spec(kernel_name).resource_dir
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.log.debug("Serving kernel resource from: %s", self.root)
return web.StaticFileHandler.get(self, path, include_body=include_body)
@web.authenticated
def head(self, kernel_name, path):
return self.get(kernel_name, path, include_body=False)
default_handlers = [
(r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler),
] |
27ee2752a71ee415154c40e1978edb9d5221a331 | IPython/lib/tests/test_deepreload.py | IPython/lib/tests/test_deepreload.py | """Test suite for the deepreload module."""
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
| # -*- coding: utf-8 -*-
"""Test suite for the deepreload module."""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
#-----------------------------------------------------------------------------
# Test functions begin
#-----------------------------------------------------------------------------
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
| Reformat test to a standard style. | Reformat test to a standard style.
| Python | bsd-3-clause | ipython/ipython,ipython/ipython | """Test suite for the deepreload module."""
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
Reformat test to a standard style. | # -*- coding: utf-8 -*-
"""Test suite for the deepreload module."""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
#-----------------------------------------------------------------------------
# Test functions begin
#-----------------------------------------------------------------------------
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
| <commit_before>"""Test suite for the deepreload module."""
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
<commit_msg>Reformat test to a standard style.<commit_after> | # -*- coding: utf-8 -*-
"""Test suite for the deepreload module."""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
#-----------------------------------------------------------------------------
# Test functions begin
#-----------------------------------------------------------------------------
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
| """Test suite for the deepreload module."""
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
Reformat test to a standard style.# -*- coding: utf-8 -*-
"""Test suite for the deepreload module."""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
#-----------------------------------------------------------------------------
# Test functions begin
#-----------------------------------------------------------------------------
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
| <commit_before>"""Test suite for the deepreload module."""
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
<commit_msg>Reformat test to a standard style.<commit_after># -*- coding: utf-8 -*-
"""Test suite for the deepreload module."""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
#-----------------------------------------------------------------------------
# Test functions begin
#-----------------------------------------------------------------------------
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
|
c2f2acc518f017a0d7b8ccfa6640595f2769aa98 | nib/plugins/prettyurls.py | nib/plugins/prettyurls.py | from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
from nib import Resource, Processor, after
apache_redirects = b"""
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)$ /$1/index.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)$ /$1.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
"""
apache_redirects_base = b"""
RewriteEngine on
RewriteBase /
"""
@after
class PrettyURLProcessor(Processor):
def process(self, documents, resources):
for document in documents:
filename = path.basename(document.uri)
if filename == 'index.html':
document.uri = path.dirname(document.path)
elif document.extension == '.html':
document.uri = document.path
htaccess = None
for resource in resources:
if resource.path == '.htaccess':
htaccess = resource
if not htaccess:
htaccess = Resource(path='.htaccess',
content=apache_redirects_base)
resources.append(htaccess)
htaccess.content += apache_redirects
return documents, resources
| from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
from nib import Resource, Processor, after
apache_redirects = b"""
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)$ /$1/index.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)$ /$1.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
"""
apache_redirects_base = b"""
RewriteEngine on
RewriteBase /
"""
nginx_rules = b"""
location / {
#root {0};
index index.html;
try_files $uri $uri.html $uri/index.html;
}
"""
@after
class PrettyURLProcessor(Processor):
def process(self, documents, resources):
for document in documents:
filename = path.basename(document.uri)
if filename == 'index.html':
document.uri = path.dirname(document.path)
elif document.extension == '.html':
document.uri = document.path
htaccess = None
for resource in resources:
if resource.path == '.htaccess':
htaccess = resource
if not htaccess:
htaccess = Resource(path='.htaccess',
content=apache_redirects_base)
resources.append(htaccess)
htaccess.content += apache_redirects
nginx = Resource(path='.nginx',
content=nginx_rules)
resources.append(nginx)
return documents, resources
| Add nginx rules for pretty URLs | Add nginx rules for pretty URLs
| Python | mit | jreese/nib | from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
from nib import Resource, Processor, after
apache_redirects = b"""
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)$ /$1/index.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)$ /$1.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
"""
apache_redirects_base = b"""
RewriteEngine on
RewriteBase /
"""
@after
class PrettyURLProcessor(Processor):
def process(self, documents, resources):
for document in documents:
filename = path.basename(document.uri)
if filename == 'index.html':
document.uri = path.dirname(document.path)
elif document.extension == '.html':
document.uri = document.path
htaccess = None
for resource in resources:
if resource.path == '.htaccess':
htaccess = resource
if not htaccess:
htaccess = Resource(path='.htaccess',
content=apache_redirects_base)
resources.append(htaccess)
htaccess.content += apache_redirects
return documents, resources
Add nginx rules for pretty URLs | from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
from nib import Resource, Processor, after
apache_redirects = b"""
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)$ /$1/index.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)$ /$1.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
"""
apache_redirects_base = b"""
RewriteEngine on
RewriteBase /
"""
nginx_rules = b"""
location / {
#root {0};
index index.html;
try_files $uri $uri.html $uri/index.html;
}
"""
@after
class PrettyURLProcessor(Processor):
def process(self, documents, resources):
for document in documents:
filename = path.basename(document.uri)
if filename == 'index.html':
document.uri = path.dirname(document.path)
elif document.extension == '.html':
document.uri = document.path
htaccess = None
for resource in resources:
if resource.path == '.htaccess':
htaccess = resource
if not htaccess:
htaccess = Resource(path='.htaccess',
content=apache_redirects_base)
resources.append(htaccess)
htaccess.content += apache_redirects
nginx = Resource(path='.nginx',
content=nginx_rules)
resources.append(nginx)
return documents, resources
| <commit_before>from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
from nib import Resource, Processor, after
apache_redirects = b"""
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)$ /$1/index.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)$ /$1.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
"""
apache_redirects_base = b"""
RewriteEngine on
RewriteBase /
"""
@after
class PrettyURLProcessor(Processor):
def process(self, documents, resources):
for document in documents:
filename = path.basename(document.uri)
if filename == 'index.html':
document.uri = path.dirname(document.path)
elif document.extension == '.html':
document.uri = document.path
htaccess = None
for resource in resources:
if resource.path == '.htaccess':
htaccess = resource
if not htaccess:
htaccess = Resource(path='.htaccess',
content=apache_redirects_base)
resources.append(htaccess)
htaccess.content += apache_redirects
return documents, resources
<commit_msg>Add nginx rules for pretty URLs<commit_after> | from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
from nib import Resource, Processor, after
apache_redirects = b"""
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)$ /$1/index.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)$ /$1.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
"""
apache_redirects_base = b"""
RewriteEngine on
RewriteBase /
"""
nginx_rules = b"""
location / {
#root {0};
index index.html;
try_files $uri $uri.html $uri/index.html;
}
"""
@after
class PrettyURLProcessor(Processor):
def process(self, documents, resources):
for document in documents:
filename = path.basename(document.uri)
if filename == 'index.html':
document.uri = path.dirname(document.path)
elif document.extension == '.html':
document.uri = document.path
htaccess = None
for resource in resources:
if resource.path == '.htaccess':
htaccess = resource
if not htaccess:
htaccess = Resource(path='.htaccess',
content=apache_redirects_base)
resources.append(htaccess)
htaccess.content += apache_redirects
nginx = Resource(path='.nginx',
content=nginx_rules)
resources.append(nginx)
return documents, resources
| from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
from nib import Resource, Processor, after
apache_redirects = b"""
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)$ /$1/index.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)$ /$1.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
"""
apache_redirects_base = b"""
RewriteEngine on
RewriteBase /
"""
@after
class PrettyURLProcessor(Processor):
def process(self, documents, resources):
for document in documents:
filename = path.basename(document.uri)
if filename == 'index.html':
document.uri = path.dirname(document.path)
elif document.extension == '.html':
document.uri = document.path
htaccess = None
for resource in resources:
if resource.path == '.htaccess':
htaccess = resource
if not htaccess:
htaccess = Resource(path='.htaccess',
content=apache_redirects_base)
resources.append(htaccess)
htaccess.content += apache_redirects
return documents, resources
Add nginx rules for pretty URLsfrom __future__ import absolute_import, division, print_function, unicode_literals
from os import path
from nib import Resource, Processor, after
apache_redirects = b"""
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)$ /$1/index.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)$ /$1.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
"""
apache_redirects_base = b"""
RewriteEngine on
RewriteBase /
"""
nginx_rules = b"""
location / {
#root {0};
index index.html;
try_files $uri $uri.html $uri/index.html;
}
"""
@after
class PrettyURLProcessor(Processor):
def process(self, documents, resources):
for document in documents:
filename = path.basename(document.uri)
if filename == 'index.html':
document.uri = path.dirname(document.path)
elif document.extension == '.html':
document.uri = document.path
htaccess = None
for resource in resources:
if resource.path == '.htaccess':
htaccess = resource
if not htaccess:
htaccess = Resource(path='.htaccess',
content=apache_redirects_base)
resources.append(htaccess)
htaccess.content += apache_redirects
nginx = Resource(path='.nginx',
content=nginx_rules)
resources.append(nginx)
return documents, resources
| <commit_before>from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
from nib import Resource, Processor, after
apache_redirects = b"""
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)$ /$1/index.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)$ /$1.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
"""
apache_redirects_base = b"""
RewriteEngine on
RewriteBase /
"""
@after
class PrettyURLProcessor(Processor):
def process(self, documents, resources):
for document in documents:
filename = path.basename(document.uri)
if filename == 'index.html':
document.uri = path.dirname(document.path)
elif document.extension == '.html':
document.uri = document.path
htaccess = None
for resource in resources:
if resource.path == '.htaccess':
htaccess = resource
if not htaccess:
htaccess = Resource(path='.htaccess',
content=apache_redirects_base)
resources.append(htaccess)
htaccess.content += apache_redirects
return documents, resources
<commit_msg>Add nginx rules for pretty URLs<commit_after>from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
from nib import Resource, Processor, after
apache_redirects = b"""
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)$ /$1/index.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)$ /$1.html [L]
RewriteCond %{DOCUMENT_ROOT}/$1/index.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
RewriteCond %{DOCUMENT_ROOT}/$1.html -f
RewriteRule ^(.*)/$ /$1 [L,R]
"""
apache_redirects_base = b"""
RewriteEngine on
RewriteBase /
"""
nginx_rules = b"""
location / {
#root {0};
index index.html;
try_files $uri $uri.html $uri/index.html;
}
"""
@after
class PrettyURLProcessor(Processor):
def process(self, documents, resources):
for document in documents:
filename = path.basename(document.uri)
if filename == 'index.html':
document.uri = path.dirname(document.path)
elif document.extension == '.html':
document.uri = document.path
htaccess = None
for resource in resources:
if resource.path == '.htaccess':
htaccess = resource
if not htaccess:
htaccess = Resource(path='.htaccess',
content=apache_redirects_base)
resources.append(htaccess)
htaccess.content += apache_redirects
nginx = Resource(path='.nginx',
content=nginx_rules)
resources.append(nginx)
return documents, resources
|
cec7ee7e98498ec8d15f35c4aef77ea44baefe6b | adhocracy/tests/lib/test_text.py | adhocracy/tests/lib/test_text.py | from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_user
class TestText(TestController):
def test_render(self):
from adhocracy.lib.text import render
source = ('header\n'
'========')
result = render(source)
self.assertEqual(result, u'<h1>header</h1>\n')
def test_render_no_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=False)
self.assertEqual(result, u'<p>@pudo</p>\n')
def test_render_user_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=True)
self.assertTrue(u'/user/pudo"' in result)
| from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_user
class TestText(TestController):
def test_render(self):
from adhocracy.lib.text import render
source = ('header\n'
'========')
result = render(source)
self.assertEqual(result, u'<h1>header</h1>')
def test_render_no_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=False)
self.assertEqual(result, u'<p>@pudo</p>')
def test_render_user_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=True)
self.assertTrue(u'/user/pudo"' in result)
| Fix tests after markdown library change | Fix tests after markdown library change
Addendum to da03b3f033cce2b957a71cf6cb334a8c207c5047
| Python | agpl-3.0 | alkadis/vcv,phihag/adhocracy,phihag/adhocracy,phihag/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,alkadis/vcv,liqd/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy,SysTheron/adhocracy,SysTheron/adhocracy,alkadis/vcv,SysTheron/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy | from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_user
class TestText(TestController):
def test_render(self):
from adhocracy.lib.text import render
source = ('header\n'
'========')
result = render(source)
self.assertEqual(result, u'<h1>header</h1>\n')
def test_render_no_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=False)
self.assertEqual(result, u'<p>@pudo</p>\n')
def test_render_user_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=True)
self.assertTrue(u'/user/pudo"' in result)
Fix tests after markdown library change
Addendum to da03b3f033cce2b957a71cf6cb334a8c207c5047 | from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_user
class TestText(TestController):
def test_render(self):
from adhocracy.lib.text import render
source = ('header\n'
'========')
result = render(source)
self.assertEqual(result, u'<h1>header</h1>')
def test_render_no_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=False)
self.assertEqual(result, u'<p>@pudo</p>')
def test_render_user_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=True)
self.assertTrue(u'/user/pudo"' in result)
| <commit_before>from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_user
class TestText(TestController):
def test_render(self):
from adhocracy.lib.text import render
source = ('header\n'
'========')
result = render(source)
self.assertEqual(result, u'<h1>header</h1>\n')
def test_render_no_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=False)
self.assertEqual(result, u'<p>@pudo</p>\n')
def test_render_user_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=True)
self.assertTrue(u'/user/pudo"' in result)
<commit_msg>Fix tests after markdown library change
Addendum to da03b3f033cce2b957a71cf6cb334a8c207c5047<commit_after> | from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_user
class TestText(TestController):
def test_render(self):
from adhocracy.lib.text import render
source = ('header\n'
'========')
result = render(source)
self.assertEqual(result, u'<h1>header</h1>')
def test_render_no_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=False)
self.assertEqual(result, u'<p>@pudo</p>')
def test_render_user_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=True)
self.assertTrue(u'/user/pudo"' in result)
| from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_user
class TestText(TestController):
def test_render(self):
from adhocracy.lib.text import render
source = ('header\n'
'========')
result = render(source)
self.assertEqual(result, u'<h1>header</h1>\n')
def test_render_no_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=False)
self.assertEqual(result, u'<p>@pudo</p>\n')
def test_render_user_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=True)
self.assertTrue(u'/user/pudo"' in result)
Fix tests after markdown library change
Addendum to da03b3f033cce2b957a71cf6cb334a8c207c5047from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_user
class TestText(TestController):
def test_render(self):
from adhocracy.lib.text import render
source = ('header\n'
'========')
result = render(source)
self.assertEqual(result, u'<h1>header</h1>')
def test_render_no_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=False)
self.assertEqual(result, u'<p>@pudo</p>')
def test_render_user_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=True)
self.assertTrue(u'/user/pudo"' in result)
| <commit_before>from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_user
class TestText(TestController):
def test_render(self):
from adhocracy.lib.text import render
source = ('header\n'
'========')
result = render(source)
self.assertEqual(result, u'<h1>header</h1>\n')
def test_render_no_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=False)
self.assertEqual(result, u'<p>@pudo</p>\n')
def test_render_user_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=True)
self.assertTrue(u'/user/pudo"' in result)
<commit_msg>Fix tests after markdown library change
Addendum to da03b3f033cce2b957a71cf6cb334a8c207c5047<commit_after>from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_user
class TestText(TestController):
def test_render(self):
from adhocracy.lib.text import render
source = ('header\n'
'========')
result = render(source)
self.assertEqual(result, u'<h1>header</h1>')
def test_render_no_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=False)
self.assertEqual(result, u'<p>@pudo</p>')
def test_render_user_substitution(self):
from adhocracy.lib.text import render
tt_make_user('pudo')
source = '@pudo'
result = render(source, substitutions=True)
self.assertTrue(u'/user/pudo"' in result)
|
13208d4656adcf52a5842200ee1d9e079fdffc2b | bin/rate_limit_watcher.py | bin/rate_limit_watcher.py | #!/usr/bin/env python
import requests
URL = 'http://tutorials.pluralsight.com/gh_rate_limit'
def main():
resp = requests.get(URL)
if resp.status_code == 200:
print resp.content
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
Script to print out Github API rate limit for REPO_OWNER user i.e. the main
github user account used for the guides-cms application.
"""
import argparse
from datetime import datetime
import requests
DOMAIN = 'http://tutorials.pluralsight.com/'
URL = '/gh_rate_limit'
def main(domain):
response = get_rate_limit(domain)
if response:
pprint(response)
def get_rate_limit(domain=DOMAIN):
"""Get rate limit as dictionary"""
url = '%s%s' % (domain, URL)
resp = requests.get(url)
if resp.status_code == 200:
return resp.json()
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
return {}
def pprint(rate_limit):
"""
Pretty print rate limit dictionary to be easily parsable and readable
across multiple lines
"""
# Ignoring the 'rate' key b/c github API claims this will be removed in
# next major version:
# https://developer.github.com/v3/rate_limit/#deprecation-notice
def print_(name, limits):
date_ = datetime.utcfromtimestamp(limits[name]['reset'])
print '%8s remaining: %4s limit: %4s reset: %s' % (
name,
limits[name]['remaining'],
limits[name]['limit'],
date_.strftime('%d-%m-%Y %H:%M:%S'))
print_('core', rate_limit['resources'])
print_('search', rate_limit['resources'])
#u'resources': {u'core': {u'reset': 1462781427, u'limit': 5000, u'remaining': 4923}, u'search': {u'reset': 1462780271, u'limit': 30, u'remaining': 30}}}
def _parse_args():
"""Parse args and get dictionary back"""
parser = argparse.ArgumentParser(description='Get Github.com rate limit')
parser.add_argument('-d', '--domain', action='store', required=False,
default=DOMAIN,
help='Domain to ping for rate limit JSON response (default: %s)' % (DOMAIN))
# Turn odd argparse namespace object into a plain dict
return vars(parser.parse_args())
if __name__ == '__main__':
main(_parse_args()['domain'])
| Print rate limits from new JSON response url in a pretty, parsable format | Print rate limits from new JSON response url in a pretty, parsable format
| Python | agpl-3.0 | paulocheque/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms,paulocheque/guides-cms | #!/usr/bin/env python
import requests
URL = 'http://tutorials.pluralsight.com/gh_rate_limit'
def main():
resp = requests.get(URL)
if resp.status_code == 200:
print resp.content
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
if __name__ == '__main__':
main()
Print rate limits from new JSON response url in a pretty, parsable format | #!/usr/bin/env python
"""
Script to print out Github API rate limit for REPO_OWNER user i.e. the main
github user account used for the guides-cms application.
"""
import argparse
from datetime import datetime
import requests
DOMAIN = 'http://tutorials.pluralsight.com/'
URL = '/gh_rate_limit'
def main(domain):
response = get_rate_limit(domain)
if response:
pprint(response)
def get_rate_limit(domain=DOMAIN):
"""Get rate limit as dictionary"""
url = '%s%s' % (domain, URL)
resp = requests.get(url)
if resp.status_code == 200:
return resp.json()
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
return {}
def pprint(rate_limit):
"""
Pretty print rate limit dictionary to be easily parsable and readable
across multiple lines
"""
# Ignoring the 'rate' key b/c github API claims this will be removed in
# next major version:
# https://developer.github.com/v3/rate_limit/#deprecation-notice
def print_(name, limits):
date_ = datetime.utcfromtimestamp(limits[name]['reset'])
print '%8s remaining: %4s limit: %4s reset: %s' % (
name,
limits[name]['remaining'],
limits[name]['limit'],
date_.strftime('%d-%m-%Y %H:%M:%S'))
print_('core', rate_limit['resources'])
print_('search', rate_limit['resources'])
#u'resources': {u'core': {u'reset': 1462781427, u'limit': 5000, u'remaining': 4923}, u'search': {u'reset': 1462780271, u'limit': 30, u'remaining': 30}}}
def _parse_args():
"""Parse args and get dictionary back"""
parser = argparse.ArgumentParser(description='Get Github.com rate limit')
parser.add_argument('-d', '--domain', action='store', required=False,
default=DOMAIN,
help='Domain to ping for rate limit JSON response (default: %s)' % (DOMAIN))
# Turn odd argparse namespace object into a plain dict
return vars(parser.parse_args())
if __name__ == '__main__':
main(_parse_args()['domain'])
| <commit_before>#!/usr/bin/env python
import requests
URL = 'http://tutorials.pluralsight.com/gh_rate_limit'
def main():
resp = requests.get(URL)
if resp.status_code == 200:
print resp.content
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
if __name__ == '__main__':
main()
<commit_msg>Print rate limits from new JSON response url in a pretty, parsable format<commit_after> | #!/usr/bin/env python
"""
Script to print out Github API rate limit for REPO_OWNER user i.e. the main
github user account used for the guides-cms application.
"""
import argparse
from datetime import datetime
import requests
DOMAIN = 'http://tutorials.pluralsight.com/'
URL = '/gh_rate_limit'
def main(domain):
response = get_rate_limit(domain)
if response:
pprint(response)
def get_rate_limit(domain=DOMAIN):
"""Get rate limit as dictionary"""
url = '%s%s' % (domain, URL)
resp = requests.get(url)
if resp.status_code == 200:
return resp.json()
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
return {}
def pprint(rate_limit):
"""
Pretty print rate limit dictionary to be easily parsable and readable
across multiple lines
"""
# Ignoring the 'rate' key b/c github API claims this will be removed in
# next major version:
# https://developer.github.com/v3/rate_limit/#deprecation-notice
def print_(name, limits):
date_ = datetime.utcfromtimestamp(limits[name]['reset'])
print '%8s remaining: %4s limit: %4s reset: %s' % (
name,
limits[name]['remaining'],
limits[name]['limit'],
date_.strftime('%d-%m-%Y %H:%M:%S'))
print_('core', rate_limit['resources'])
print_('search', rate_limit['resources'])
#u'resources': {u'core': {u'reset': 1462781427, u'limit': 5000, u'remaining': 4923}, u'search': {u'reset': 1462780271, u'limit': 30, u'remaining': 30}}}
def _parse_args():
"""Parse args and get dictionary back"""
parser = argparse.ArgumentParser(description='Get Github.com rate limit')
parser.add_argument('-d', '--domain', action='store', required=False,
default=DOMAIN,
help='Domain to ping for rate limit JSON response (default: %s)' % (DOMAIN))
# Turn odd argparse namespace object into a plain dict
return vars(parser.parse_args())
if __name__ == '__main__':
main(_parse_args()['domain'])
| #!/usr/bin/env python
import requests
URL = 'http://tutorials.pluralsight.com/gh_rate_limit'
def main():
resp = requests.get(URL)
if resp.status_code == 200:
print resp.content
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
if __name__ == '__main__':
main()
Print rate limits from new JSON response url in a pretty, parsable format#!/usr/bin/env python
"""
Script to print out Github API rate limit for REPO_OWNER user i.e. the main
github user account used for the guides-cms application.
"""
import argparse
from datetime import datetime
import requests
DOMAIN = 'http://tutorials.pluralsight.com/'
URL = '/gh_rate_limit'
def main(domain):
response = get_rate_limit(domain)
if response:
pprint(response)
def get_rate_limit(domain=DOMAIN):
"""Get rate limit as dictionary"""
url = '%s%s' % (domain, URL)
resp = requests.get(url)
if resp.status_code == 200:
return resp.json()
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
return {}
def pprint(rate_limit):
"""
Pretty print rate limit dictionary to be easily parsable and readable
across multiple lines
"""
# Ignoring the 'rate' key b/c github API claims this will be removed in
# next major version:
# https://developer.github.com/v3/rate_limit/#deprecation-notice
def print_(name, limits):
date_ = datetime.utcfromtimestamp(limits[name]['reset'])
print '%8s remaining: %4s limit: %4s reset: %s' % (
name,
limits[name]['remaining'],
limits[name]['limit'],
date_.strftime('%d-%m-%Y %H:%M:%S'))
print_('core', rate_limit['resources'])
print_('search', rate_limit['resources'])
#u'resources': {u'core': {u'reset': 1462781427, u'limit': 5000, u'remaining': 4923}, u'search': {u'reset': 1462780271, u'limit': 30, u'remaining': 30}}}
def _parse_args():
"""Parse args and get dictionary back"""
parser = argparse.ArgumentParser(description='Get Github.com rate limit')
parser.add_argument('-d', '--domain', action='store', required=False,
default=DOMAIN,
help='Domain to ping for rate limit JSON response (default: %s)' % (DOMAIN))
# Turn odd argparse namespace object into a plain dict
return vars(parser.parse_args())
if __name__ == '__main__':
main(_parse_args()['domain'])
| <commit_before>#!/usr/bin/env python
import requests
URL = 'http://tutorials.pluralsight.com/gh_rate_limit'
def main():
resp = requests.get(URL)
if resp.status_code == 200:
print resp.content
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
if __name__ == '__main__':
main()
<commit_msg>Print rate limits from new JSON response url in a pretty, parsable format<commit_after>#!/usr/bin/env python
"""
Script to print out Github API rate limit for REPO_OWNER user i.e. the main
github user account used for the guides-cms application.
"""
import argparse
from datetime import datetime
import requests
DOMAIN = 'http://tutorials.pluralsight.com/'
URL = '/gh_rate_limit'
def main(domain):
response = get_rate_limit(domain)
if response:
pprint(response)
def get_rate_limit(domain=DOMAIN):
"""Get rate limit as dictionary"""
url = '%s%s' % (domain, URL)
resp = requests.get(url)
if resp.status_code == 200:
return resp.json()
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
return {}
def pprint(rate_limit):
"""
Pretty print rate limit dictionary to be easily parsable and readable
across multiple lines
"""
# Ignoring the 'rate' key b/c github API claims this will be removed in
# next major version:
# https://developer.github.com/v3/rate_limit/#deprecation-notice
def print_(name, limits):
date_ = datetime.utcfromtimestamp(limits[name]['reset'])
print '%8s remaining: %4s limit: %4s reset: %s' % (
name,
limits[name]['remaining'],
limits[name]['limit'],
date_.strftime('%d-%m-%Y %H:%M:%S'))
print_('core', rate_limit['resources'])
print_('search', rate_limit['resources'])
#u'resources': {u'core': {u'reset': 1462781427, u'limit': 5000, u'remaining': 4923}, u'search': {u'reset': 1462780271, u'limit': 30, u'remaining': 30}}}
def _parse_args():
"""Parse args and get dictionary back"""
parser = argparse.ArgumentParser(description='Get Github.com rate limit')
parser.add_argument('-d', '--domain', action='store', required=False,
default=DOMAIN,
help='Domain to ping for rate limit JSON response (default: %s)' % (DOMAIN))
# Turn odd argparse namespace object into a plain dict
return vars(parser.parse_args())
if __name__ == '__main__':
main(_parse_args()['domain'])
|
f5a856b6c6c64ff8bba56a04a25bb639e98ff0e2 | navigator/settings/prod.py | navigator/settings/prod.py | from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29', '94.119.64.0/24', '178.208.163.0/24']
SECURE_SSL_REDIRECT = True
| from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = ['54.229.170.70']
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29', '94.119.64.0/24', '178.208.163.0/24']
ssl_check = os.environ.get('SSL_REDIRECT', False)
SECURE_SSL_REDIRECT = ssl_check == 'True' or ssl_check == '1'
| Add locust testing box to allowed IPs, and get the SSL redirect from the environment so that it can be turned off for load testing | Add locust testing box to allowed IPs, and get the SSL redirect from the environment so that it can be turned off for load testing
pep8
| Python | mit | dahfool/navigator,uktrade/navigator,uktrade/navigator,dahfool/navigator,dahfool/navigator,dahfool/navigator,uktrade/navigator,uktrade/navigator | from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29', '94.119.64.0/24', '178.208.163.0/24']
SECURE_SSL_REDIRECT = True
Add locust testing box to allowed IPs, and get the SSL redirect from the environment so that it can be turned off for load testing
pep8 | from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = ['54.229.170.70']
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29', '94.119.64.0/24', '178.208.163.0/24']
ssl_check = os.environ.get('SSL_REDIRECT', False)
SECURE_SSL_REDIRECT = ssl_check == 'True' or ssl_check == '1'
| <commit_before>from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29', '94.119.64.0/24', '178.208.163.0/24']
SECURE_SSL_REDIRECT = True
<commit_msg>Add locust testing box to allowed IPs, and get the SSL redirect from the environment so that it can be turned off for load testing
pep8<commit_after> | from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = ['54.229.170.70']
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29', '94.119.64.0/24', '178.208.163.0/24']
ssl_check = os.environ.get('SSL_REDIRECT', False)
SECURE_SSL_REDIRECT = ssl_check == 'True' or ssl_check == '1'
| from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29', '94.119.64.0/24', '178.208.163.0/24']
SECURE_SSL_REDIRECT = True
Add locust testing box to allowed IPs, and get the SSL redirect from the environment so that it can be turned off for load testing
pep8from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = ['54.229.170.70']
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29', '94.119.64.0/24', '178.208.163.0/24']
ssl_check = os.environ.get('SSL_REDIRECT', False)
SECURE_SSL_REDIRECT = ssl_check == 'True' or ssl_check == '1'
| <commit_before>from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29', '94.119.64.0/24', '178.208.163.0/24']
SECURE_SSL_REDIRECT = True
<commit_msg>Add locust testing box to allowed IPs, and get the SSL redirect from the environment so that it can be turned off for load testing
pep8<commit_after>from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = ['54.229.170.70']
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29', '94.119.64.0/24', '178.208.163.0/24']
ssl_check = os.environ.get('SSL_REDIRECT', False)
SECURE_SSL_REDIRECT = ssl_check == 'True' or ssl_check == '1'
|
63af2d4267f7107232777fa0d8b222dc00f07a90 | test_setup.py | test_setup.py | """Test setup.py."""
import os
import subprocess
import sys
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert all(
any(
os.path.isfile(
os.path.join(
directory,
console_script.partition('=')[0].strip(),
),
)
for directory in os.environ['PATH'].split(':')
)
for console_script in setup.ENTRY_POINTS['console_scripts']
)
| """Test setup.py."""
import os
import subprocess
import sys
import sysconfig
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert os.path.isfile(
os.path.join(sysconfig.get_path('scripts'), 'backlog'),
)
| Check sysconfig 'scripts' instead of scanning PATH | Check sysconfig 'scripts' instead of scanning PATH
| Python | lgpl-2.1 | dmtucker/backlog | """Test setup.py."""
import os
import subprocess
import sys
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert all(
any(
os.path.isfile(
os.path.join(
directory,
console_script.partition('=')[0].strip(),
),
)
for directory in os.environ['PATH'].split(':')
)
for console_script in setup.ENTRY_POINTS['console_scripts']
)
Check sysconfig 'scripts' instead of scanning PATH | """Test setup.py."""
import os
import subprocess
import sys
import sysconfig
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert os.path.isfile(
os.path.join(sysconfig.get_path('scripts'), 'backlog'),
)
| <commit_before>"""Test setup.py."""
import os
import subprocess
import sys
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert all(
any(
os.path.isfile(
os.path.join(
directory,
console_script.partition('=')[0].strip(),
),
)
for directory in os.environ['PATH'].split(':')
)
for console_script in setup.ENTRY_POINTS['console_scripts']
)
<commit_msg>Check sysconfig 'scripts' instead of scanning PATH<commit_after> | """Test setup.py."""
import os
import subprocess
import sys
import sysconfig
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert os.path.isfile(
os.path.join(sysconfig.get_path('scripts'), 'backlog'),
)
| """Test setup.py."""
import os
import subprocess
import sys
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert all(
any(
os.path.isfile(
os.path.join(
directory,
console_script.partition('=')[0].strip(),
),
)
for directory in os.environ['PATH'].split(':')
)
for console_script in setup.ENTRY_POINTS['console_scripts']
)
Check sysconfig 'scripts' instead of scanning PATH"""Test setup.py."""
import os
import subprocess
import sys
import sysconfig
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert os.path.isfile(
os.path.join(sysconfig.get_path('scripts'), 'backlog'),
)
| <commit_before>"""Test setup.py."""
import os
import subprocess
import sys
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert all(
any(
os.path.isfile(
os.path.join(
directory,
console_script.partition('=')[0].strip(),
),
)
for directory in os.environ['PATH'].split(':')
)
for console_script in setup.ENTRY_POINTS['console_scripts']
)
<commit_msg>Check sysconfig 'scripts' instead of scanning PATH<commit_after>"""Test setup.py."""
import os
import subprocess
import sys
import sysconfig
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert os.path.isfile(
os.path.join(sysconfig.get_path('scripts'), 'backlog'),
)
|
36ceeed4ff6b578e8b63b222cd9beea4e788a819 | mongo_thingy/__init__.py | mongo_thingy/__init__.py | from pymongo import MongoClient
from thingy import classproperty, DatabaseThingy
class Thingy(DatabaseThingy):
client = None
_collection = None
@classproperty
def collection(cls):
return cls._collection or cls.table
@classproperty
def collection_name(cls):
return cls.collection.name
@classproperty
def _table(cls):
return cls._collection
@classmethod
def _get_database_from_table(cls, collection):
return collection.database
@classmethod
def _get_table_from_database(cls, database):
return database[cls.table_name]
def connect(*args, **kwargs):
client = MongoClient(*args, **kwargs)
Thingy.client = client
return client
| from pymongo import MongoClient
from thingy import classproperty, DatabaseThingy
class Thingy(DatabaseThingy):
client = None
_collection = None
@classproperty
def collection(cls):
return cls._collection or cls.table
@classproperty
def collection_name(cls):
return cls.collection.name
@classproperty
def _table(cls):
return cls._collection
@classmethod
def _get_database_from_table(cls, collection):
return collection.database
@classmethod
def _get_table_from_database(cls, database):
return database[cls.table_name]
def connect(*args, **kwargs):
client = MongoClient(*args, **kwargs)
Thingy.client = client
return client
__all__ = ["Thingy", "connect"]
| Define __all__ to restrict global imports | Define __all__ to restrict global imports
| Python | mit | numberly/mongo-thingy | from pymongo import MongoClient
from thingy import classproperty, DatabaseThingy
class Thingy(DatabaseThingy):
client = None
_collection = None
@classproperty
def collection(cls):
return cls._collection or cls.table
@classproperty
def collection_name(cls):
return cls.collection.name
@classproperty
def _table(cls):
return cls._collection
@classmethod
def _get_database_from_table(cls, collection):
return collection.database
@classmethod
def _get_table_from_database(cls, database):
return database[cls.table_name]
def connect(*args, **kwargs):
client = MongoClient(*args, **kwargs)
Thingy.client = client
return client
Define __all__ to restrict global imports | from pymongo import MongoClient
from thingy import classproperty, DatabaseThingy
class Thingy(DatabaseThingy):
client = None
_collection = None
@classproperty
def collection(cls):
return cls._collection or cls.table
@classproperty
def collection_name(cls):
return cls.collection.name
@classproperty
def _table(cls):
return cls._collection
@classmethod
def _get_database_from_table(cls, collection):
return collection.database
@classmethod
def _get_table_from_database(cls, database):
return database[cls.table_name]
def connect(*args, **kwargs):
client = MongoClient(*args, **kwargs)
Thingy.client = client
return client
__all__ = ["Thingy", "connect"]
| <commit_before>from pymongo import MongoClient
from thingy import classproperty, DatabaseThingy
class Thingy(DatabaseThingy):
client = None
_collection = None
@classproperty
def collection(cls):
return cls._collection or cls.table
@classproperty
def collection_name(cls):
return cls.collection.name
@classproperty
def _table(cls):
return cls._collection
@classmethod
def _get_database_from_table(cls, collection):
return collection.database
@classmethod
def _get_table_from_database(cls, database):
return database[cls.table_name]
def connect(*args, **kwargs):
client = MongoClient(*args, **kwargs)
Thingy.client = client
return client
<commit_msg>Define __all__ to restrict global imports<commit_after> | from pymongo import MongoClient
from thingy import classproperty, DatabaseThingy
class Thingy(DatabaseThingy):
client = None
_collection = None
@classproperty
def collection(cls):
return cls._collection or cls.table
@classproperty
def collection_name(cls):
return cls.collection.name
@classproperty
def _table(cls):
return cls._collection
@classmethod
def _get_database_from_table(cls, collection):
return collection.database
@classmethod
def _get_table_from_database(cls, database):
return database[cls.table_name]
def connect(*args, **kwargs):
client = MongoClient(*args, **kwargs)
Thingy.client = client
return client
__all__ = ["Thingy", "connect"]
| from pymongo import MongoClient
from thingy import classproperty, DatabaseThingy
class Thingy(DatabaseThingy):
client = None
_collection = None
@classproperty
def collection(cls):
return cls._collection or cls.table
@classproperty
def collection_name(cls):
return cls.collection.name
@classproperty
def _table(cls):
return cls._collection
@classmethod
def _get_database_from_table(cls, collection):
return collection.database
@classmethod
def _get_table_from_database(cls, database):
return database[cls.table_name]
def connect(*args, **kwargs):
client = MongoClient(*args, **kwargs)
Thingy.client = client
return client
Define __all__ to restrict global importsfrom pymongo import MongoClient
from thingy import classproperty, DatabaseThingy
class Thingy(DatabaseThingy):
client = None
_collection = None
@classproperty
def collection(cls):
return cls._collection or cls.table
@classproperty
def collection_name(cls):
return cls.collection.name
@classproperty
def _table(cls):
return cls._collection
@classmethod
def _get_database_from_table(cls, collection):
return collection.database
@classmethod
def _get_table_from_database(cls, database):
return database[cls.table_name]
def connect(*args, **kwargs):
client = MongoClient(*args, **kwargs)
Thingy.client = client
return client
__all__ = ["Thingy", "connect"]
| <commit_before>from pymongo import MongoClient
from thingy import classproperty, DatabaseThingy
class Thingy(DatabaseThingy):
client = None
_collection = None
@classproperty
def collection(cls):
return cls._collection or cls.table
@classproperty
def collection_name(cls):
return cls.collection.name
@classproperty
def _table(cls):
return cls._collection
@classmethod
def _get_database_from_table(cls, collection):
return collection.database
@classmethod
def _get_table_from_database(cls, database):
return database[cls.table_name]
def connect(*args, **kwargs):
client = MongoClient(*args, **kwargs)
Thingy.client = client
return client
<commit_msg>Define __all__ to restrict global imports<commit_after>from pymongo import MongoClient
from thingy import classproperty, DatabaseThingy
class Thingy(DatabaseThingy):
client = None
_collection = None
@classproperty
def collection(cls):
return cls._collection or cls.table
@classproperty
def collection_name(cls):
return cls.collection.name
@classproperty
def _table(cls):
return cls._collection
@classmethod
def _get_database_from_table(cls, collection):
return collection.database
@classmethod
def _get_table_from_database(cls, database):
return database[cls.table_name]
def connect(*args, **kwargs):
client = MongoClient(*args, **kwargs)
Thingy.client = client
return client
__all__ = ["Thingy", "connect"]
|
2b30ebfb05d64dac4663ab4e37dca65c0130f83a | goodtablesio/tasks.py | goodtablesio/tasks.py | import datetime
import logging
import dataset
from celery import Celery
from sqlalchemy.types import DateTime
from sqlalchemy.dialects.postgresql import JSONB
from goodtables import Inspector
from . import config
log = logging.getLogger(__name__)
# Module API
app = Celery('tasks')
app.config_from_object(config)
# TODO: automate
app.autodiscover_tasks(['goodtablesio.plugins.github'])
@app.task(name='goodtablesio.tasks.validate')
def validate(validation_conf, job_id=None):
"""Main validation task.
Args:
validation_conf (dict): validation configuration
See `schemas/validation-conf.yml`.
"""
# Get report
inspector = Inspector(**validation_conf['settings'])
report = inspector.inspect(validation_conf['files'], preset='tables')
# Save report
database = dataset.connect(config.DATABASE_URL)
row = {
'job_id': job_id or validate.request.id,
'report': report,
'finished': datetime.datetime.utcnow()
}
database['jobs'].update(row,
['job_id'],
types={'report': JSONB, 'finished': DateTime},
ensure=True)
| import datetime
import logging
import dataset
from celery import Celery
from sqlalchemy.types import DateTime
from sqlalchemy.dialects.postgresql import JSONB
from goodtables import Inspector
from . import config
log = logging.getLogger(__name__)
# Module API
app = Celery('tasks')
app.config_from_object(config)
# TODO: automate
app.autodiscover_tasks(['goodtablesio.plugins.github'])
@app.task(name='goodtablesio.tasks.validate')
def validate(validation_conf, job_id=None):
"""Main validation task.
Args:
validation_conf (dict): validation configuration
See `schemas/validation-conf.yml`.
"""
# Get report
settings = validation_conf.get('settings', {})
inspector = Inspector(**settings)
report = inspector.inspect(validation_conf['files'], preset='tables')
# Save report
database = dataset.connect(config.DATABASE_URL)
row = {
'job_id': job_id or validate.request.id,
'report': report,
'finished': datetime.datetime.utcnow()
}
database['jobs'].update(row,
['job_id'],
types={'report': JSONB, 'finished': DateTime},
ensure=True)
| Make settings optional on validation task | Make settings optional on validation task
| Python | agpl-3.0 | frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io | import datetime
import logging
import dataset
from celery import Celery
from sqlalchemy.types import DateTime
from sqlalchemy.dialects.postgresql import JSONB
from goodtables import Inspector
from . import config
log = logging.getLogger(__name__)
# Module API
app = Celery('tasks')
app.config_from_object(config)
# TODO: automate
app.autodiscover_tasks(['goodtablesio.plugins.github'])
@app.task(name='goodtablesio.tasks.validate')
def validate(validation_conf, job_id=None):
"""Main validation task.
Args:
validation_conf (dict): validation configuration
See `schemas/validation-conf.yml`.
"""
# Get report
inspector = Inspector(**validation_conf['settings'])
report = inspector.inspect(validation_conf['files'], preset='tables')
# Save report
database = dataset.connect(config.DATABASE_URL)
row = {
'job_id': job_id or validate.request.id,
'report': report,
'finished': datetime.datetime.utcnow()
}
database['jobs'].update(row,
['job_id'],
types={'report': JSONB, 'finished': DateTime},
ensure=True)
Make settings optional on validation task | import datetime
import logging
import dataset
from celery import Celery
from sqlalchemy.types import DateTime
from sqlalchemy.dialects.postgresql import JSONB
from goodtables import Inspector
from . import config
log = logging.getLogger(__name__)
# Module API
app = Celery('tasks')
app.config_from_object(config)
# TODO: automate
app.autodiscover_tasks(['goodtablesio.plugins.github'])
@app.task(name='goodtablesio.tasks.validate')
def validate(validation_conf, job_id=None):
"""Main validation task.
Args:
validation_conf (dict): validation configuration
See `schemas/validation-conf.yml`.
"""
# Get report
settings = validation_conf.get('settings', {})
inspector = Inspector(**settings)
report = inspector.inspect(validation_conf['files'], preset='tables')
# Save report
database = dataset.connect(config.DATABASE_URL)
row = {
'job_id': job_id or validate.request.id,
'report': report,
'finished': datetime.datetime.utcnow()
}
database['jobs'].update(row,
['job_id'],
types={'report': JSONB, 'finished': DateTime},
ensure=True)
| <commit_before>import datetime
import logging
import dataset
from celery import Celery
from sqlalchemy.types import DateTime
from sqlalchemy.dialects.postgresql import JSONB
from goodtables import Inspector
from . import config
log = logging.getLogger(__name__)
# Module API
app = Celery('tasks')
app.config_from_object(config)
# TODO: automate
app.autodiscover_tasks(['goodtablesio.plugins.github'])
@app.task(name='goodtablesio.tasks.validate')
def validate(validation_conf, job_id=None):
"""Main validation task.
Args:
validation_conf (dict): validation configuration
See `schemas/validation-conf.yml`.
"""
# Get report
inspector = Inspector(**validation_conf['settings'])
report = inspector.inspect(validation_conf['files'], preset='tables')
# Save report
database = dataset.connect(config.DATABASE_URL)
row = {
'job_id': job_id or validate.request.id,
'report': report,
'finished': datetime.datetime.utcnow()
}
database['jobs'].update(row,
['job_id'],
types={'report': JSONB, 'finished': DateTime},
ensure=True)
<commit_msg>Make settings optional on validation task<commit_after> | import datetime
import logging
import dataset
from celery import Celery
from sqlalchemy.types import DateTime
from sqlalchemy.dialects.postgresql import JSONB
from goodtables import Inspector
from . import config
log = logging.getLogger(__name__)
# Module API
app = Celery('tasks')
app.config_from_object(config)
# TODO: automate
app.autodiscover_tasks(['goodtablesio.plugins.github'])
@app.task(name='goodtablesio.tasks.validate')
def validate(validation_conf, job_id=None):
"""Main validation task.
Args:
validation_conf (dict): validation configuration
See `schemas/validation-conf.yml`.
"""
# Get report
settings = validation_conf.get('settings', {})
inspector = Inspector(**settings)
report = inspector.inspect(validation_conf['files'], preset='tables')
# Save report
database = dataset.connect(config.DATABASE_URL)
row = {
'job_id': job_id or validate.request.id,
'report': report,
'finished': datetime.datetime.utcnow()
}
database['jobs'].update(row,
['job_id'],
types={'report': JSONB, 'finished': DateTime},
ensure=True)
| import datetime
import logging
import dataset
from celery import Celery
from sqlalchemy.types import DateTime
from sqlalchemy.dialects.postgresql import JSONB
from goodtables import Inspector
from . import config
log = logging.getLogger(__name__)
# Module API
app = Celery('tasks')
app.config_from_object(config)
# TODO: automate
app.autodiscover_tasks(['goodtablesio.plugins.github'])
@app.task(name='goodtablesio.tasks.validate')
def validate(validation_conf, job_id=None):
"""Main validation task.
Args:
validation_conf (dict): validation configuration
See `schemas/validation-conf.yml`.
"""
# Get report
inspector = Inspector(**validation_conf['settings'])
report = inspector.inspect(validation_conf['files'], preset='tables')
# Save report
database = dataset.connect(config.DATABASE_URL)
row = {
'job_id': job_id or validate.request.id,
'report': report,
'finished': datetime.datetime.utcnow()
}
database['jobs'].update(row,
['job_id'],
types={'report': JSONB, 'finished': DateTime},
ensure=True)
Make settings optional on validation taskimport datetime
import logging
import dataset
from celery import Celery
from sqlalchemy.types import DateTime
from sqlalchemy.dialects.postgresql import JSONB
from goodtables import Inspector
from . import config
log = logging.getLogger(__name__)
# Module API
app = Celery('tasks')
app.config_from_object(config)
# TODO: automate
app.autodiscover_tasks(['goodtablesio.plugins.github'])
@app.task(name='goodtablesio.tasks.validate')
def validate(validation_conf, job_id=None):
"""Main validation task.
Args:
validation_conf (dict): validation configuration
See `schemas/validation-conf.yml`.
"""
# Get report
settings = validation_conf.get('settings', {})
inspector = Inspector(**settings)
report = inspector.inspect(validation_conf['files'], preset='tables')
# Save report
database = dataset.connect(config.DATABASE_URL)
row = {
'job_id': job_id or validate.request.id,
'report': report,
'finished': datetime.datetime.utcnow()
}
database['jobs'].update(row,
['job_id'],
types={'report': JSONB, 'finished': DateTime},
ensure=True)
| <commit_before>import datetime
import logging
import dataset
from celery import Celery
from sqlalchemy.types import DateTime
from sqlalchemy.dialects.postgresql import JSONB
from goodtables import Inspector
from . import config
log = logging.getLogger(__name__)
# Module API
app = Celery('tasks')
app.config_from_object(config)
# TODO: automate
app.autodiscover_tasks(['goodtablesio.plugins.github'])
@app.task(name='goodtablesio.tasks.validate')
def validate(validation_conf, job_id=None):
"""Main validation task.
Args:
validation_conf (dict): validation configuration
See `schemas/validation-conf.yml`.
"""
# Get report
inspector = Inspector(**validation_conf['settings'])
report = inspector.inspect(validation_conf['files'], preset='tables')
# Save report
database = dataset.connect(config.DATABASE_URL)
row = {
'job_id': job_id or validate.request.id,
'report': report,
'finished': datetime.datetime.utcnow()
}
database['jobs'].update(row,
['job_id'],
types={'report': JSONB, 'finished': DateTime},
ensure=True)
<commit_msg>Make settings optional on validation task<commit_after>import datetime
import logging
import dataset
from celery import Celery
from sqlalchemy.types import DateTime
from sqlalchemy.dialects.postgresql import JSONB
from goodtables import Inspector
from . import config
log = logging.getLogger(__name__)
# Module API
app = Celery('tasks')
app.config_from_object(config)
# TODO: automate
app.autodiscover_tasks(['goodtablesio.plugins.github'])
@app.task(name='goodtablesio.tasks.validate')
def validate(validation_conf, job_id=None):
"""Main validation task.
Args:
validation_conf (dict): validation configuration
See `schemas/validation-conf.yml`.
"""
# Get report
settings = validation_conf.get('settings', {})
inspector = Inspector(**settings)
report = inspector.inspect(validation_conf['files'], preset='tables')
# Save report
database = dataset.connect(config.DATABASE_URL)
row = {
'job_id': job_id or validate.request.id,
'report': report,
'finished': datetime.datetime.utcnow()
}
database['jobs'].update(row,
['job_id'],
types={'report': JSONB, 'finished': DateTime},
ensure=True)
|
fb49d44cd1cb8ea8a3d291d79546914f15a58491 | greenwich/__init__.py | greenwich/__init__.py | from greenwich.raster import (driver_for_path, frombytes, geom_to_array, open,
AffineTransform, ImageDriver, Raster)
from greenwich.geometry import Envelope, Geometry
from greenwich.srs import SpatialReference
| from greenwich.raster import (driver_for_path, fromarray, frombytes,
geom_to_array, open, AffineTransform, ImageDriver, Raster)
from greenwich.geometry import Envelope, Geometry
from greenwich.srs import SpatialReference
| Add fromarray to package root | Add fromarray to package root
| Python | bsd-3-clause | bkg/greenwich | from greenwich.raster import (driver_for_path, frombytes, geom_to_array, open,
AffineTransform, ImageDriver, Raster)
from greenwich.geometry import Envelope, Geometry
from greenwich.srs import SpatialReference
Add fromarray to package root | from greenwich.raster import (driver_for_path, fromarray, frombytes,
geom_to_array, open, AffineTransform, ImageDriver, Raster)
from greenwich.geometry import Envelope, Geometry
from greenwich.srs import SpatialReference
| <commit_before>from greenwich.raster import (driver_for_path, frombytes, geom_to_array, open,
AffineTransform, ImageDriver, Raster)
from greenwich.geometry import Envelope, Geometry
from greenwich.srs import SpatialReference
<commit_msg>Add fromarray to package root<commit_after> | from greenwich.raster import (driver_for_path, fromarray, frombytes,
geom_to_array, open, AffineTransform, ImageDriver, Raster)
from greenwich.geometry import Envelope, Geometry
from greenwich.srs import SpatialReference
| from greenwich.raster import (driver_for_path, frombytes, geom_to_array, open,
AffineTransform, ImageDriver, Raster)
from greenwich.geometry import Envelope, Geometry
from greenwich.srs import SpatialReference
Add fromarray to package rootfrom greenwich.raster import (driver_for_path, fromarray, frombytes,
geom_to_array, open, AffineTransform, ImageDriver, Raster)
from greenwich.geometry import Envelope, Geometry
from greenwich.srs import SpatialReference
| <commit_before>from greenwich.raster import (driver_for_path, frombytes, geom_to_array, open,
AffineTransform, ImageDriver, Raster)
from greenwich.geometry import Envelope, Geometry
from greenwich.srs import SpatialReference
<commit_msg>Add fromarray to package root<commit_after>from greenwich.raster import (driver_for_path, fromarray, frombytes,
geom_to_array, open, AffineTransform, ImageDriver, Raster)
from greenwich.geometry import Envelope, Geometry
from greenwich.srs import SpatialReference
|
71088ebbed3f6060def0455814036185c70ba194 | shopify_auth/context_processors.py | shopify_auth/context_processors.py | import shopify
def current_shop(request):
if not shopify.ShopifyResource.site:
return {'current_shop': None}
return {'current_shop': shopify.Shop.current()} | from django.conf import settings
import shopify
def shopify_context(request):
return {
'shopify_current_shop': shopify.Shop.current() if shopify.ShopifyResource.site else None,
'shopify_app_api_key': settings.SHOPIFY_APP_API_KEY,
} | Rename `current_shop` context processor to `shopify_context`, and add a little more useful Shopify information. | Rename `current_shop` context processor to `shopify_context`, and add a little more useful Shopify information. | Python | mit | funkybob/django-shopify-auth,RafaAguilar/django-shopify-auth,discolabs/django-shopify-auth,RafaAguilar/django-shopify-auth,discolabs/django-shopify-auth,funkybob/django-shopify-auth | import shopify
def current_shop(request):
    """Template context processor exposing the active Shopify shop (or None)."""
    shop = shopify.Shop.current() if shopify.ShopifyResource.site else None
    return {'current_shop': shop}
import shopify
def shopify_context(request):
return {
'shopify_current_shop': shopify.Shop.current() if shopify.ShopifyResource.site else None,
'shopify_app_api_key': settings.SHOPIFY_APP_API_KEY,
} | <commit_before>import shopify
def current_shop(request):
    """Template context processor: expose the active Shopify shop."""
    # ShopifyResource.site is only set once an API session has been activated;
    # without it, Shop.current() would make an unauthenticated call and fail.
    if not shopify.ShopifyResource.site:
        return {'current_shop': None}
    return {'current_shop': shopify.Shop.current()}
import shopify
def shopify_context(request):
return {
'shopify_current_shop': shopify.Shop.current() if shopify.ShopifyResource.site else None,
'shopify_app_api_key': settings.SHOPIFY_APP_API_KEY,
} | import shopify
def current_shop(request):
    """Context processor adding the active Shopify shop to templates."""
    if shopify.ShopifyResource.site:
        return {'current_shop': shopify.Shop.current()}
    return {'current_shop': None}
import shopify
def shopify_context(request):
return {
'shopify_current_shop': shopify.Shop.current() if shopify.ShopifyResource.site else None,
'shopify_app_api_key': settings.SHOPIFY_APP_API_KEY,
} | <commit_before>import shopify
def current_shop(request):
    """Template context processor: expose the active Shopify shop."""
    # No activated Shopify session means no shop can be queried; fall back
    # to None so templates can render without an API call.
    if not shopify.ShopifyResource.site:
        return {'current_shop': None}
    return {'current_shop': shopify.Shop.current()}
import shopify
def shopify_context(request):
return {
'shopify_current_shop': shopify.Shop.current() if shopify.ShopifyResource.site else None,
'shopify_app_api_key': settings.SHOPIFY_APP_API_KEY,
} |
4c39c270af91ddbc213e077fc06b4bf67c7c6e99 | django/core/checks/compatibility/django_1_7_0.py | django/core/checks/compatibility/django_1_7_0.py | from __future__ import unicode_literals
from .. import Warning, register, Tags
@register(Tags.compatibility)
def check_1_7_compatibility(**kwargs):
errors = []
errors.extend(_check_middleware_classes(**kwargs))
return errors
def _check_middleware_classes(app_configs=None, **kwargs):
"""
Checks if the user has *not* overridden the ``MIDDLEWARE_CLASSES`` setting &
warns them about the global default changes.
"""
from django.conf import settings
# MIDDLEWARE_CLASSES is overridden by default by startproject. If users
# have removed this override then we'll warn them about the default changes.
if not settings.is_overridden('MIDDLEWARE_CLASSES'):
return [
Warning(
"MIDDLEWARE_CLASSES is not set.",
hint=("Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES."
"django.contrib.sessions.middleware.SessionMiddleware, "
"django.contrib.auth.middleware.AuthenticationMiddleware, and "
"django.contrib.messages.middleware.MessageMiddleware were removed from the defaults."
"If your project needs these middleware then you should configure this setting."),
obj=None,
id='1_7.W001',
)
]
else:
return []
| from __future__ import unicode_literals
from .. import Warning, register, Tags
@register(Tags.compatibility)
def check_1_7_compatibility(**kwargs):
    """Aggregate the messages from all Django 1.7 compatibility checks."""
    messages = []
    messages += _check_middleware_classes(**kwargs)
    return messages
def _check_middleware_classes(app_configs=None, **kwargs):
    """
    Checks if the user has *not* overridden the ``MIDDLEWARE_CLASSES`` setting &
    warns them about the global default changes.
    """
    # Imported locally; presumably to defer settings access until the check
    # actually runs — TODO confirm against the check framework's conventions.
    from django.conf import settings

    # MIDDLEWARE_CLASSES is overridden by default by startproject. If users
    # have removed this override then we'll warn them about the default changes.
    if not settings.is_overridden('MIDDLEWARE_CLASSES'):
        return [
            Warning(
                "MIDDLEWARE_CLASSES is not set.",
                # NOTE: the trailing space on each fragment matters — these
                # adjacent string literals are implicitly concatenated.
                hint=("Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES. "
                      "django.contrib.sessions.middleware.SessionMiddleware, "
                      "django.contrib.auth.middleware.AuthenticationMiddleware, and "
                      "django.contrib.messages.middleware.MessageMiddleware were removed from the defaults. "
                      "If your project needs these middleware then you should configure this setting."),
                obj=None,
                id='1_7.W001',
            )
        ]
    else:
        return []
| Add missing spaces to implicitly joined strings | Add missing spaces to implicitly joined strings
| Python | bsd-3-clause | liu602348184/django,yask123/django,RevelSystems/django,sgzsh269/django,wweiradio/django,Anonymous-X6/django,spisneha25/django,adamchainz/django,dfunckt/django,Anonymous-X6/django,abomyi/django,jasonbot/django,syphar/django,mttr/django,ckirby/django,tbeadle/django,rmboggs/django,megaumi/django,blindroot/django,rrrene/django,kangfend/django,blueyed/django,hackerbot/DjangoDev,intgr/django,devops2014/djangosite,shaistaansari/django,auready/django,barbuza/django,guettli/django,Yong-Lee/django,mojeto/django,seocam/django,nielsvanoch/django,hunter007/django,ghedsouza/django,rsalmaso/django,avanov/django,xwolf12/django,frankvdp/django,benjaminjkraft/django,aidanlister/django,nealtodd/django,seocam/django,Vixionar/django,nemesisdesign/django,akshatharaj/django,hynekcer/django,claudep/django,djbaldey/django,peterlauri/django,ABaldwinHunter/django-clone-classic,dhruvagarwal/django,unaizalakain/django,techdragon/django,quxiaolong1504/django,riteshshrv/django,kholidfu/django,tysonclugg/django,ojengwa/django-1,adamchainz/django,Matt-Deacalion/django,MarkusH/django,myang321/django,risicle/django,rwillmer/django,pipermerriam/django,Vixionar/django,xrmx/django,alexmorozov/django,ifduyue/django,jvkops/django,irwinlove/django,jhg/django,hcsturix74/django,jallohm/django,mewtaylor/django,zsiciarz/django,tbeadle/django,takis/django,joequery/django,sjlehtin/django,stewartpark/django,gohin/django,ericfc/django,gohin/django,TridevGuha/django,kangfend/django,mathspace/django,helenst/django,yigitguler/django,jvkops/django,fenginx/django,dudepare/django,schinckel/django,apocquet/django,ticosax/django,andresgz/django,alexallah/django,cainmatt/django,koniiiik/django,shownomercy/django,double-y/django,delhivery/django,pquentin/django,adelton/django,WillGuan105/django,yamila-moreno/django,sam-tsai/django,marcelocure/django,donkirkby/django,TridevGuha/django,marissazhou/django,SujaySKumar/django,koordinates/django,coldmind/django,georgemarshall/django,zhaodelong/django,dud
epare/django,megaumi/django,mitya57/django,bikong2/django,tysonclugg/django,scorphus/django,TimYi/django,crazy-canux/django,pauloxnet/django,marckuz/django,stevenewey/django,charettes/django,bikong2/django,whs/django,tayfun/django,mojeto/django,gunchleoc/django,bitcity/django,nju520/django,auready/django,kutenai/django,andela-ifageyinbo/django,tragiclifestories/django,darkryder/django,feroda/django,beni55/django,divio/django,EmadMokhtar/Django,jasonbot/django,delinhabit/django,rmboggs/django,yograterol/django,hynekcer/django,synasius/django,zerc/django,jdelight/django,unaizalakain/django,labcodes/django,kcpawan/django,treyhunner/django,mttr/django,gengue/django,PetrDlouhy/django,spisneha25/django,MikeAmy/django,vincepandolfo/django,feroda/django,lsqtongxin/django,mlavin/django,gitaarik/django,fafaman/django,jhoos/django,shaib/django,quamilek/django,taaviteska/django,drjeep/django,django/django,zsiciarz/django,avneesh91/django,RevelSystems/django,quamilek/django,xwolf12/django,PetrDlouhy/django,ticosax/django,monetate/django,sopier/django,jarshwah/django,rmboggs/django,rtindru/django,zhoulingjun/django,alilotfi/django,lwiecek/django,drjeep/django,joakim-hove/django,kosz85/django,tayfun/django,blueyed/django,techdragon/django,hassanabidpk/django,freakboy3742/django,syaiful6/django,apollo13/django,KokareIITP/django,lunafeng/django,devops2014/djangosite,runekaagaard/django-contrib-locking,WSDC-NITWarangal/django,davidharrigan/django,zhoulingjun/django,ziima/django,ABaldwinHunter/django-clone,savoirfairelinux/django,aroche/django,barbuza/django,auready/django,barbuza/django,craynot/django,tbeadle/django,koniiiik/django,mewtaylor/django,bak1an/django,oinopion/django,dgladkov/django,xwolf12/django,tayfun/django,DONIKAN/django,mattrobenolt/django,kamyu104/django,aerophile/django,simonw/django,kholidfu/django,felixxm/django,timgraham/django,yask123/django,gdub/django,treyhunner/django,bak1an/django,ajaali/django,denis-pitul/django,kswiat/django,hnakamur/django,camilonova/dja
ngo,hottwaj/django,gcd0318/django,piquadrat/django,ABaldwinHunter/django-clone-classic,shtouff/django,eyohansa/django,jscn/django,lmorchard/django,elky/django,dudepare/django,avanov/django,hybrideagle/django,Beauhurst/django,Endika/django,aisipos/django,Balachan27/django,ajaali/django,jeezybrick/django,memtoko/django,Endika/django,ebar0n/django,petecummings/django,gannetson/django,ghickman/django,ojake/django,ataylor32/django,synasius/django,saydulk/django,HousekeepLtd/django,oberlin/django,WSDC-NITWarangal/django,marctc/django,ytjiang/django,beni55/django,anant-dev/django,kcpawan/django,djbaldey/django,robhudson/django,jenalgit/django,theo-l/django,rtindru/django,hottwaj/django,shacker/django,kisna72/django,takeshineshiro/django,rwillmer/django,yigitguler/django,nhippenmeyer/django,yograterol/django,abomyi/django,EmadMokhtar/Django,denys-duchier/django,googleinterns/django,vincepandolfo/django,lsqtongxin/django,andela-ooladayo/django,dsanders11/django,MarkusH/django,RossBrunton/django,MounirMesselmeni/django,ArnossArnossi/django,leekchan/django_test,ptoraskar/django,caotianwei/django,dgladkov/django,WSDC-NITWarangal/django,ecederstrand/django,gitaarik/django,nielsvanoch/django,Beauhurst/django,willharris/django,gunchleoc/django,jylaxp/django,ojake/django,jejimenez/django,craynot/django,dwightgunning/django,rlugojr/django,kevintaw/django,yamila-moreno/django,jgoclawski/django,tanmaythakur/django,shaib/django,etos/django,oberlin/django,haxoza/django,harisibrahimkv/django,marctc/django,GitAngel/django,blighj/django,riteshshrv/django,donkirkby/django,ryanahall/django,Sonicbids/django,shtouff/django,jvkops/django,takeshineshiro/django,beck/django,takis/django,camilonova/django,elijah513/django,x111ong/django,drjeep/django,jsoref/django,yograterol/django,blueyed/django,ryanahall/django,huang4fstudio/django,neiudemo1/django,himleyb85/django,helenst/django,jenalgit/django,daniponi/django,extremewaysback/django,evansd/django,ericfc/django,darkryder/django,bikong2/django,ali
mony/django,bitcity/django,sergei-maertens/django,schinckel/django,krisys/django,jasonwzhy/django,curtisstpierre/django,rockneurotiko/django,willhardy/django,YangSongzhou/django,TimYi/django,alrifqi/django,alilotfi/django,sadaf2605/django,chyeh727/django,DONIKAN/django,andresgz/django,nealtodd/django,dbaxa/django,RevelSystems/django,sbellem/django,evansd/django,tbeadle/django,wsmith323/django,vitaly4uk/django,sjlehtin/django,alexmorozov/django,aspidites/django,ryanahall/django,darjeeling/django,tuhangdi/django,hunter007/django,mathspace/django,waytai/django,gunchleoc/django,rrrene/django,jasonwzhy/django,pasqualguerrero/django,quamilek/django,rwillmer/django,neiudemo1/django,akaariai/django,elena/django,ytjiang/django,blindroot/django,YYWen0o0/python-frame-django,PetrDlouhy/django,kcpawan/django,anant-dev/django,ptoraskar/django,seanwestfall/django,MoritzS/django,yewang15215/django,pasqualguerrero/django,rtindru/django,extremewaysback/django,ericfc/django,delhivery/django,SujaySKumar/django,rrrene/django,hybrideagle/django,drjeep/django,darkryder/django,kswiat/django,SebasSBM/django,mattseymour/django,ghickman/django,Leila20/django,wetneb/django,jhoos/django,sjlehtin/django,ABaldwinHunter/django-clone-classic,zhoulingjun/django,gdub/django,piquadrat/django,mcardillo55/django,fenginx/django,marckuz/django,marcelocure/django,tcwicklund/django,eugena/django,elena/django,rajsadho/django,epandurski/django,auvipy/django,sarthakmeh03/django,BMJHayward/django,BlindHunter/django,waytai/django,mattrobenolt/django,TimYi/django,davidharrigan/django,tcwicklund/django,willharris/django,bobcyw/django,tuhangdi/django,zulip/django,rajsadho/django,kisna72/django,asser/django,auvipy/django,lsqtongxin/django,taaviteska/django,whs/django,zerc/django,duqiao/django,areski/django,RossBrunton/django,maxsocl/django,vmarkovtsev/django,digimarc/django,olasitarska/django,supriyantomaftuh/django,rsvip/Django,katrid/django,SebasSBM/django,uranusjr/django,shacker/django,irwinlove/django,andela-ifa
geyinbo/django,BMJHayward/django,ironbox360/django,risicle/django,dfunckt/django,denys-duchier/django,Leila20/django,carljm/django,mcella/django,JorgeCoock/django,frishberg/django,stevenewey/django,oinopion/django,savoirfairelinux/django,mjtamlyn/django,1013553207/django,ptoraskar/django,jgoclawski/django,nemesisdesign/django,syaiful6/django,baylee/django,hassanabidpk/django,adelton/django,ar45/django,ckirby/django,wkschwartz/django,frePPLe/django,kaedroho/django,fpy171/django,delhivery/django,akintoey/django,mewtaylor/django,takeshineshiro/django,adamchainz/django,liu602348184/django,ecederstrand/django,lsqtongxin/django,elijah513/django,sarvex/django,loic/django,Balachan27/django,atul-bhouraskar/django,moreati/django,filias/django,quxiaolong1504/django,rwillmer/django,darjeeling/django,GaussDing/django,marqueedev/django,andyzsf/django,jhoos/django,blindroot/django,koordinates/django,megaumi/django,auready/django,rhertzog/django,maxsocl/django,gannetson/django,camilonova/django,Y3K/django,waytai/django,benjaminjkraft/django,EmadMokhtar/Django,joakim-hove/django,saydulk/django,sadaf2605/django,fafaman/django,JavML/django,varunnaganathan/django,jejimenez/django,crazy-canux/django,manhhomienbienthuy/django,ghedsouza/django,intgr/django,oberlin/django,marckuz/django,hkchenhongyi/django,claudep/django,iambibhas/django,mitya57/django,mlavin/django,mmardini/django,hkchenhongyi/django,hobarrera/django,zsiciarz/django,adelton/django,waytai/django,beni55/django,MoritzS/django,gengue/django,mattseymour/django,stevenewey/django,MatthewWilkes/django,akintoey/django,yewang15215/django,github-account-because-they-want-it/django,BMJHayward/django,dydek/django,poiati/django,1013553207/django,JorgeCoock/django,gdub/django,ataylor32/django,elena/django,sephii/django,rizumu/django,akaariai/django,andela-ifageyinbo/django,zhaodelong/django,AltSchool/django,ziima/django,liuliwork/django,ABaldwinHunter/django-clone,xadahiya/django,SoftwareMaven/django,HonzaKral/django,syphar/django,tayfu
n/django,sgzsh269/django,crazy-canux/django,dhruvagarwal/django,raphaelmerx/django,myang321/django,szopu/django,ojengwa/django-1,tuhangdi/django,eyohansa/django,PolicyStat/django,tuhangdi/django,Leila20/django,monetate/django,ataylor32/django,dfunckt/django,jdelight/django,MounirMesselmeni/django,raphaelmerx/django,myang321/django,gitaarik/django,alimony/django,theo-l/django,weiawe/django,BrotherPhil/django,hobarrera/django,anant-dev/django,tysonclugg/django,gdi2290/django,marcelocure/django,AltSchool/django,delhivery/django,akshatharaj/django,solarissmoke/django,salamer/django,hassanabidpk/django,digimarc/django,sephii/django,willhardy/django,andreif/django,PetrDlouhy/django,tragiclifestories/django,atul-bhouraskar/django,techdragon/django,krishna-pandey-git/django,mcardillo55/django,vitaly4uk/django,jrrembert/django,dpetzold/django,ghedsouza/django,denis-pitul/django,marqueedev/django,kholidfu/django,jeezybrick/django,vmarkovtsev/django,baylee/django,darjeeling/django,GhostThrone/django,MikeAmy/django,taaviteska/django,hynekcer/django,bspink/django,kamyu104/django,rynomster/django,apollo13/django,x111ong/django,huang4fstudio/django,monetate/django,ArnossArnossi/django,rsalmaso/django,davgibbs/django,mmardini/django,arun6582/django,ebar0n/django,gdi2290/django,stewartpark/django,matiasb/django,sbellem/django,runekaagaard/django-contrib-locking,matiasb/django,baylee/django,mitchelljkotler/django,MatthewWilkes/django,sjlehtin/django,syaiful6/django,jn7163/django,denis-pitul/django,twz915/django,kutenai/django,synasius/django,synasius/django,hynekcer/django,yask123/django,daniponi/django,liuliwork/django,syaiful6/django,jscn/django,karyon/django,SoftwareMaven/django,nealtodd/django,rockneurotiko/django,elkingtonmcb/django,adelton/django,edevil/django,nemesisdesign/django,raphaelmerx/django,ulope/django,jvkops/django,dracos/django,hunter007/django,MatthewWilkes/django,asser/django,baylee/django,hkchenhongyi/django,shownomercy/django,frishberg/django,mrbox/django,davidh
arrigan/django,ziima/django,ajoaoff/django,stevenewey/django,zulip/django,KokareIITP/django,edevil/django,cainmatt/django,szopu/django,ivandevp/django,jsoref/django,weiawe/django,rhertzog/django,ghedsouza/django,frankvdp/django,jmcarp/django,Balachan27/django,divio/django,ivandevp/django,hackerbot/DjangoDev,djbaldey/django,twz915/django,ajaali/django,codepantry/django,b-me/django,ajoaoff/django,PolicyStat/django,HousekeepLtd/django,coldmind/django,simonw/django,twz915/django,dhruvagarwal/django,DasIch/django,MounirMesselmeni/django,xadahiya/django,yakky/django,sam-tsai/django,jasonbot/django,jsoref/django,lunafeng/django,Adnn/django,bspink/django,gohin/django,techdragon/django,eugena/django,wweiradio/django,erikr/django,mathspace/django,nhippenmeyer/django,ABaldwinHunter/django-clone,manhhomienbienthuy/django,ytjiang/django,DasIch/django,daniponi/django,edevil/django,dudepare/django,zhoulingjun/django,jpic/django,hottwaj/django,jnovinger/django,avneesh91/django,mjtamlyn/django,ar45/django,elena/django,WillGuan105/django,dursk/django,beck/django,shaistaansari/django,arun6582/django,irwinlove/django,b-me/django,liuliwork/django,duqiao/django,Yong-Lee/django,SoftwareMaven/django,nhippenmeyer/django,mewtaylor/django,JorgeCoock/django,rockneurotiko/django,Mixser/django,Argon-Zhou/django,bobcyw/django,BrotherPhil/django,Argon-Zhou/django,phalt/django,treyhunner/django,ifduyue/django,webgeodatavore/django,ryangallen/django,nealtodd/django,pauloxnet/django,krishna-pandey-git/django,Adnn/django,carljm/django,jpic/django,donkirkby/django,craynot/django,alimony/django,EliotBerriot/django,unaizalakain/django,ABaldwinHunter/django-clone,alilotfi/django,Endika/django,delinhabit/django,MarcJoan/django,nju520/django,timgraham/django,rizumu/django,poiati/django,jn7163/django,double-y/django,Matt-Deacalion/django,ojengwa/django-1,jn7163/django,dpetzold/django,lmorchard/django,beni55/django,taaviteska/django,payeldillip/django,labcodes/django,bikong2/django,salamer/django,KokareIITP/d
jango,chyeh727/django,elky/django,zanderle/django,ojake/django,WillGuan105/django,joequery/django,wsmith323/django,ajaali/django,GaussDing/django,kaedroho/django,vitan/django,elky/django,mjtamlyn/django,Yong-Lee/django,knifenomad/django,elijah513/django,gannetson/django,twz915/django,jasonwzhy/django,shaistaansari/django,apocquet/django,xrmx/django,rapilabs/django,litchfield/django,yask123/django,aidanlister/django,theo-l/django,katrid/django,Beauhurst/django,pauloxnet/django,spisneha25/django,davgibbs/django,filias/django,takis/django,MarcJoan/django,shacker/django,rsvip/Django,gchp/django,ivandevp/django,erikr/django,aerophile/django,mshafiq9/django,mcella/django,andela-ooladayo/django,rapilabs/django,mrbox/django,gchp/django,jeezybrick/django,rsalmaso/django,joequery/django,weiawe/django,jhg/django,h4r5h1t/django-hauthy,shtouff/django,takeshineshiro/django,liuliwork/django,himleyb85/django,wetneb/django,kosz85/django,Mixser/django,jmcarp/django,maxsocl/django,piquadrat/django,follow99/django,gcd0318/django,hkchenhongyi/django,sergei-maertens/django,HonzaKral/django,dbaxa/django,apollo13/django,dracos/django,Nepherhotep/django,kosz85/django,doismellburning/django,ironbox360/django,petecummings/django,gchp/django,HonzaKral/django,darjeeling/django,jscn/django,frdb194/django,kcpawan/django,tanmaythakur/django,rlugojr/django,EliotBerriot/django,mshafiq9/django,memtoko/django,gunchleoc/django,Y3K/django,szopu/django,gitaarik/django,MarkusH/django,evansd/django,mojeto/django,yamila-moreno/django,Matt-Deacalion/django,Sonicbids/django,helenst/django,rynomster/django,vitaly4uk/django,ArnossArnossi/django,takis/django,akshatharaj/django,mcrowson/django,frePPLe/django,yakky/django,dbaxa/django,DONIKAN/django,vincepandolfo/django,huang4fstudio/django,roselleebarle04/django,aroche/django,rockneurotiko/django,caotianwei/django,gohin/django,djbaldey/django,blighj/django,dursk/django,marqueedev/django,moreati/django,kevintaw/django,dpetzold/django,sadaf2605/django,sgzsh269/djan
go,beck/django,yograterol/django,ebar0n/django,robhudson/django,delinhabit/django,rrrene/django,jmcarp/django,darkryder/django,ajoaoff/django,litchfield/django,elky/django,sephii/django,willhardy/django,dgladkov/django,Vixionar/django,tomchristie/django,sergei-maertens/django,mttr/django,phalt/django,supriyantomaftuh/django,mitchelljkotler/django,felixxm/django,indevgr/django,koordinates/django,moreati/django,erikr/django,ASCrookes/django,mitya57/django,aspidites/django,sam-tsai/django,1013553207/django,dwightgunning/django,shownomercy/django,indevgr/django,PolicyStat/django,ajoaoff/django,maxsocl/django,savoirfairelinux/django,aerophile/django,leekchan/django_test,rhertzog/django,eugena/django,EliotBerriot/django,aspidites/django,labcodes/django,labcodes/django,mcella/django,MikeAmy/django,hackerbot/DjangoDev,dsanders11/django,sarvex/django,andela-ooladayo/django,jeezybrick/django,xadahiya/django,manhhomienbienthuy/django,hassanabidpk/django,AltSchool/django,webgeodatavore/django,yewang15215/django,katrid/django,Nepherhotep/django,charettes/django,gcd0318/django,chyeh727/django,arun6582/django,roselleebarle04/django,roselleebarle04/django,alexallah/django,aisipos/django,JavML/django,YYWen0o0/python-frame-django,MoritzS/django,ericfc/django,irwinlove/django,feroda/django,SebasSBM/django,kosz85/django,atul-bhouraskar/django,mrbox/django,tomchristie/django,bitcity/django,risicle/django,fpy171/django,Y3K/django,AndrewGrossman/django,sadaf2605/django,loic/django,jenalgit/django,zulip/django,doismellburning/django,treyhunner/django,NullSoldier/django,rynomster/django,shaistaansari/django,z0by/django,ulope/django,indevgr/django,atul-bhouraskar/django,timgraham/django,shtouff/django,oinopion/django,craynot/django,davidharrigan/django,duqiao/django,marcelocure/django,areski/django,Vixionar/django,dursk/django,vmarkovtsev/django,jylaxp/django,Argon-Zhou/django,frePPLe/django,mrfuxi/django,AltSchool/django,wkschwartz/django,andreif/django,roselleebarle04/django,zhaodelong/dja
ngo,hcsturix74/django,divio/django,aroche/django,dfdx2/django,SujaySKumar/django,edmorley/django,BrotherPhil/django,Korkki/django,seocam/django,himleyb85/django,petecummings/django,elkingtonmcb/django,filias/django,bspink/django,davgibbs/django,georgemarshall/django,github-account-because-they-want-it/django,h4r5h1t/django-hauthy,ytjiang/django,divio/django,dydek/django,Yong-Lee/django,auvipy/django,ryangallen/django,rapilabs/django,ticosax/django,robhudson/django,gchp/django,sam-tsai/django,ckirby/django,sbellem/django,reinout/django,shacker/django,alexallah/django,charettes/django,payeldillip/django,mrfuxi/django,sarvex/django,jenalgit/django,elijah513/django,pipermerriam/django,knifenomad/django,mitya57/django,epandurski/django,marissazhou/django,himleyb85/django,tanmaythakur/django,kutenai/django,marissazhou/django,MoritzS/django,ironbox360/django,jgoclawski/django,syphar/django,peterlauri/django,seanwestfall/django,rapilabs/django,pipermerriam/django,knifenomad/django,gengue/django,kevintaw/django,BMJHayward/django,wetneb/django,sopier/django,dpetzold/django,double-y/django,digimarc/django,freakboy3742/django,dwightgunning/django,Korkki/django,bitcity/django,ghickman/django,shownomercy/django,z0by/django,seocam/django,JavML/django,hcsturix74/django,RossBrunton/django,guettli/django,hybrideagle/django,blighj/django,wweiradio/django,oberlin/django,anant-dev/django,jhoos/django,raphaelmerx/django,mcrowson/django,github-account-because-they-want-it/django,sarthakmeh03/django,GitAngel/django,Argon-Zhou/django,rtindru/django,charettes/django,RevelSystems/django,sdcooke/django,koniiiik/django,pasqualguerrero/django,ojengwa/django-1,DONIKAN/django,yewang15215/django,poiati/django,xrmx/django,django/django,lwiecek/django,github-account-because-they-want-it/django,savoirfairelinux/django,alexmorozov/django,karyon/django,yakky/django,jylaxp/django,SoftwareMaven/django,digimarc/django,nhippenmeyer/django,poiati/django,unaizalakain/django,tomchristie/django,gdi2290/django,f
enginx/django,runekaagaard/django-contrib-locking,vitan/django,MounirMesselmeni/django,jylaxp/django,mitchelljkotler/django,ghickman/django,apollo13/django,katrid/django,willharris/django,jdelight/django,intgr/django,NullSoldier/django,frishberg/django,chyeh727/django,marqueedev/django,mrbox/django,tanmaythakur/django,evansd/django,jejimenez/django,elkingtonmcb/django,jarshwah/django,mojeto/django,bak1an/django,HousekeepLtd/django,georgemarshall/django,gannetson/django,felixxm/django,lmorchard/django,dfunckt/django,beck/django,Beauhurst/django,payeldillip/django,Matt-Deacalion/django,zanderle/django,tragiclifestories/django,ifduyue/django,lunafeng/django,z0by/django,follow99/django,double-y/django,bak1an/django,MarkusH/django,leekchan/django_test,apocquet/django,henryfjordan/django,jrrembert/django,shaib/django,joakim-hove/django,programadorjc/django,Mixser/django,quxiaolong1504/django,spisneha25/django,follow99/django,uranusjr/django,haxoza/django,AndrewGrossman/django,TridevGuha/django,rajsadho/django,sarthakmeh03/django,kamyu104/django,ryangallen/django,mcella/django,akintoey/django,cainmatt/django,ar45/django,krisys/django,dursk/django,NullSoldier/django,akshatharaj/django,Anonymous-X6/django,areski/django,jaywreddy/django,peterlauri/django,blueyed/django,huang4fstudio/django,mlavin/django,guettli/django,kevintaw/django,reinout/django,claudep/django,liu602348184/django,BrotherPhil/django,programadorjc/django,pquentin/django,supriyantomaftuh/django,alexmorozov/django,donkirkby/django,ar45/django,manhhomienbienthuy/django,bobcyw/django,rsvip/Django,hobarrera/django,jnovinger/django,druuu/django,googleinterns/django,rsalmaso/django,felixxm/django,saydulk/django,ivandevp/django,jn7163/django,RossBrunton/django,blighj/django,ptoraskar/django,georgemarshall/django,kangfend/django,intgr/django,curtisstpierre/django,doismellburning/django,jpic/django,willharris/django,extremewaysback/django,kaedroho/django,ecederstrand/django,krishna-pandey-git/django,mrfuxi/django,deni
s-pitul/django,scorphus/django,simonw/django,zanderle/django,mshafiq9/django,akaariai/django,mathspace/django,jarshwah/django,riteshshrv/django,epandurski/django,eyohansa/django,YangSongzhou/django,jmcarp/django,salamer/django,jnovinger/django,harisibrahimkv/django,avneesh91/django,marckuz/django,neiudemo1/django,aidanlister/django,rajsadho/django,druuu/django,lwiecek/django,jrrembert/django,asser/django,uranusjr/django,litchfield/django,alrifqi/django,tcwicklund/django,jasonwzhy/django,1013553207/django,andela-ooladayo/django,GhostThrone/django,rynomster/django,ulope/django,GaussDing/django,tcwicklund/django,TimYi/django,edmorley/django,xwolf12/django,solarissmoke/django,haxoza/django,megaumi/django,frishberg/django,iambibhas/django,riteshshrv/django,rlugojr/django,sergei-maertens/django,vincepandolfo/django,feroda/django,petecummings/django,mcrowson/django,crazy-canux/django,alrifqi/django,aroche/django,t0in4/django,jaywreddy/django,davgibbs/django,myang321/django,syphar/django,YYWen0o0/python-frame-django,frankvdp/django,aidanlister/django,sbellem/django,coldmind/django,claudep/django,apocquet/django,koordinates/django,dracos/django,hybrideagle/django,etos/django,rmboggs/django,alrifqi/django,Korkki/django,coldmind/django,mttr/django,aisipos/django,programadorjc/django,MarcJoan/django,henryfjordan/django,whs/django,olasitarska/django,x111ong/django,programadorjc/django,mmardini/django,koniiiik/django,ebar0n/django,reinout/django,frdb194/django,sgzsh269/django,mcrowson/django,sarthakmeh03/django,andresgz/django,duqiao/django,WSDC-NITWarangal/django,KokareIITP/django,fpy171/django,GhostThrone/django,webgeodatavore/django,avanov/django,solarissmoke/django,camilonova/django,BlindHunter/django,freakboy3742/django,moreati/django,mattseymour/django,edmorley/django,piquadrat/django,abomyi/django,payeldillip/django,edmorley/django,indevgr/django,hnakamur/django,jnovinger/django,frdb194/django,vitan/django,nju520/django,ryangallen/django,ironbox360/django,asser/django,matt
seymour/django,henryfjordan/django,bobcyw/django,mmardini/django,scorphus/django,shaib/django,t0in4/django,alexallah/django,tragiclifestories/django,eugena/django,joequery/django,YangSongzhou/django,pipermerriam/django,jsoref/django,wetneb/django,loic/django,HonzaKral/django,hcsturix74/django,GhostThrone/django,schinckel/django,aspidites/django,aerophile/django,extremewaysback/django,varunnaganathan/django,dhruvagarwal/django,harisibrahimkv/django,Anonymous-X6/django,hottwaj/django,andyzsf/django,jhg/django,lunafeng/django,gcd0318/django,monetate/django,h4r5h1t/django-hauthy,litchfield/django,hnakamur/django,zerc/django,ziima/django,vitaly4uk/django,timgraham/django,barbuza/django,HousekeepLtd/django,auvipy/django,hnakamur/django,sdcooke/django,theo-l/django,GaussDing/django,codepantry/django,Nepherhotep/django,mitchelljkotler/django,druuu/django,JorgeCoock/django,ecederstrand/django,b-me/django,pasqualguerrero/django,varunnaganathan/django,wsmith323/django,jarshwah/django,andela-ifageyinbo/django,bspink/django,stewartpark/django,kangfend/django,aisipos/django,ASCrookes/django,devops2014/djangosite,epandurski/django,MikeAmy/django,avanov/django,benjaminjkraft/django,caotianwei/django,z0by/django,filias/django,rizumu/django,vmarkovtsev/django,denys-duchier/django,sdcooke/django,django/django,gengue/django,avneesh91/django,caotianwei/django,curtisstpierre/django,jaywreddy/django,MatthewWilkes/django,eyohansa/django,follow99/django,simonw/django,varunnaganathan/django,karyon/django,YangSongzhou/django,zsiciarz/django,NullSoldier/django,jallohm/django,sarvex/django,tomchristie/django,akintoey/django,stewartpark/django,andreif/django,frdb194/django,jrrembert/django,kamyu104/django,uranusjr/django,krisys/django,supriyantomaftuh/django,sopier/django,dgladkov/django,solarissmoke/django,TridevGuha/django,wkschwartz/django,krishna-pandey-git/django,JavML/django,Sonicbids/django,curtisstpierre/django,guettli/django,jdelight/django,elkingtonmcb/django,Adnn/django,harisibrahimkv
/django,ataylor32/django,nju520/django,fafaman/django,rlugojr/django,SujaySKumar/django,EliotBerriot/django,memtoko/django,GitAngel/django,benjaminjkraft/django,nemesisdesign/django,pauloxnet/django,zanderle/django,ASCrookes/django,jscn/django,codepantry/django,peterlauri/django,Leila20/django,salamer/django,jallohm/django,hackerbot/DjangoDev,vitan/django,yakky/django,ABaldwinHunter/django-clone-classic,Adnn/django,dbaxa/django,wweiradio/django,henryfjordan/django,jgoclawski/django,mshafiq9/django,fpy171/django,dydek/django,cainmatt/django,joakim-hove/django,carljm/django,Endika/django,codepantry/django,dsanders11/django,gdub/django,t0in4/django,alimony/django,iambibhas/django,zhaodelong/django,delinhabit/django,lmorchard/django,denys-duchier/django,ojake/django,zulip/django,tysonclugg/django,blindroot/django,rsvip/Django,Nepherhotep/django,abomyi/django,jpic/django,seanwestfall/django,marctc/django,h4r5h1t/django-hauthy,rizumu/django,Y3K/django,alilotfi/django,ifduyue/django,hobarrera/django,mrfuxi/django,areski/django,krisys/django,AndrewGrossman/django,ArnossArnossi/django,akaariai/django,oinopion/django,pquentin/django,googleinterns/django,neiudemo1/django,nielsvanoch/django,yamila-moreno/django,loic/django,x111ong/django,weiawe/django,reinout/django,phalt/django,lwiecek/django,druuu/django,olasitarska/django,robhudson/django,DasIch/django,ryanahall/django,kisna72/django,risicle/django,etos/django,dsanders11/django,BlindHunter/django,dfdx2/django,matiasb/django,GitAngel/django,carljm/django,phalt/django,frankvdp/django,mcardillo55/django,ticosax/django,jaywreddy/django,Balachan27/django,zerc/django,daniponi/django,wsmith323/django,mattrobenolt/django,MarcJoan/django,schinckel/django,matiasb/django,b-me/django,andyzsf/django,fenginx/django,kutenai/django,adamchainz/django,seanwestfall/django,kswiat/django,mcardillo55/django,dwightgunning/django,marctc/django,mattrobenolt/django,wkschwartz/django,quamilek/django,karyon/django,knifenomad/django,arun6582/django,kisn
a72/django,t0in4/django,andreif/django,jasonbot/django,kholidfu/django,dydek/django,googleinterns/django,marissazhou/django,ckirby/django,yigitguler/django,BlindHunter/django,WillGuan105/django,DasIch/django,jejimenez/django,andresgz/django,sdcooke/django,quxiaolong1504/django,Mixser/django,jallohm/django,hunter007/django,dfdx2/django,etos/django,liu602348184/django,haxoza/django,jhg/django,whs/django,frePPLe/django,dracos/django,dfdx2/django,mjtamlyn/django,sopier/django,xrmx/django,erikr/django,webgeodatavore/django,scorphus/django,django/django,SebasSBM/django,fafaman/django,ASCrookes/django,AndrewGrossman/django,saydulk/django,Korkki/django,rhertzog/django,willhardy/django,xadahiya/django,mlavin/django | from __future__ import unicode_literals
from .. import Warning, register, Tags
@register(Tags.compatibility)
def check_1_7_compatibility(**kwargs):
errors = []
errors.extend(_check_middleware_classes(**kwargs))
return errors
def _check_middleware_classes(app_configs=None, **kwargs):
"""
Checks if the user has *not* overridden the ``MIDDLEWARE_CLASSES`` setting &
warns them about the global default changes.
"""
from django.conf import settings
# MIDDLEWARE_CLASSES is overridden by default by startproject. If users
# have removed this override then we'll warn them about the default changes.
if not settings.is_overridden('MIDDLEWARE_CLASSES'):
return [
Warning(
"MIDDLEWARE_CLASSES is not set.",
hint=("Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES."
"django.contrib.sessions.middleware.SessionMiddleware, "
"django.contrib.auth.middleware.AuthenticationMiddleware, and "
"django.contrib.messages.middleware.MessageMiddleware were removed from the defaults."
"If your project needs these middleware then you should configure this setting."),
obj=None,
id='1_7.W001',
)
]
else:
return []
Add missing spaces to implicitly joined strings | from __future__ import unicode_literals
from .. import Warning, register, Tags
@register(Tags.compatibility)
def check_1_7_compatibility(**kwargs):
errors = []
errors.extend(_check_middleware_classes(**kwargs))
return errors
def _check_middleware_classes(app_configs=None, **kwargs):
"""
Checks if the user has *not* overridden the ``MIDDLEWARE_CLASSES`` setting &
warns them about the global default changes.
"""
from django.conf import settings
# MIDDLEWARE_CLASSES is overridden by default by startproject. If users
# have removed this override then we'll warn them about the default changes.
if not settings.is_overridden('MIDDLEWARE_CLASSES'):
return [
Warning(
"MIDDLEWARE_CLASSES is not set.",
hint=("Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES. "
"django.contrib.sessions.middleware.SessionMiddleware, "
"django.contrib.auth.middleware.AuthenticationMiddleware, and "
"django.contrib.messages.middleware.MessageMiddleware were removed from the defaults. "
"If your project needs these middleware then you should configure this setting."),
obj=None,
id='1_7.W001',
)
]
else:
return []
| <commit_before>from __future__ import unicode_literals
from .. import Warning, register, Tags
@register(Tags.compatibility)
def check_1_7_compatibility(**kwargs):
errors = []
errors.extend(_check_middleware_classes(**kwargs))
return errors
def _check_middleware_classes(app_configs=None, **kwargs):
"""
Checks if the user has *not* overridden the ``MIDDLEWARE_CLASSES`` setting &
warns them about the global default changes.
"""
from django.conf import settings
# MIDDLEWARE_CLASSES is overridden by default by startproject. If users
# have removed this override then we'll warn them about the default changes.
if not settings.is_overridden('MIDDLEWARE_CLASSES'):
return [
Warning(
"MIDDLEWARE_CLASSES is not set.",
hint=("Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES."
"django.contrib.sessions.middleware.SessionMiddleware, "
"django.contrib.auth.middleware.AuthenticationMiddleware, and "
"django.contrib.messages.middleware.MessageMiddleware were removed from the defaults."
"If your project needs these middleware then you should configure this setting."),
obj=None,
id='1_7.W001',
)
]
else:
return []
<commit_msg>Add missing spaces to implicitly joined strings<commit_after> | from __future__ import unicode_literals
from .. import Warning, register, Tags
@register(Tags.compatibility)
def check_1_7_compatibility(**kwargs):
errors = []
errors.extend(_check_middleware_classes(**kwargs))
return errors
def _check_middleware_classes(app_configs=None, **kwargs):
"""
Checks if the user has *not* overridden the ``MIDDLEWARE_CLASSES`` setting &
warns them about the global default changes.
"""
from django.conf import settings
# MIDDLEWARE_CLASSES is overridden by default by startproject. If users
# have removed this override then we'll warn them about the default changes.
if not settings.is_overridden('MIDDLEWARE_CLASSES'):
return [
Warning(
"MIDDLEWARE_CLASSES is not set.",
hint=("Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES. "
"django.contrib.sessions.middleware.SessionMiddleware, "
"django.contrib.auth.middleware.AuthenticationMiddleware, and "
"django.contrib.messages.middleware.MessageMiddleware were removed from the defaults. "
"If your project needs these middleware then you should configure this setting."),
obj=None,
id='1_7.W001',
)
]
else:
return []
| from __future__ import unicode_literals
from .. import Warning, register, Tags
@register(Tags.compatibility)
def check_1_7_compatibility(**kwargs):
errors = []
errors.extend(_check_middleware_classes(**kwargs))
return errors
def _check_middleware_classes(app_configs=None, **kwargs):
"""
Checks if the user has *not* overridden the ``MIDDLEWARE_CLASSES`` setting &
warns them about the global default changes.
"""
from django.conf import settings
# MIDDLEWARE_CLASSES is overridden by default by startproject. If users
# have removed this override then we'll warn them about the default changes.
if not settings.is_overridden('MIDDLEWARE_CLASSES'):
return [
Warning(
"MIDDLEWARE_CLASSES is not set.",
hint=("Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES."
"django.contrib.sessions.middleware.SessionMiddleware, "
"django.contrib.auth.middleware.AuthenticationMiddleware, and "
"django.contrib.messages.middleware.MessageMiddleware were removed from the defaults."
"If your project needs these middleware then you should configure this setting."),
obj=None,
id='1_7.W001',
)
]
else:
return []
Add missing spaces to implicitly joined stringsfrom __future__ import unicode_literals
from .. import Warning, register, Tags
@register(Tags.compatibility)
def check_1_7_compatibility(**kwargs):
errors = []
errors.extend(_check_middleware_classes(**kwargs))
return errors
def _check_middleware_classes(app_configs=None, **kwargs):
"""
Checks if the user has *not* overridden the ``MIDDLEWARE_CLASSES`` setting &
warns them about the global default changes.
"""
from django.conf import settings
# MIDDLEWARE_CLASSES is overridden by default by startproject. If users
# have removed this override then we'll warn them about the default changes.
if not settings.is_overridden('MIDDLEWARE_CLASSES'):
return [
Warning(
"MIDDLEWARE_CLASSES is not set.",
hint=("Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES. "
"django.contrib.sessions.middleware.SessionMiddleware, "
"django.contrib.auth.middleware.AuthenticationMiddleware, and "
"django.contrib.messages.middleware.MessageMiddleware were removed from the defaults. "
"If your project needs these middleware then you should configure this setting."),
obj=None,
id='1_7.W001',
)
]
else:
return []
| <commit_before>from __future__ import unicode_literals
from .. import Warning, register, Tags
@register(Tags.compatibility)
def check_1_7_compatibility(**kwargs):
errors = []
errors.extend(_check_middleware_classes(**kwargs))
return errors
def _check_middleware_classes(app_configs=None, **kwargs):
"""
Checks if the user has *not* overridden the ``MIDDLEWARE_CLASSES`` setting &
warns them about the global default changes.
"""
from django.conf import settings
# MIDDLEWARE_CLASSES is overridden by default by startproject. If users
# have removed this override then we'll warn them about the default changes.
if not settings.is_overridden('MIDDLEWARE_CLASSES'):
return [
Warning(
"MIDDLEWARE_CLASSES is not set.",
hint=("Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES."
"django.contrib.sessions.middleware.SessionMiddleware, "
"django.contrib.auth.middleware.AuthenticationMiddleware, and "
"django.contrib.messages.middleware.MessageMiddleware were removed from the defaults."
"If your project needs these middleware then you should configure this setting."),
obj=None,
id='1_7.W001',
)
]
else:
return []
<commit_msg>Add missing spaces to implicitly joined strings<commit_after>from __future__ import unicode_literals
from .. import Warning, register, Tags
@register(Tags.compatibility)
def check_1_7_compatibility(**kwargs):
errors = []
errors.extend(_check_middleware_classes(**kwargs))
return errors
def _check_middleware_classes(app_configs=None, **kwargs):
"""
Checks if the user has *not* overridden the ``MIDDLEWARE_CLASSES`` setting &
warns them about the global default changes.
"""
from django.conf import settings
# MIDDLEWARE_CLASSES is overridden by default by startproject. If users
# have removed this override then we'll warn them about the default changes.
if not settings.is_overridden('MIDDLEWARE_CLASSES'):
return [
Warning(
"MIDDLEWARE_CLASSES is not set.",
hint=("Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES. "
"django.contrib.sessions.middleware.SessionMiddleware, "
"django.contrib.auth.middleware.AuthenticationMiddleware, and "
"django.contrib.messages.middleware.MessageMiddleware were removed from the defaults. "
"If your project needs these middleware then you should configure this setting."),
obj=None,
id='1_7.W001',
)
]
else:
return []
|
12cf94f7cccad5d9fce8b44726756c33f4219a0c | automata/pda/exceptions.py | automata/pda/exceptions.py | #!/usr/bin/env python3
"""Exception classes specific to pushdown automata."""
from automata.base.exceptions import AutomatonException
class PDAException(AutomatonException):
"""The base class for all PDA-related errors."""
pass
class NondeterminismError(PDAException):
"""A DPDA is exhibiting nondeterminism."""
pass
class InvalidAcceptanceMode(PDAException):
"""The given acceptance mode is invalid."""
pass
| #!/usr/bin/env python3
"""Exception classes specific to pushdown automata."""
from automata.base.exceptions import AutomatonException
class PDAException(AutomatonException):
"""The base class for all PDA-related errors."""
pass
class NondeterminismError(PDAException):
"""A DPDA is exhibiting nondeterminism."""
pass
class InvalidAcceptanceMode(PDAException):
"""The given acceptance mode is invalid."""
pass
| Add blank line to comply with PEP 8 | Add blank line to comply with PEP 8
| Python | mit | caleb531/automata | #!/usr/bin/env python3
"""Exception classes specific to pushdown automata."""
from automata.base.exceptions import AutomatonException
class PDAException(AutomatonException):
"""The base class for all PDA-related errors."""
pass
class NondeterminismError(PDAException):
"""A DPDA is exhibiting nondeterminism."""
pass
class InvalidAcceptanceMode(PDAException):
"""The given acceptance mode is invalid."""
pass
Add blank line to comply with PEP 8 | #!/usr/bin/env python3
"""Exception classes specific to pushdown automata."""
from automata.base.exceptions import AutomatonException
class PDAException(AutomatonException):
"""The base class for all PDA-related errors."""
pass
class NondeterminismError(PDAException):
"""A DPDA is exhibiting nondeterminism."""
pass
class InvalidAcceptanceMode(PDAException):
"""The given acceptance mode is invalid."""
pass
| <commit_before>#!/usr/bin/env python3
"""Exception classes specific to pushdown automata."""
from automata.base.exceptions import AutomatonException
class PDAException(AutomatonException):
"""The base class for all PDA-related errors."""
pass
class NondeterminismError(PDAException):
"""A DPDA is exhibiting nondeterminism."""
pass
class InvalidAcceptanceMode(PDAException):
"""The given acceptance mode is invalid."""
pass
<commit_msg>Add blank line to comply with PEP 8<commit_after> | #!/usr/bin/env python3
"""Exception classes specific to pushdown automata."""
from automata.base.exceptions import AutomatonException
class PDAException(AutomatonException):
"""The base class for all PDA-related errors."""
pass
class NondeterminismError(PDAException):
"""A DPDA is exhibiting nondeterminism."""
pass
class InvalidAcceptanceMode(PDAException):
"""The given acceptance mode is invalid."""
pass
| #!/usr/bin/env python3
"""Exception classes specific to pushdown automata."""
from automata.base.exceptions import AutomatonException
class PDAException(AutomatonException):
"""The base class for all PDA-related errors."""
pass
class NondeterminismError(PDAException):
"""A DPDA is exhibiting nondeterminism."""
pass
class InvalidAcceptanceMode(PDAException):
"""The given acceptance mode is invalid."""
pass
Add blank line to comply with PEP 8#!/usr/bin/env python3
"""Exception classes specific to pushdown automata."""
from automata.base.exceptions import AutomatonException
class PDAException(AutomatonException):
"""The base class for all PDA-related errors."""
pass
class NondeterminismError(PDAException):
"""A DPDA is exhibiting nondeterminism."""
pass
class InvalidAcceptanceMode(PDAException):
"""The given acceptance mode is invalid."""
pass
| <commit_before>#!/usr/bin/env python3
"""Exception classes specific to pushdown automata."""
from automata.base.exceptions import AutomatonException
class PDAException(AutomatonException):
"""The base class for all PDA-related errors."""
pass
class NondeterminismError(PDAException):
"""A DPDA is exhibiting nondeterminism."""
pass
class InvalidAcceptanceMode(PDAException):
"""The given acceptance mode is invalid."""
pass
<commit_msg>Add blank line to comply with PEP 8<commit_after>#!/usr/bin/env python3
"""Exception classes specific to pushdown automata."""
from automata.base.exceptions import AutomatonException
class PDAException(AutomatonException):
"""The base class for all PDA-related errors."""
pass
class NondeterminismError(PDAException):
"""A DPDA is exhibiting nondeterminism."""
pass
class InvalidAcceptanceMode(PDAException):
"""The given acceptance mode is invalid."""
pass
|
85dcb6ff036d03fd1fadc62a519147cf6b9ca8de | floq/blockmatrix.py | floq/blockmatrix.py | import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
return matrix[start_row:stop_row,start_column:stop_column]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
matrix[start_row:stop_row,start_column:stop_column] = block | import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
return matrix[start_row:stop_row,start_col:stop_col]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
matrix[start_row:stop_row,start_col:stop_col] = block | Rename column -> col for consistency with row | Rename column -> col for consistency with row
| Python | mit | sirmarcel/floq | import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
return matrix[start_row:stop_row,start_column:stop_column]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
matrix[start_row:stop_row,start_column:stop_column] = blockRename column -> col for consistency with row | import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
return matrix[start_row:stop_row,start_col:stop_col]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
matrix[start_row:stop_row,start_col:stop_col] = block | <commit_before>import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
return matrix[start_row:stop_row,start_column:stop_column]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
matrix[start_row:stop_row,start_column:stop_column] = block<commit_msg>Rename column -> col for consistency with row<commit_after> | import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
return matrix[start_row:stop_row,start_col:stop_col]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
matrix[start_row:stop_row,start_col:stop_col] = block | import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
return matrix[start_row:stop_row,start_column:stop_column]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
matrix[start_row:stop_row,start_column:stop_column] = blockRename column -> col for consistency with rowimport numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
return matrix[start_row:stop_row,start_col:stop_col]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
matrix[start_row:stop_row,start_col:stop_col] = block | <commit_before>import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
return matrix[start_row:stop_row,start_column:stop_column]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,column):
start_row = row*dim_block
start_column = column*dim_block
stop_row = start_row+dim_block
stop_column = start_column+dim_block
matrix[start_row:stop_row,start_column:stop_column] = block<commit_msg>Rename column -> col for consistency with row<commit_after>import numpy as np
# Provide functions to get/set blocks in numpy arrays
def get_block_from_matrix(matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
return matrix[start_row:stop_row,start_col:stop_col]
def set_block_in_matrix(block,matrix,dim_block,n_block,row,col):
start_row = row*dim_block
start_col = col*dim_block
stop_row = start_row+dim_block
stop_col = start_col+dim_block
matrix[start_row:stop_row,start_col:stop_col] = block |
980cbb13e874f7d3769dff12992779f676a5b2f3 | plotly/plotly/__init__.py | plotly/plotly/__init__.py | """
plotly
======
This module defines functionality that requires interaction between your
local machine and Plotly. Almost all functionality used here will require a
verifiable account (username/api-key pair) and a network connection.
"""
from . plotly import (
sign_in, update_plot_options, get_plot_options, get_credentials, iplot,
plot, iplot_mpl, plot_mpl, get_figure, Stream, image
)
| """
plotly
======
This module defines functionality that requires interaction between your
local machine and Plotly. Almost all functionality used here will require a
verifiable account (username/api-key pair) and a network connection.
"""
from . plotly import (
sign_in, update_plot_options, get_plot_options, get_credentials, iplot,
plot, iplot_mpl, plot_mpl, get_figure, Stream, image, get_config
)
| Add get_config to the list of public functions for plotly.py | Add get_config to the list of public functions for plotly.py
| Python | mit | plotly/python-api,plotly/plotly.py,ee-in/python-api,plotly/python-api,plotly/plotly.py,ee-in/python-api,plotly/python-api,plotly/plotly.py,ee-in/python-api | """
plotly
======
This module defines functionality that requires interaction between your
local machine and Plotly. Almost all functionality used here will require a
verifiable account (username/api-key pair) and a network connection.
"""
from . plotly import (
sign_in, update_plot_options, get_plot_options, get_credentials, iplot,
plot, iplot_mpl, plot_mpl, get_figure, Stream, image
)
Add get_config to the list of public functions for plotly.py | """
plotly
======
This module defines functionality that requires interaction between your
local machine and Plotly. Almost all functionality used here will require a
verifiable account (username/api-key pair) and a network connection.
"""
from . plotly import (
sign_in, update_plot_options, get_plot_options, get_credentials, iplot,
plot, iplot_mpl, plot_mpl, get_figure, Stream, image, get_config
)
| <commit_before>"""
plotly
======
This module defines functionality that requires interaction between your
local machine and Plotly. Almost all functionality used here will require a
verifiable account (username/api-key pair) and a network connection.
"""
from . plotly import (
sign_in, update_plot_options, get_plot_options, get_credentials, iplot,
plot, iplot_mpl, plot_mpl, get_figure, Stream, image
)
<commit_msg>Add get_config to the list of public functions for plotly.py<commit_after> | """
plotly
======
This module defines functionality that requires interaction between your
local machine and Plotly. Almost all functionality used here will require a
verifiable account (username/api-key pair) and a network connection.
"""
from . plotly import (
sign_in, update_plot_options, get_plot_options, get_credentials, iplot,
plot, iplot_mpl, plot_mpl, get_figure, Stream, image, get_config
)
| """
plotly
======
This module defines functionality that requires interaction between your
local machine and Plotly. Almost all functionality used here will require a
verifiable account (username/api-key pair) and a network connection.
"""
from . plotly import (
sign_in, update_plot_options, get_plot_options, get_credentials, iplot,
plot, iplot_mpl, plot_mpl, get_figure, Stream, image
)
Add get_config to the list of public functions for plotly.py"""
plotly
======
This module defines functionality that requires interaction between your
local machine and Plotly. Almost all functionality used here will require a
verifiable account (username/api-key pair) and a network connection.
"""
from . plotly import (
sign_in, update_plot_options, get_plot_options, get_credentials, iplot,
plot, iplot_mpl, plot_mpl, get_figure, Stream, image, get_config
)
| <commit_before>"""
plotly
======
This module defines functionality that requires interaction between your
local machine and Plotly. Almost all functionality used here will require a
verifiable account (username/api-key pair) and a network connection.
"""
from . plotly import (
sign_in, update_plot_options, get_plot_options, get_credentials, iplot,
plot, iplot_mpl, plot_mpl, get_figure, Stream, image
)
<commit_msg>Add get_config to the list of public functions for plotly.py<commit_after>"""
plotly
======
This module defines functionality that requires interaction between your
local machine and Plotly. Almost all functionality used here will require a
verifiable account (username/api-key pair) and a network connection.
"""
from . plotly import (
sign_in, update_plot_options, get_plot_options, get_credentials, iplot,
plot, iplot_mpl, plot_mpl, get_figure, Stream, image, get_config
)
|
388826605b556a9632c3dea22ca3ba1219dfc5ea | wallp/main.py | wallp/main.py | import sys
from redcmd.api import execute_commandline
def main():
from .db.manage.db import DB
db = DB()
response = db.check()
from util.printer import printer
response and printer.printf('program maintenance', response)
from .util import log
from .db.app.config import Config, ConfigError
from . import const
config = Config()
try:
log.start(config.eget('client.logfile', default=const.logfile), loglevel=config.eget('client.loglevel', default=40))
except ConfigError as e:
print(str(e) + '\nlog start failed')
from .subcmd import all
from .version import __version__
def update_autocomplete_cb():
printer.printf('program maintenance', 'updated autocomplete data')
execute_commandline(prog=const.app_name, description=const.app_description, version=__version__, _to_hyphen=True,
default_subcommand='change', moves=True, update_autocomplete_cb=update_autocomplete_cb)
| import sys
from redcmd.api import execute_commandline
def main():
from .db.manage.db import DB
db = DB()
response = db.check()
from util.printer import printer
response and printer.printf('program maintenance', response)
from .util import log
from .db.app.config import Config, ConfigError
from . import const
config = Config()
try:
log.start(config.eget('client.logfile', default=const.logfile), loglevel=config.eget('client.loglevel', default=40))
except ConfigError as e:
print(str(e) + '\nlog start failed')
from .subcmd import all
from .version import __version__
def update_autocomplete_cb():
printer.printf('program maintenance', 'updated autocomplete data')
execute_commandline(prog=const.app_name, description=const.app_description, version=__version__, _to_hyphen=True,
default_subcommand='source random', moves=True, update_autocomplete_cb=update_autocomplete_cb)
| Change default subcommand to "source random" | Change default subcommand to "source random"
| Python | mit | amol9/wallp | import sys
from redcmd.api import execute_commandline
def main():
from .db.manage.db import DB
db = DB()
response = db.check()
from util.printer import printer
response and printer.printf('program maintenance', response)
from .util import log
from .db.app.config import Config, ConfigError
from . import const
config = Config()
try:
log.start(config.eget('client.logfile', default=const.logfile), loglevel=config.eget('client.loglevel', default=40))
except ConfigError as e:
print(str(e) + '\nlog start failed')
from .subcmd import all
from .version import __version__
def update_autocomplete_cb():
printer.printf('program maintenance', 'updated autocomplete data')
execute_commandline(prog=const.app_name, description=const.app_description, version=__version__, _to_hyphen=True,
default_subcommand='change', moves=True, update_autocomplete_cb=update_autocomplete_cb)
Change default subcommand to "source random" | import sys
from redcmd.api import execute_commandline
def main():
from .db.manage.db import DB
db = DB()
response = db.check()
from util.printer import printer
response and printer.printf('program maintenance', response)
from .util import log
from .db.app.config import Config, ConfigError
from . import const
config = Config()
try:
log.start(config.eget('client.logfile', default=const.logfile), loglevel=config.eget('client.loglevel', default=40))
except ConfigError as e:
print(str(e) + '\nlog start failed')
from .subcmd import all
from .version import __version__
def update_autocomplete_cb():
printer.printf('program maintenance', 'updated autocomplete data')
execute_commandline(prog=const.app_name, description=const.app_description, version=__version__, _to_hyphen=True,
default_subcommand='source random', moves=True, update_autocomplete_cb=update_autocomplete_cb)
| <commit_before>import sys
from redcmd.api import execute_commandline
def main():
from .db.manage.db import DB
db = DB()
response = db.check()
from util.printer import printer
response and printer.printf('program maintenance', response)
from .util import log
from .db.app.config import Config, ConfigError
from . import const
config = Config()
try:
log.start(config.eget('client.logfile', default=const.logfile), loglevel=config.eget('client.loglevel', default=40))
except ConfigError as e:
print(str(e) + '\nlog start failed')
from .subcmd import all
from .version import __version__
def update_autocomplete_cb():
printer.printf('program maintenance', 'updated autocomplete data')
execute_commandline(prog=const.app_name, description=const.app_description, version=__version__, _to_hyphen=True,
default_subcommand='change', moves=True, update_autocomplete_cb=update_autocomplete_cb)
<commit_msg>Change default subcommand to "source random"<commit_after> | import sys
from redcmd.api import execute_commandline
def main():
from .db.manage.db import DB
db = DB()
response = db.check()
from util.printer import printer
response and printer.printf('program maintenance', response)
from .util import log
from .db.app.config import Config, ConfigError
from . import const
config = Config()
try:
log.start(config.eget('client.logfile', default=const.logfile), loglevel=config.eget('client.loglevel', default=40))
except ConfigError as e:
print(str(e) + '\nlog start failed')
from .subcmd import all
from .version import __version__
def update_autocomplete_cb():
printer.printf('program maintenance', 'updated autocomplete data')
execute_commandline(prog=const.app_name, description=const.app_description, version=__version__, _to_hyphen=True,
default_subcommand='source random', moves=True, update_autocomplete_cb=update_autocomplete_cb)
| import sys
from redcmd.api import execute_commandline
def main():
from .db.manage.db import DB
db = DB()
response = db.check()
from util.printer import printer
response and printer.printf('program maintenance', response)
from .util import log
from .db.app.config import Config, ConfigError
from . import const
config = Config()
try:
log.start(config.eget('client.logfile', default=const.logfile), loglevel=config.eget('client.loglevel', default=40))
except ConfigError as e:
print(str(e) + '\nlog start failed')
from .subcmd import all
from .version import __version__
def update_autocomplete_cb():
printer.printf('program maintenance', 'updated autocomplete data')
execute_commandline(prog=const.app_name, description=const.app_description, version=__version__, _to_hyphen=True,
default_subcommand='change', moves=True, update_autocomplete_cb=update_autocomplete_cb)
Change default subcommand to "source random"import sys
from redcmd.api import execute_commandline
def main():
from .db.manage.db import DB
db = DB()
response = db.check()
from util.printer import printer
response and printer.printf('program maintenance', response)
from .util import log
from .db.app.config import Config, ConfigError
from . import const
config = Config()
try:
log.start(config.eget('client.logfile', default=const.logfile), loglevel=config.eget('client.loglevel', default=40))
except ConfigError as e:
print(str(e) + '\nlog start failed')
from .subcmd import all
from .version import __version__
def update_autocomplete_cb():
printer.printf('program maintenance', 'updated autocomplete data')
execute_commandline(prog=const.app_name, description=const.app_description, version=__version__, _to_hyphen=True,
default_subcommand='source random', moves=True, update_autocomplete_cb=update_autocomplete_cb)
| <commit_before>import sys
from redcmd.api import execute_commandline
def main():
from .db.manage.db import DB
db = DB()
response = db.check()
from util.printer import printer
response and printer.printf('program maintenance', response)
from .util import log
from .db.app.config import Config, ConfigError
from . import const
config = Config()
try:
log.start(config.eget('client.logfile', default=const.logfile), loglevel=config.eget('client.loglevel', default=40))
except ConfigError as e:
print(str(e) + '\nlog start failed')
from .subcmd import all
from .version import __version__
def update_autocomplete_cb():
printer.printf('program maintenance', 'updated autocomplete data')
execute_commandline(prog=const.app_name, description=const.app_description, version=__version__, _to_hyphen=True,
default_subcommand='change', moves=True, update_autocomplete_cb=update_autocomplete_cb)
<commit_msg>Change default subcommand to "source random"<commit_after>import sys
from redcmd.api import execute_commandline
def main():
from .db.manage.db import DB
db = DB()
response = db.check()
from util.printer import printer
response and printer.printf('program maintenance', response)
from .util import log
from .db.app.config import Config, ConfigError
from . import const
config = Config()
try:
log.start(config.eget('client.logfile', default=const.logfile), loglevel=config.eget('client.loglevel', default=40))
except ConfigError as e:
print(str(e) + '\nlog start failed')
from .subcmd import all
from .version import __version__
def update_autocomplete_cb():
printer.printf('program maintenance', 'updated autocomplete data')
execute_commandline(prog=const.app_name, description=const.app_description, version=__version__, _to_hyphen=True,
default_subcommand='source random', moves=True, update_autocomplete_cb=update_autocomplete_cb)
|
e7691a775dc745984155a5f2e07140c207c3ab20 | api/base/parsers.py | api/base/parsers.py | from rest_framework.parsers import JSONParser
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
| from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ValidationError
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
if not isinstance(result, dict):
raise ValidationError("Invalid data. Expected a dictionary but got {}".format(type(result)))
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
object_type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': object_type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
| Enforce that request data is a dictionary. | Enforce that request data is a dictionary.
| Python | apache-2.0 | DanielSBrown/osf.io,asanfilippo7/osf.io,cwisecarver/osf.io,sloria/osf.io,Ghalko/osf.io,felliott/osf.io,caseyrygt/osf.io,jnayak1/osf.io,caneruguz/osf.io,mfraezz/osf.io,kch8qx/osf.io,KAsante95/osf.io,emetsger/osf.io,caseyrygt/osf.io,mluo613/osf.io,samchrisinger/osf.io,wearpants/osf.io,billyhunt/osf.io,chennan47/osf.io,Nesiehr/osf.io,wearpants/osf.io,binoculars/osf.io,emetsger/osf.io,erinspace/osf.io,kwierman/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,TomBaxter/osf.io,doublebits/osf.io,TomHeatwole/osf.io,erinspace/osf.io,samanehsan/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,rdhyee/osf.io,icereval/osf.io,chrisseto/osf.io,wearpants/osf.io,RomanZWang/osf.io,doublebits/osf.io,mattclark/osf.io,danielneis/osf.io,binoculars/osf.io,monikagrabowska/osf.io,kwierman/osf.io,monikagrabowska/osf.io,KAsante95/osf.io,kwierman/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,mluke93/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,crcresearch/osf.io,acshi/osf.io,aaxelb/osf.io,hmoco/osf.io,binoculars/osf.io,DanielSBrown/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,caneruguz/osf.io,doublebits/osf.io,sloria/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,baylee-d/osf.io,Ghalko/osf.io,crcresearch/osf.io,samanehsan/osf.io,GageGaskins/osf.io,acshi/osf.io,hmoco/osf.io,chrisseto/osf.io,baylee-d/osf.io,alexschiller/osf.io,mattclark/osf.io,mfraezz/osf.io,GageGaskins/osf.io,kch8qx/osf.io,acshi/osf.io,danielneis/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,felliott/osf.io,samchrisinger/osf.io,KAsante95/osf.io,felliott/osf.io,RomanZWang/osf.io,caseyrygt/osf.io,caneruguz/osf.io,asanfilippo7/osf.io,brianjgeiger/osf.io,mluke93/osf.io,adlius/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,mluo613/osf.io,mluke93/osf.io,doublebits/osf.io,aaxelb/osf.io,caneruguz/osf.io,cslzchen/osf.io,ticklemepierce/osf.io,adlius/osf.io,leb2dg/osf.io,aaxelb/osf.io,zamattiac/osf.io,brandonPurvis/osf.io,chrisseto/osf.io,leb2dg/osf.io,jnayak
1/osf.io,pattisdr/osf.io,Nesiehr/osf.io,Ghalko/osf.io,ticklemepierce/osf.io,alexschiller/osf.io,doublebits/osf.io,abought/osf.io,SSJohns/osf.io,TomHeatwole/osf.io,adlius/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,abought/osf.io,chrisseto/osf.io,TomHeatwole/osf.io,billyhunt/osf.io,alexschiller/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,icereval/osf.io,billyhunt/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,samchrisinger/osf.io,pattisdr/osf.io,zachjanicki/osf.io,mluke93/osf.io,Johnetordoff/osf.io,brandonPurvis/osf.io,amyshi188/osf.io,KAsante95/osf.io,crcresearch/osf.io,aaxelb/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,billyhunt/osf.io,danielneis/osf.io,ZobairAlijan/osf.io,cslzchen/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,Ghalko/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,hmoco/osf.io,samanehsan/osf.io,felliott/osf.io,kch8qx/osf.io,baylee-d/osf.io,mluo613/osf.io,sloria/osf.io,amyshi188/osf.io,laurenrevere/osf.io,kch8qx/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,danielneis/osf.io,GageGaskins/osf.io,mluo613/osf.io,emetsger/osf.io,zachjanicki/osf.io,leb2dg/osf.io,caseyrollins/osf.io,chennan47/osf.io,samanehsan/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,abought/osf.io,zachjanicki/osf.io,SSJohns/osf.io,ZobairAlijan/osf.io,ticklemepierce/osf.io,amyshi188/osf.io,rdhyee/osf.io,TomBaxter/osf.io,alexschiller/osf.io,chennan47/osf.io,adlius/osf.io,erinspace/osf.io,laurenrevere/osf.io,kch8qx/osf.io,amyshi188/osf.io,hmoco/osf.io,jnayak1/osf.io,SSJohns/osf.io,rdhyee/osf.io,Johnetordoff/osf.io,GageGaskins/osf.io,zachjanicki/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,Nesiehr/osf.io,RomanZWang/osf.io,zamattiac/osf.io,laurenrevere/osf.io,abought/osf.io,brandonPurvis/osf.io,emetsger/osf.io,brandonPurvis/osf.io,asanfilippo7/osf.io,asanfilippo7/osf.io,cwisecarv
er/osf.io,saradbowman/osf.io,billyhunt/osf.io,saradbowman/osf.io,kwierman/osf.io,jnayak1/osf.io,KAsante95/osf.io,pattisdr/osf.io,icereval/osf.io,acshi/osf.io | from rest_framework.parsers import JSONParser
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
Enforce that request data is a dictionary. | from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ValidationError
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
if not isinstance(result, dict):
raise ValidationError("Invalid data. Expected a dictionary but got {}".format(type(result)))
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
object_type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': object_type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
| <commit_before>from rest_framework.parsers import JSONParser
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
<commit_msg>Enforce that request data is a dictionary.<commit_after> | from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ValidationError
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
if not isinstance(result, dict):
raise ValidationError("Invalid data. Expected a dictionary but got {}".format(type(result)))
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
object_type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': object_type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
| from rest_framework.parsers import JSONParser
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
Enforce that request data is a dictionary.from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ValidationError
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
if not isinstance(result, dict):
raise ValidationError("Invalid data. Expected a dictionary but got {}".format(type(result)))
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
object_type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': object_type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
| <commit_before>from rest_framework.parsers import JSONParser
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
<commit_msg>Enforce that request data is a dictionary.<commit_after>from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ValidationError
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
if not isinstance(result, dict):
raise ValidationError("Invalid data. Expected a dictionary but got {}".format(type(result)))
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
object_type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': object_type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
class JSONAPIParserForRegularJSON(JSONAPIParser):
media_type = 'application/json'
|
bc9488b6954c172d903521df9f00c7ff71243fff | tests.py | tests.py | #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
linescan.py - Effortlessly read lines from a text file using any encoding
Created 2013-2014 Triangle717
<http://Triangle717.WordPress.com/>
Licensed under The MIT License
<http://opensource.org/licenses/MIT/>
"""
from __future__ import print_function
import sys
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
| #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
linescan.py - Effortlessly read lines from a text file using any encoding
Created 2013-2014 Triangle717
<http://Triangle717.WordPress.com/>
Licensed under The MIT License
<http://opensource.org/licenses/MIT/>
"""
from __future__ import print_function
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
| Fix F401 error (module imported but unused) | Fix F401 error (module imported but unused)
[ci skip] | Python | mit | le717/linescan.py | #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
linescan.py - Effortlessly read lines from a text file using any encoding
Created 2013-2014 Triangle717
<http://Triangle717.WordPress.com/>
Licensed under The MIT License
<http://opensource.org/licenses/MIT/>
"""
from __future__ import print_function
import sys
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
Fix F401 error (module imported but unused)
[ci skip] | #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
linescan.py - Effortlessly read lines from a text file using any encoding
Created 2013-2014 Triangle717
<http://Triangle717.WordPress.com/>
Licensed under The MIT License
<http://opensource.org/licenses/MIT/>
"""
from __future__ import print_function
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
| <commit_before>#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
linescan.py - Effortlessly read lines from a text file using any encoding
Created 2013-2014 Triangle717
<http://Triangle717.WordPress.com/>
Licensed under The MIT License
<http://opensource.org/licenses/MIT/>
"""
from __future__ import print_function
import sys
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
<commit_msg>Fix F401 error (module imported but unused)
[ci skip]<commit_after> | #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
linescan.py - Effortlessly read lines from a text file using any encoding
Created 2013-2014 Triangle717
<http://Triangle717.WordPress.com/>
Licensed under The MIT License
<http://opensource.org/licenses/MIT/>
"""
from __future__ import print_function
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
| #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
linescan.py - Effortlessly read lines from a text file using any encoding
Created 2013-2014 Triangle717
<http://Triangle717.WordPress.com/>
Licensed under The MIT License
<http://opensource.org/licenses/MIT/>
"""
from __future__ import print_function
import sys
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
Fix F401 error (module imported but unused)
[ci skip]#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
linescan.py - Effortlessly read lines from a text file using any encoding
Created 2013-2014 Triangle717
<http://Triangle717.WordPress.com/>
Licensed under The MIT License
<http://opensource.org/licenses/MIT/>
"""
from __future__ import print_function
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
| <commit_before>#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
linescan.py - Effortlessly read lines from a text file using any encoding
Created 2013-2014 Triangle717
<http://Triangle717.WordPress.com/>
Licensed under The MIT License
<http://opensource.org/licenses/MIT/>
"""
from __future__ import print_function
import sys
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
<commit_msg>Fix F401 error (module imported but unused)
[ci skip]<commit_after>#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
linescan.py - Effortlessly read lines from a text file using any encoding
Created 2013-2014 Triangle717
<http://Triangle717.WordPress.com/>
Licensed under The MIT License
<http://opensource.org/licenses/MIT/>
"""
from __future__ import print_function
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
|
d2106c0a6cb4bbf523914786ded873261cb174c2 | nipype/pipeline/__init__.py | nipype/pipeline/__init__.py | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Package contains modules for generating pipelines using interfaces
"""
__docformat__ = 'restructuredtext'
from .engine import Node, MapNode, Workflow
from .utils import write_prov
| # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Package contains modules for generating pipelines using interfaces
"""
__docformat__ = 'restructuredtext'
from engine import Node, MapNode, JoinNode, Workflow
from .utils import write_prov
| Add JoinNode to pipeline init | Add JoinNode to pipeline init
| Python | bsd-3-clause | arokem/nipype,gerddie/nipype,Leoniela/nipype,fprados/nipype,pearsonlab/nipype,blakedewey/nipype,carolFrohlich/nipype,blakedewey/nipype,gerddie/nipype,dgellis90/nipype,glatard/nipype,arokem/nipype,carlohamalainen/nipype,carolFrohlich/nipype,Leoniela/nipype,glatard/nipype,dmordom/nipype,grlee77/nipype,carolFrohlich/nipype,iglpdc/nipype,grlee77/nipype,sgiavasis/nipype,carlohamalainen/nipype,fprados/nipype,blakedewey/nipype,wanderine/nipype,pearsonlab/nipype,sgiavasis/nipype,wanderine/nipype,FCP-INDI/nipype,blakedewey/nipype,sgiavasis/nipype,gerddie/nipype,Leoniela/nipype,FCP-INDI/nipype,iglpdc/nipype,carolFrohlich/nipype,mick-d/nipype,dgellis90/nipype,JohnGriffiths/nipype,mick-d/nipype,FCP-INDI/nipype,pearsonlab/nipype,wanderine/nipype,JohnGriffiths/nipype,pearsonlab/nipype,gerddie/nipype,mick-d/nipype_source,dmordom/nipype,grlee77/nipype,rameshvs/nipype,wanderine/nipype,carlohamalainen/nipype,mick-d/nipype,arokem/nipype,dgellis90/nipype,mick-d/nipype,grlee77/nipype,rameshvs/nipype,fprados/nipype,iglpdc/nipype,JohnGriffiths/nipype,sgiavasis/nipype,arokem/nipype,iglpdc/nipype,JohnGriffiths/nipype,dgellis90/nipype,glatard/nipype,FCP-INDI/nipype,rameshvs/nipype,dmordom/nipype,rameshvs/nipype,mick-d/nipype_source,mick-d/nipype_source,glatard/nipype | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Package contains modules for generating pipelines using interfaces
"""
__docformat__ = 'restructuredtext'
from .engine import Node, MapNode, Workflow
from .utils import write_prov
Add JoinNode to pipeline init | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Package contains modules for generating pipelines using interfaces
"""
__docformat__ = 'restructuredtext'
from engine import Node, MapNode, JoinNode, Workflow
from .utils import write_prov
| <commit_before># emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Package contains modules for generating pipelines using interfaces
"""
__docformat__ = 'restructuredtext'
from .engine import Node, MapNode, Workflow
from .utils import write_prov
<commit_msg>Add JoinNode to pipeline init<commit_after> | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Package contains modules for generating pipelines using interfaces
"""
__docformat__ = 'restructuredtext'
from engine import Node, MapNode, JoinNode, Workflow
from .utils import write_prov
| # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Package contains modules for generating pipelines using interfaces
"""
__docformat__ = 'restructuredtext'
from .engine import Node, MapNode, Workflow
from .utils import write_prov
Add JoinNode to pipeline init# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Package contains modules for generating pipelines using interfaces
"""
__docformat__ = 'restructuredtext'
from engine import Node, MapNode, JoinNode, Workflow
from .utils import write_prov
| <commit_before># emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Package contains modules for generating pipelines using interfaces
"""
__docformat__ = 'restructuredtext'
from .engine import Node, MapNode, Workflow
from .utils import write_prov
<commit_msg>Add JoinNode to pipeline init<commit_after># emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Package contains modules for generating pipelines using interfaces
"""
__docformat__ = 'restructuredtext'
from engine import Node, MapNode, JoinNode, Workflow
from .utils import write_prov
|
780d1fa408677994c739ce489bd0fde2ed6242f0 | ideascaly/__init__.py | ideascaly/__init__.py | __author__ = 'jorgesaldivar'
| # IdeaScaly
# Copyright 2015 Jorge Saldivar
# See LICENSE for details.
"""
IdeaScaly: IdeaScale API client
"""
__version__ = '0.1'
__author__ = 'Jorge Saldivar'
__license__ = 'MIT'
from ideascaly.api import API
from ideascaly.auth import AuthNonSSO, AuthNonSSOMem, AuthSSO, AuthResearch
from ideascaly.error import IdeaScalyError
from ideascaly.models import Author, Idea, Campaign, Comment, Vote, JSONModel, ModelFactory, Model
from ideascaly.parsers import Parser, RawParser, JSONParser, ModelParser
| Add details of the project | Add details of the project
| Python | mit | joausaga/ideascaly | __author__ = 'jorgesaldivar'
Add details of the project | # IdeaScaly
# Copyright 2015 Jorge Saldivar
# See LICENSE for details.
"""
IdeaScaly: IdeaScale API client
"""
__version__ = '0.1'
__author__ = 'Jorge Saldivar'
__license__ = 'MIT'
from ideascaly.api import API
from ideascaly.auth import AuthNonSSO, AuthNonSSOMem, AuthSSO, AuthResearch
from ideascaly.error import IdeaScalyError
from ideascaly.models import Author, Idea, Campaign, Comment, Vote, JSONModel, ModelFactory, Model
from ideascaly.parsers import Parser, RawParser, JSONParser, ModelParser
| <commit_before>__author__ = 'jorgesaldivar'
<commit_msg>Add details of the project<commit_after> | # IdeaScaly
# Copyright 2015 Jorge Saldivar
# See LICENSE for details.
"""
IdeaScaly: IdeaScale API client
"""
__version__ = '0.1'
__author__ = 'Jorge Saldivar'
__license__ = 'MIT'
from ideascaly.api import API
from ideascaly.auth import AuthNonSSO, AuthNonSSOMem, AuthSSO, AuthResearch
from ideascaly.error import IdeaScalyError
from ideascaly.models import Author, Idea, Campaign, Comment, Vote, JSONModel, ModelFactory, Model
from ideascaly.parsers import Parser, RawParser, JSONParser, ModelParser
| __author__ = 'jorgesaldivar'
Add details of the project# IdeaScaly
# Copyright 2015 Jorge Saldivar
# See LICENSE for details.
"""
IdeaScaly: IdeaScale API client
"""
__version__ = '0.1'
__author__ = 'Jorge Saldivar'
__license__ = 'MIT'
from ideascaly.api import API
from ideascaly.auth import AuthNonSSO, AuthNonSSOMem, AuthSSO, AuthResearch
from ideascaly.error import IdeaScalyError
from ideascaly.models import Author, Idea, Campaign, Comment, Vote, JSONModel, ModelFactory, Model
from ideascaly.parsers import Parser, RawParser, JSONParser, ModelParser
| <commit_before>__author__ = 'jorgesaldivar'
<commit_msg>Add details of the project<commit_after># IdeaScaly
# Copyright 2015 Jorge Saldivar
# See LICENSE for details.
"""
IdeaScaly: IdeaScale API client
"""
__version__ = '0.1'
__author__ = 'Jorge Saldivar'
__license__ = 'MIT'
from ideascaly.api import API
from ideascaly.auth import AuthNonSSO, AuthNonSSOMem, AuthSSO, AuthResearch
from ideascaly.error import IdeaScalyError
from ideascaly.models import Author, Idea, Campaign, Comment, Vote, JSONModel, ModelFactory, Model
from ideascaly.parsers import Parser, RawParser, JSONParser, ModelParser
|
30b56f27cff21b93d68524fc992d6e731fb80e57 | tests.py | tests.py | from models import AuthenticationError,AuthenticationRequired
from trello import Trello
import unittest
import os
class BoardTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
def test01_list_boards(self):
print "list boards"
self.assertEquals(
len(self._trello.list_boards()),
int(os.environ['TRELLO_TEST_BOARD_COUNT']))
def test02_board_attrs(self):
print "board attrs"
boards = self._trello.list_boards()
for b in boards:
self.assertIsNotNone(b['_id'], msg="_id not provided")
self.assertIsNotNone(b['name'], msg="name not provided")
self.assertIsNotNone(b['closed'], msg="closed not provided")
class CardTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
if __name__ == "__main__":
unittest.main()
| from models import AuthenticationError,AuthenticationRequired
from trello import Trello
import unittest
import os
class TrelloTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
def test01_list_boards(self):
self.assertEquals(
len(self._trello.list_boards()),
int(os.environ['TRELLO_TEST_BOARD_COUNT']))
def test10_board_attrs(self):
boards = self._trello.list_boards()
for b in boards:
self.assertIsNotNone(b['_id'], msg="_id not provided")
self.assertIsNotNone(b['name'], msg="name not provided")
self.assertIsNotNone(b['closed'], msg="closed not provided")
if __name__ == "__main__":
unittest.main()
| Make it a single test case | Make it a single test case
| Python | bsd-3-clause | Wooble/py-trello,mehdy/py-trello,ntrepid8/py-trello,portante/py-trello,nMustaki/py-trello,WoLpH/py-trello,sarumont/py-trello,merlinpatt/py-trello,gchp/py-trello | from models import AuthenticationError,AuthenticationRequired
from trello import Trello
import unittest
import os
class BoardTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
def test01_list_boards(self):
print "list boards"
self.assertEquals(
len(self._trello.list_boards()),
int(os.environ['TRELLO_TEST_BOARD_COUNT']))
def test02_board_attrs(self):
print "board attrs"
boards = self._trello.list_boards()
for b in boards:
self.assertIsNotNone(b['_id'], msg="_id not provided")
self.assertIsNotNone(b['name'], msg="name not provided")
self.assertIsNotNone(b['closed'], msg="closed not provided")
class CardTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
if __name__ == "__main__":
unittest.main()
Make it a single test case | from models import AuthenticationError,AuthenticationRequired
from trello import Trello
import unittest
import os
class TrelloTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
def test01_list_boards(self):
self.assertEquals(
len(self._trello.list_boards()),
int(os.environ['TRELLO_TEST_BOARD_COUNT']))
def test10_board_attrs(self):
boards = self._trello.list_boards()
for b in boards:
self.assertIsNotNone(b['_id'], msg="_id not provided")
self.assertIsNotNone(b['name'], msg="name not provided")
self.assertIsNotNone(b['closed'], msg="closed not provided")
if __name__ == "__main__":
unittest.main()
| <commit_before>from models import AuthenticationError,AuthenticationRequired
from trello import Trello
import unittest
import os
class BoardTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
def test01_list_boards(self):
print "list boards"
self.assertEquals(
len(self._trello.list_boards()),
int(os.environ['TRELLO_TEST_BOARD_COUNT']))
def test02_board_attrs(self):
print "board attrs"
boards = self._trello.list_boards()
for b in boards:
self.assertIsNotNone(b['_id'], msg="_id not provided")
self.assertIsNotNone(b['name'], msg="name not provided")
self.assertIsNotNone(b['closed'], msg="closed not provided")
class CardTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
if __name__ == "__main__":
unittest.main()
<commit_msg>Make it a single test case<commit_after> | from models import AuthenticationError,AuthenticationRequired
from trello import Trello
import unittest
import os
class TrelloTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
def test01_list_boards(self):
self.assertEquals(
len(self._trello.list_boards()),
int(os.environ['TRELLO_TEST_BOARD_COUNT']))
def test10_board_attrs(self):
boards = self._trello.list_boards()
for b in boards:
self.assertIsNotNone(b['_id'], msg="_id not provided")
self.assertIsNotNone(b['name'], msg="name not provided")
self.assertIsNotNone(b['closed'], msg="closed not provided")
if __name__ == "__main__":
unittest.main()
| from models import AuthenticationError,AuthenticationRequired
from trello import Trello
import unittest
import os
class BoardTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
def test01_list_boards(self):
print "list boards"
self.assertEquals(
len(self._trello.list_boards()),
int(os.environ['TRELLO_TEST_BOARD_COUNT']))
def test02_board_attrs(self):
print "board attrs"
boards = self._trello.list_boards()
for b in boards:
self.assertIsNotNone(b['_id'], msg="_id not provided")
self.assertIsNotNone(b['name'], msg="name not provided")
self.assertIsNotNone(b['closed'], msg="closed not provided")
class CardTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
if __name__ == "__main__":
unittest.main()
Make it a single test casefrom models import AuthenticationError,AuthenticationRequired
from trello import Trello
import unittest
import os
class TrelloTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
def test01_list_boards(self):
self.assertEquals(
len(self._trello.list_boards()),
int(os.environ['TRELLO_TEST_BOARD_COUNT']))
def test10_board_attrs(self):
boards = self._trello.list_boards()
for b in boards:
self.assertIsNotNone(b['_id'], msg="_id not provided")
self.assertIsNotNone(b['name'], msg="name not provided")
self.assertIsNotNone(b['closed'], msg="closed not provided")
if __name__ == "__main__":
unittest.main()
| <commit_before>from models import AuthenticationError,AuthenticationRequired
from trello import Trello
import unittest
import os
class BoardTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
def test01_list_boards(self):
print "list boards"
self.assertEquals(
len(self._trello.list_boards()),
int(os.environ['TRELLO_TEST_BOARD_COUNT']))
def test02_board_attrs(self):
print "board attrs"
boards = self._trello.list_boards()
for b in boards:
self.assertIsNotNone(b['_id'], msg="_id not provided")
self.assertIsNotNone(b['name'], msg="name not provided")
self.assertIsNotNone(b['closed'], msg="closed not provided")
class CardTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
if __name__ == "__main__":
unittest.main()
<commit_msg>Make it a single test case<commit_after>from models import AuthenticationError,AuthenticationRequired
from trello import Trello
import unittest
import os
class TrelloTestCase(unittest.TestCase):
def setUp(self):
self._trello = Trello(os.environ['TRELLO_TEST_USER'], os.environ['TRELLO_TEST_PASS'])
def test01_list_boards(self):
self.assertEquals(
len(self._trello.list_boards()),
int(os.environ['TRELLO_TEST_BOARD_COUNT']))
def test10_board_attrs(self):
boards = self._trello.list_boards()
for b in boards:
self.assertIsNotNone(b['_id'], msg="_id not provided")
self.assertIsNotNone(b['name'], msg="name not provided")
self.assertIsNotNone(b['closed'], msg="closed not provided")
if __name__ == "__main__":
unittest.main()
|
7e2565007c926765750641b048607ed29b8aada0 | cmsplugin_zinnia/admin.py | cmsplugin_zinnia/admin.py | """Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
| """Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
# https://github.com/django-blog-zinnia/cmsplugin-zinnia/pull/61
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
| Add comment about why excepting KeyError | Add comment about why excepting KeyError
| Python | bsd-3-clause | bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia | """Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
Add comment about why excepting KeyError | """Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
# https://github.com/django-blog-zinnia/cmsplugin-zinnia/pull/61
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
| <commit_before>"""Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
<commit_msg>Add comment about why excepting KeyError<commit_after> | """Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
# https://github.com/django-blog-zinnia/cmsplugin-zinnia/pull/61
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
| """Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
Add comment about why excepting KeyError"""Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
# https://github.com/django-blog-zinnia/cmsplugin-zinnia/pull/61
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
| <commit_before>"""Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
<commit_msg>Add comment about why excepting KeyError<commit_after>"""Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
# https://github.com/django-blog-zinnia/cmsplugin-zinnia/pull/61
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
|
6d143e8b3d060ca4639b08c15d7e826e0ee68c8c | neutron/tests/unit/objects/test_address_scope.py | neutron/tests/unit/objects/test_address_scope.py | # Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import address_scope
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class AddressScopeIfaceObjectTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = address_scope.AddressScope
class AddressScopeDbObjectTestCase(obj_test_base._BaseObjectTestCase,
testlib_api.SqlTestCase):
_test_class = address_scope.AddressScope
| # Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import address_scope
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class AddressScopeIfaceObjectTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = address_scope.AddressScope
class AddressScopeDbObjectTestCase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = address_scope.AddressScope
| Use BaseDbObjectTestCase in AddressScope UT | Use BaseDbObjectTestCase in AddressScope UT
AddressScopeDbObjectTestCase class is using _BaseObjectTestCase class
which doesn't contain all the unit test cases for Oslo-Versioned
classes. This patch replace that class.
Partially-Implements: blueprint adopt-oslo-versioned-objects-for-db
Change-Id: I180046743471487a9f9a6e53ae0f2fd09afdf123
| Python | apache-2.0 | huntxu/neutron,noironetworks/neutron,noironetworks/neutron,eayunstack/neutron,openstack/neutron,mahak/neutron,mahak/neutron,cloudbase/neutron,mahak/neutron,sebrandon1/neutron,huntxu/neutron,sebrandon1/neutron,openstack/neutron,cloudbase/neutron,openstack/neutron,eayunstack/neutron | # Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import address_scope
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class AddressScopeIfaceObjectTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = address_scope.AddressScope
class AddressScopeDbObjectTestCase(obj_test_base._BaseObjectTestCase,
testlib_api.SqlTestCase):
_test_class = address_scope.AddressScope
Use BaseDbObjectTestCase in AddressScope UT
AddressScopeDbObjectTestCase class is using _BaseObjectTestCase class
which doesn't contain all the unit test cases for Oslo-Versioned
classes. This patch replace that class.
Partially-Implements: blueprint adopt-oslo-versioned-objects-for-db
Change-Id: I180046743471487a9f9a6e53ae0f2fd09afdf123 | # Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import address_scope
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class AddressScopeIfaceObjectTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = address_scope.AddressScope
class AddressScopeDbObjectTestCase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = address_scope.AddressScope
| <commit_before># Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import address_scope
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class AddressScopeIfaceObjectTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = address_scope.AddressScope
class AddressScopeDbObjectTestCase(obj_test_base._BaseObjectTestCase,
testlib_api.SqlTestCase):
_test_class = address_scope.AddressScope
<commit_msg>Use BaseDbObjectTestCase in AddressScope UT
AddressScopeDbObjectTestCase class is using _BaseObjectTestCase class
which doesn't contain all the unit test cases for Oslo-Versioned
classes. This patch replace that class.
Partially-Implements: blueprint adopt-oslo-versioned-objects-for-db
Change-Id: I180046743471487a9f9a6e53ae0f2fd09afdf123<commit_after> | # Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import address_scope
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class AddressScopeIfaceObjectTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = address_scope.AddressScope
class AddressScopeDbObjectTestCase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = address_scope.AddressScope
| # Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import address_scope
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class AddressScopeIfaceObjectTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = address_scope.AddressScope
class AddressScopeDbObjectTestCase(obj_test_base._BaseObjectTestCase,
testlib_api.SqlTestCase):
_test_class = address_scope.AddressScope
Use BaseDbObjectTestCase in AddressScope UT
AddressScopeDbObjectTestCase class is using _BaseObjectTestCase class
which doesn't contain all the unit test cases for Oslo-Versioned
classes. This patch replace that class.
Partially-Implements: blueprint adopt-oslo-versioned-objects-for-db
Change-Id: I180046743471487a9f9a6e53ae0f2fd09afdf123# Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import address_scope
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class AddressScopeIfaceObjectTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = address_scope.AddressScope
class AddressScopeDbObjectTestCase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = address_scope.AddressScope
| <commit_before># Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import address_scope
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class AddressScopeIfaceObjectTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = address_scope.AddressScope
class AddressScopeDbObjectTestCase(obj_test_base._BaseObjectTestCase,
testlib_api.SqlTestCase):
_test_class = address_scope.AddressScope
<commit_msg>Use BaseDbObjectTestCase in AddressScope UT
AddressScopeDbObjectTestCase class is using _BaseObjectTestCase class
which doesn't contain all the unit test cases for Oslo-Versioned
classes. This patch replace that class.
Partially-Implements: blueprint adopt-oslo-versioned-objects-for-db
Change-Id: I180046743471487a9f9a6e53ae0f2fd09afdf123<commit_after># Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import address_scope
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class AddressScopeIfaceObjectTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = address_scope.AddressScope
class AddressScopeDbObjectTestCase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = address_scope.AddressScope
|
ca62db36a14c9bcc447cb612a8fba4dd2c678629 | functional_tests.py | functional_tests.py | #!/usr/bin/env python
from unittest import TestCase
import mechanize
class ResponseTests(TestCase):
def test_close_pickle_load(self):
import pickle
b = mechanize.Browser()
r = b.open("http://wwwsearch.sf.net/bits/cctest2.txt")
r.read()
r.close()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
HIGHEST_PROTOCOL = -1
p = pickle.dumps(b, HIGHEST_PROTOCOL)
b = pickle.loads(p)
r = b.response()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
if __name__ == "__main__":
import unittest
unittest.main()
| #!/usr/bin/env python
from unittest import TestCase
import mechanize
class ResponseTests(TestCase):
def test_close_pickle_load(self):
print ("This test is expected to fail unless Python standard library"
"patch http://python.org/sf/1144636 has been applied")
import pickle
b = mechanize.Browser()
r = b.open("http://wwwsearch.sf.net/bits/cctest2.txt")
r.read()
r.close()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
HIGHEST_PROTOCOL = -1
p = pickle.dumps(b, HIGHEST_PROTOCOL)
b = pickle.loads(p)
r = b.response()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
if __name__ == "__main__":
import unittest
unittest.main()
| Add warning about failing functional test | Add warning about failing functional test
| Python | bsd-3-clause | python-mechanize/mechanize,python-mechanize/mechanize | #!/usr/bin/env python
from unittest import TestCase
import mechanize
class ResponseTests(TestCase):
def test_close_pickle_load(self):
import pickle
b = mechanize.Browser()
r = b.open("http://wwwsearch.sf.net/bits/cctest2.txt")
r.read()
r.close()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
HIGHEST_PROTOCOL = -1
p = pickle.dumps(b, HIGHEST_PROTOCOL)
b = pickle.loads(p)
r = b.response()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
if __name__ == "__main__":
import unittest
unittest.main()
Add warning about failing functional test | #!/usr/bin/env python
from unittest import TestCase
import mechanize
class ResponseTests(TestCase):
def test_close_pickle_load(self):
print ("This test is expected to fail unless Python standard library"
"patch http://python.org/sf/1144636 has been applied")
import pickle
b = mechanize.Browser()
r = b.open("http://wwwsearch.sf.net/bits/cctest2.txt")
r.read()
r.close()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
HIGHEST_PROTOCOL = -1
p = pickle.dumps(b, HIGHEST_PROTOCOL)
b = pickle.loads(p)
r = b.response()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
if __name__ == "__main__":
import unittest
unittest.main()
| <commit_before>#!/usr/bin/env python
from unittest import TestCase
import mechanize
class ResponseTests(TestCase):
def test_close_pickle_load(self):
import pickle
b = mechanize.Browser()
r = b.open("http://wwwsearch.sf.net/bits/cctest2.txt")
r.read()
r.close()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
HIGHEST_PROTOCOL = -1
p = pickle.dumps(b, HIGHEST_PROTOCOL)
b = pickle.loads(p)
r = b.response()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
if __name__ == "__main__":
import unittest
unittest.main()
<commit_msg>Add warning about failing functional test<commit_after> | #!/usr/bin/env python
from unittest import TestCase
import mechanize
class ResponseTests(TestCase):
def test_close_pickle_load(self):
print ("This test is expected to fail unless Python standard library"
"patch http://python.org/sf/1144636 has been applied")
import pickle
b = mechanize.Browser()
r = b.open("http://wwwsearch.sf.net/bits/cctest2.txt")
r.read()
r.close()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
HIGHEST_PROTOCOL = -1
p = pickle.dumps(b, HIGHEST_PROTOCOL)
b = pickle.loads(p)
r = b.response()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
if __name__ == "__main__":
import unittest
unittest.main()
| #!/usr/bin/env python
from unittest import TestCase
import mechanize
class ResponseTests(TestCase):
def test_close_pickle_load(self):
import pickle
b = mechanize.Browser()
r = b.open("http://wwwsearch.sf.net/bits/cctest2.txt")
r.read()
r.close()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
HIGHEST_PROTOCOL = -1
p = pickle.dumps(b, HIGHEST_PROTOCOL)
b = pickle.loads(p)
r = b.response()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
if __name__ == "__main__":
import unittest
unittest.main()
Add warning about failing functional test#!/usr/bin/env python
from unittest import TestCase
import mechanize
class ResponseTests(TestCase):
def test_close_pickle_load(self):
print ("This test is expected to fail unless Python standard library"
"patch http://python.org/sf/1144636 has been applied")
import pickle
b = mechanize.Browser()
r = b.open("http://wwwsearch.sf.net/bits/cctest2.txt")
r.read()
r.close()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
HIGHEST_PROTOCOL = -1
p = pickle.dumps(b, HIGHEST_PROTOCOL)
b = pickle.loads(p)
r = b.response()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
if __name__ == "__main__":
import unittest
unittest.main()
| <commit_before>#!/usr/bin/env python
from unittest import TestCase
import mechanize
class ResponseTests(TestCase):
def test_close_pickle_load(self):
import pickle
b = mechanize.Browser()
r = b.open("http://wwwsearch.sf.net/bits/cctest2.txt")
r.read()
r.close()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
HIGHEST_PROTOCOL = -1
p = pickle.dumps(b, HIGHEST_PROTOCOL)
b = pickle.loads(p)
r = b.response()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
if __name__ == "__main__":
import unittest
unittest.main()
<commit_msg>Add warning about failing functional test<commit_after>#!/usr/bin/env python
from unittest import TestCase
import mechanize
class ResponseTests(TestCase):
def test_close_pickle_load(self):
print ("This test is expected to fail unless Python standard library"
"patch http://python.org/sf/1144636 has been applied")
import pickle
b = mechanize.Browser()
r = b.open("http://wwwsearch.sf.net/bits/cctest2.txt")
r.read()
r.close()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
HIGHEST_PROTOCOL = -1
p = pickle.dumps(b, HIGHEST_PROTOCOL)
b = pickle.loads(p)
r = b.response()
r.seek(0)
self.assertEqual(r.read(),
"Hello ClientCookie functional test suite.\n")
if __name__ == "__main__":
import unittest
unittest.main()
|
baaeb4fe0998bac8e0cb853d8124aa6134f55996 | poradnia/letters/admin.py | poradnia/letters/admin.py | from django.contrib import admin
from .models import Attachment, Letter
class AttachmentInline(admin.StackedInline):
'''
Stacked Inline View for Attachment
'''
model = Attachment
class LetterAdmin(admin.ModelAdmin):
'''
Admin View for Letter
'''
inlines = [
AttachmentInline,
]
admin.site.register(Letter, LetterAdmin)
| from django.contrib import admin
from .models import Attachment, Letter
class AttachmentInline(admin.StackedInline):
'''
Stacked Inline View for Attachment
'''
model = Attachment
@admin.register(Letter)
class LetterAdmin(admin.ModelAdmin):
'''
Admin View for Letter
'''
inlines = [
AttachmentInline,
]
| Rewrite DjangoAdmin in letters for decorators | Rewrite DjangoAdmin in letters for decorators
| Python | mit | rwakulszowa/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia,watchdogpolska/poradnia,watchdogpolska/poradnia,watchdogpolska/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,rwakulszowa/poradnia,rwakulszowa/poradnia | from django.contrib import admin
from .models import Attachment, Letter
class AttachmentInline(admin.StackedInline):
'''
Stacked Inline View for Attachment
'''
model = Attachment
class LetterAdmin(admin.ModelAdmin):
'''
Admin View for Letter
'''
inlines = [
AttachmentInline,
]
admin.site.register(Letter, LetterAdmin)
Rewrite DjangoAdmin in letters for decorators | from django.contrib import admin
from .models import Attachment, Letter
class AttachmentInline(admin.StackedInline):
'''
Stacked Inline View for Attachment
'''
model = Attachment
@admin.register(Letter)
class LetterAdmin(admin.ModelAdmin):
'''
Admin View for Letter
'''
inlines = [
AttachmentInline,
]
| <commit_before>from django.contrib import admin
from .models import Attachment, Letter
class AttachmentInline(admin.StackedInline):
'''
Stacked Inline View for Attachment
'''
model = Attachment
class LetterAdmin(admin.ModelAdmin):
'''
Admin View for Letter
'''
inlines = [
AttachmentInline,
]
admin.site.register(Letter, LetterAdmin)
<commit_msg>Rewrite DjangoAdmin in letters for decorators<commit_after> | from django.contrib import admin
from .models import Attachment, Letter
class AttachmentInline(admin.StackedInline):
'''
Stacked Inline View for Attachment
'''
model = Attachment
@admin.register(Letter)
class LetterAdmin(admin.ModelAdmin):
'''
Admin View for Letter
'''
inlines = [
AttachmentInline,
]
| from django.contrib import admin
from .models import Attachment, Letter
class AttachmentInline(admin.StackedInline):
'''
Stacked Inline View for Attachment
'''
model = Attachment
class LetterAdmin(admin.ModelAdmin):
'''
Admin View for Letter
'''
inlines = [
AttachmentInline,
]
admin.site.register(Letter, LetterAdmin)
Rewrite DjangoAdmin in letters for decoratorsfrom django.contrib import admin
from .models import Attachment, Letter
class AttachmentInline(admin.StackedInline):
'''
Stacked Inline View for Attachment
'''
model = Attachment
@admin.register(Letter)
class LetterAdmin(admin.ModelAdmin):
'''
Admin View for Letter
'''
inlines = [
AttachmentInline,
]
| <commit_before>from django.contrib import admin
from .models import Attachment, Letter
class AttachmentInline(admin.StackedInline):
'''
Stacked Inline View for Attachment
'''
model = Attachment
class LetterAdmin(admin.ModelAdmin):
'''
Admin View for Letter
'''
inlines = [
AttachmentInline,
]
admin.site.register(Letter, LetterAdmin)
<commit_msg>Rewrite DjangoAdmin in letters for decorators<commit_after>from django.contrib import admin
from .models import Attachment, Letter
class AttachmentInline(admin.StackedInline):
'''
Stacked Inline View for Attachment
'''
model = Attachment
@admin.register(Letter)
class LetterAdmin(admin.ModelAdmin):
'''
Admin View for Letter
'''
inlines = [
AttachmentInline,
]
|
e36e6c4db61381ca9f29ce1dc5f645cb65d3ba21 | capstone/util/play.py | capstone/util/play.py | from __future__ import print_function
import random
from capstone.util import print_header
def play_match(game, players, verbose=True):
"""Plays a match between the given players"""
if verbose:
print(game)
while not game.is_over():
cur_player = players[game.cur_player()]
move = cur_player.choose_move(game.copy())
game.make_move(move)
if verbose:
print(game)
def play_series(game, players, n_matches=100):
"""
Plays a series of 'n_matches' of a 'game' between
the given 'players'.
"""
print_header('Series')
print('Game:', game.name)
print('Players:', players)
print('No. Matches: %d\n' % n_matches)
counters = {'W': 0, 'L': 0, 'D': 0}
for n_match in range(1, n_matches + 1):
print('Match %d/%d:' % (n_match, n_matches), end=' ')
new_game = game.copy()
play_match(new_game, players, verbose=False)
outcomes = new_game.outcomes()
counters[outcomes[0]] += 1
print(outcomes)
print('\nOutcomes:', counters)
| from __future__ import print_function
import random
from . import print_header
def play_match(game, players, verbose=True):
"""Plays a match between the given players"""
if verbose:
print(game)
while not game.is_over():
cur_player = players[game.cur_player()]
move = cur_player.choose_move(game.copy())
game.make_move(move)
if verbose:
print(game)
def play_series(game, players, n_matches=100):
"""
Plays a series of 'n_matches' of a 'game' between
the given 'players'.
"""
print_header('Series')
print('Game:', game.name)
print('Players:', players)
print('No. Matches: %d\n' % n_matches)
counters = {'W': 0, 'L': 0, 'D': 0}
for n_match in range(1, n_matches + 1):
print('Match %d/%d:' % (n_match, n_matches), end=' ')
new_game = game.copy()
play_match(new_game, players, verbose=False)
outcomes = new_game.outcomes()
counters[outcomes[0]] += 1
print(outcomes)
print('\nOutcomes:', counters)
| Change absolute import to relative | Change absolute import to relative
| Python | mit | davidrobles/mlnd-capstone-code | from __future__ import print_function
import random
from capstone.util import print_header
def play_match(game, players, verbose=True):
"""Plays a match between the given players"""
if verbose:
print(game)
while not game.is_over():
cur_player = players[game.cur_player()]
move = cur_player.choose_move(game.copy())
game.make_move(move)
if verbose:
print(game)
def play_series(game, players, n_matches=100):
"""
Plays a series of 'n_matches' of a 'game' between
the given 'players'.
"""
print_header('Series')
print('Game:', game.name)
print('Players:', players)
print('No. Matches: %d\n' % n_matches)
counters = {'W': 0, 'L': 0, 'D': 0}
for n_match in range(1, n_matches + 1):
print('Match %d/%d:' % (n_match, n_matches), end=' ')
new_game = game.copy()
play_match(new_game, players, verbose=False)
outcomes = new_game.outcomes()
counters[outcomes[0]] += 1
print(outcomes)
print('\nOutcomes:', counters)
Change absolute import to relative | from __future__ import print_function
import random
from . import print_header
def play_match(game, players, verbose=True):
"""Plays a match between the given players"""
if verbose:
print(game)
while not game.is_over():
cur_player = players[game.cur_player()]
move = cur_player.choose_move(game.copy())
game.make_move(move)
if verbose:
print(game)
def play_series(game, players, n_matches=100):
"""
Plays a series of 'n_matches' of a 'game' between
the given 'players'.
"""
print_header('Series')
print('Game:', game.name)
print('Players:', players)
print('No. Matches: %d\n' % n_matches)
counters = {'W': 0, 'L': 0, 'D': 0}
for n_match in range(1, n_matches + 1):
print('Match %d/%d:' % (n_match, n_matches), end=' ')
new_game = game.copy()
play_match(new_game, players, verbose=False)
outcomes = new_game.outcomes()
counters[outcomes[0]] += 1
print(outcomes)
print('\nOutcomes:', counters)
| <commit_before>from __future__ import print_function
import random
from capstone.util import print_header
def play_match(game, players, verbose=True):
"""Plays a match between the given players"""
if verbose:
print(game)
while not game.is_over():
cur_player = players[game.cur_player()]
move = cur_player.choose_move(game.copy())
game.make_move(move)
if verbose:
print(game)
def play_series(game, players, n_matches=100):
"""
Plays a series of 'n_matches' of a 'game' between
the given 'players'.
"""
print_header('Series')
print('Game:', game.name)
print('Players:', players)
print('No. Matches: %d\n' % n_matches)
counters = {'W': 0, 'L': 0, 'D': 0}
for n_match in range(1, n_matches + 1):
print('Match %d/%d:' % (n_match, n_matches), end=' ')
new_game = game.copy()
play_match(new_game, players, verbose=False)
outcomes = new_game.outcomes()
counters[outcomes[0]] += 1
print(outcomes)
print('\nOutcomes:', counters)
<commit_msg>Change absolute import to relative<commit_after> | from __future__ import print_function
import random
from . import print_header
def play_match(game, players, verbose=True):
"""Plays a match between the given players"""
if verbose:
print(game)
while not game.is_over():
cur_player = players[game.cur_player()]
move = cur_player.choose_move(game.copy())
game.make_move(move)
if verbose:
print(game)
def play_series(game, players, n_matches=100):
"""
Plays a series of 'n_matches' of a 'game' between
the given 'players'.
"""
print_header('Series')
print('Game:', game.name)
print('Players:', players)
print('No. Matches: %d\n' % n_matches)
counters = {'W': 0, 'L': 0, 'D': 0}
for n_match in range(1, n_matches + 1):
print('Match %d/%d:' % (n_match, n_matches), end=' ')
new_game = game.copy()
play_match(new_game, players, verbose=False)
outcomes = new_game.outcomes()
counters[outcomes[0]] += 1
print(outcomes)
print('\nOutcomes:', counters)
| from __future__ import print_function
import random
from capstone.util import print_header
def play_match(game, players, verbose=True):
"""Plays a match between the given players"""
if verbose:
print(game)
while not game.is_over():
cur_player = players[game.cur_player()]
move = cur_player.choose_move(game.copy())
game.make_move(move)
if verbose:
print(game)
def play_series(game, players, n_matches=100):
"""
Plays a series of 'n_matches' of a 'game' between
the given 'players'.
"""
print_header('Series')
print('Game:', game.name)
print('Players:', players)
print('No. Matches: %d\n' % n_matches)
counters = {'W': 0, 'L': 0, 'D': 0}
for n_match in range(1, n_matches + 1):
print('Match %d/%d:' % (n_match, n_matches), end=' ')
new_game = game.copy()
play_match(new_game, players, verbose=False)
outcomes = new_game.outcomes()
counters[outcomes[0]] += 1
print(outcomes)
print('\nOutcomes:', counters)
Change absolute import to relativefrom __future__ import print_function
import random
from . import print_header
def play_match(game, players, verbose=True):
"""Plays a match between the given players"""
if verbose:
print(game)
while not game.is_over():
cur_player = players[game.cur_player()]
move = cur_player.choose_move(game.copy())
game.make_move(move)
if verbose:
print(game)
def play_series(game, players, n_matches=100):
"""
Plays a series of 'n_matches' of a 'game' between
the given 'players'.
"""
print_header('Series')
print('Game:', game.name)
print('Players:', players)
print('No. Matches: %d\n' % n_matches)
counters = {'W': 0, 'L': 0, 'D': 0}
for n_match in range(1, n_matches + 1):
print('Match %d/%d:' % (n_match, n_matches), end=' ')
new_game = game.copy()
play_match(new_game, players, verbose=False)
outcomes = new_game.outcomes()
counters[outcomes[0]] += 1
print(outcomes)
print('\nOutcomes:', counters)
| <commit_before>from __future__ import print_function
import random
from capstone.util import print_header
def play_match(game, players, verbose=True):
"""Plays a match between the given players"""
if verbose:
print(game)
while not game.is_over():
cur_player = players[game.cur_player()]
move = cur_player.choose_move(game.copy())
game.make_move(move)
if verbose:
print(game)
def play_series(game, players, n_matches=100):
"""
Plays a series of 'n_matches' of a 'game' between
the given 'players'.
"""
print_header('Series')
print('Game:', game.name)
print('Players:', players)
print('No. Matches: %d\n' % n_matches)
counters = {'W': 0, 'L': 0, 'D': 0}
for n_match in range(1, n_matches + 1):
print('Match %d/%d:' % (n_match, n_matches), end=' ')
new_game = game.copy()
play_match(new_game, players, verbose=False)
outcomes = new_game.outcomes()
counters[outcomes[0]] += 1
print(outcomes)
print('\nOutcomes:', counters)
<commit_msg>Change absolute import to relative<commit_after>from __future__ import print_function
import random
from . import print_header
def play_match(game, players, verbose=True):
"""Plays a match between the given players"""
if verbose:
print(game)
while not game.is_over():
cur_player = players[game.cur_player()]
move = cur_player.choose_move(game.copy())
game.make_move(move)
if verbose:
print(game)
def play_series(game, players, n_matches=100):
"""
Plays a series of 'n_matches' of a 'game' between
the given 'players'.
"""
print_header('Series')
print('Game:', game.name)
print('Players:', players)
print('No. Matches: %d\n' % n_matches)
counters = {'W': 0, 'L': 0, 'D': 0}
for n_match in range(1, n_matches + 1):
print('Match %d/%d:' % (n_match, n_matches), end=' ')
new_game = game.copy()
play_match(new_game, players, verbose=False)
outcomes = new_game.outcomes()
counters[outcomes[0]] += 1
print(outcomes)
print('\nOutcomes:', counters)
|
5486503c3e9664c1683e5de9381b4d0d413182c3 | ipywidgets/widgets/widget_description.py | ipywidgets/widgets/widget_description.py | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Contains the DOMWidget class"""
from traitlets import Unicode
from .widget import Widget, widget_serialization, register
from .trait_types import InstanceDict
from .widget_style import Style
from .widget_core import CoreWidget
from .domwidget import DOMWidget
@register
class DescriptionStyle(Style, CoreWidget, Widget):
"""Description style widget."""
_model_name = Unicode('DescriptionStyleModel').tag(sync=True)
description_width = Unicode(help="Width of the description to the side of the control.").tag(sync=True)
class DescriptionWidget(DOMWidget, CoreWidget):
"""Widget that has a description label to the side."""
_model_name = Unicode('DescriptionModel').tag(sync=True)
description = Unicode('', help="Description of the control.").tag(sync=True)
description_tooltip = Unicode(None, allow_none=True, help="Tooltip for the description.").tag(sync=True)
style = InstanceDict(DescriptionStyle, help="Styling customizations").tag(sync=True, **widget_serialization)
def _repr_keys(self):
for key in super(DescriptionWidget, self)._repr_keys():
# Exclude style if it had the default value
if key == 'style':
value = getattr(self, key)
if repr(value) == '%s()' % value.__class__.__name__:
continue
yield key
| # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Contains the DOMWidget class"""
from traitlets import Unicode
from .widget import Widget, widget_serialization, register
from .trait_types import InstanceDict
from .widget_style import Style
from .widget_core import CoreWidget
from .domwidget import DOMWidget
@register
class DescriptionStyle(Style, CoreWidget, Widget):
"""Description style widget."""
_model_name = Unicode('DescriptionStyleModel').tag(sync=True)
description_width = Unicode(help="Width of the description to the side of the control.").tag(sync=True)
class DescriptionWidget(DOMWidget, CoreWidget):
"""Widget that has a description label to the side."""
_model_name = Unicode('DescriptionModel').tag(sync=True)
description = Unicode('', help="Description of the control.").tag(sync=True)
description_tooltip = Unicode(None, allow_none=True, help="Tooltip for the description (defaults to description).").tag(sync=True)
style = InstanceDict(DescriptionStyle, help="Styling customizations").tag(sync=True, **widget_serialization)
def _repr_keys(self):
for key in super(DescriptionWidget, self)._repr_keys():
# Exclude style if it had the default value
if key == 'style':
value = getattr(self, key)
if repr(value) == '%s()' % value.__class__.__name__:
continue
yield key
| Tweak the help text for the new tooltip attribute. | Tweak the help text for the new tooltip attribute. | Python | bsd-3-clause | ipython/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Contains the DOMWidget class"""
from traitlets import Unicode
from .widget import Widget, widget_serialization, register
from .trait_types import InstanceDict
from .widget_style import Style
from .widget_core import CoreWidget
from .domwidget import DOMWidget
@register
class DescriptionStyle(Style, CoreWidget, Widget):
"""Description style widget."""
_model_name = Unicode('DescriptionStyleModel').tag(sync=True)
description_width = Unicode(help="Width of the description to the side of the control.").tag(sync=True)
class DescriptionWidget(DOMWidget, CoreWidget):
"""Widget that has a description label to the side."""
_model_name = Unicode('DescriptionModel').tag(sync=True)
description = Unicode('', help="Description of the control.").tag(sync=True)
description_tooltip = Unicode(None, allow_none=True, help="Tooltip for the description.").tag(sync=True)
style = InstanceDict(DescriptionStyle, help="Styling customizations").tag(sync=True, **widget_serialization)
def _repr_keys(self):
for key in super(DescriptionWidget, self)._repr_keys():
# Exclude style if it had the default value
if key == 'style':
value = getattr(self, key)
if repr(value) == '%s()' % value.__class__.__name__:
continue
yield key
Tweak the help text for the new tooltip attribute. | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Contains the DOMWidget class"""
from traitlets import Unicode
from .widget import Widget, widget_serialization, register
from .trait_types import InstanceDict
from .widget_style import Style
from .widget_core import CoreWidget
from .domwidget import DOMWidget
@register
class DescriptionStyle(Style, CoreWidget, Widget):
"""Description style widget."""
_model_name = Unicode('DescriptionStyleModel').tag(sync=True)
description_width = Unicode(help="Width of the description to the side of the control.").tag(sync=True)
class DescriptionWidget(DOMWidget, CoreWidget):
"""Widget that has a description label to the side."""
_model_name = Unicode('DescriptionModel').tag(sync=True)
description = Unicode('', help="Description of the control.").tag(sync=True)
description_tooltip = Unicode(None, allow_none=True, help="Tooltip for the description (defaults to description).").tag(sync=True)
style = InstanceDict(DescriptionStyle, help="Styling customizations").tag(sync=True, **widget_serialization)
def _repr_keys(self):
for key in super(DescriptionWidget, self)._repr_keys():
# Exclude style if it had the default value
if key == 'style':
value = getattr(self, key)
if repr(value) == '%s()' % value.__class__.__name__:
continue
yield key
| <commit_before># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Contains the DOMWidget class"""
from traitlets import Unicode
from .widget import Widget, widget_serialization, register
from .trait_types import InstanceDict
from .widget_style import Style
from .widget_core import CoreWidget
from .domwidget import DOMWidget
@register
class DescriptionStyle(Style, CoreWidget, Widget):
"""Description style widget."""
_model_name = Unicode('DescriptionStyleModel').tag(sync=True)
description_width = Unicode(help="Width of the description to the side of the control.").tag(sync=True)
class DescriptionWidget(DOMWidget, CoreWidget):
"""Widget that has a description label to the side."""
_model_name = Unicode('DescriptionModel').tag(sync=True)
description = Unicode('', help="Description of the control.").tag(sync=True)
description_tooltip = Unicode(None, allow_none=True, help="Tooltip for the description.").tag(sync=True)
style = InstanceDict(DescriptionStyle, help="Styling customizations").tag(sync=True, **widget_serialization)
def _repr_keys(self):
for key in super(DescriptionWidget, self)._repr_keys():
# Exclude style if it had the default value
if key == 'style':
value = getattr(self, key)
if repr(value) == '%s()' % value.__class__.__name__:
continue
yield key
<commit_msg>Tweak the help text for the new tooltip attribute.<commit_after> | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Contains the DOMWidget class"""
from traitlets import Unicode
from .widget import Widget, widget_serialization, register
from .trait_types import InstanceDict
from .widget_style import Style
from .widget_core import CoreWidget
from .domwidget import DOMWidget
@register
class DescriptionStyle(Style, CoreWidget, Widget):
"""Description style widget."""
_model_name = Unicode('DescriptionStyleModel').tag(sync=True)
description_width = Unicode(help="Width of the description to the side of the control.").tag(sync=True)
class DescriptionWidget(DOMWidget, CoreWidget):
"""Widget that has a description label to the side."""
_model_name = Unicode('DescriptionModel').tag(sync=True)
description = Unicode('', help="Description of the control.").tag(sync=True)
description_tooltip = Unicode(None, allow_none=True, help="Tooltip for the description (defaults to description).").tag(sync=True)
style = InstanceDict(DescriptionStyle, help="Styling customizations").tag(sync=True, **widget_serialization)
def _repr_keys(self):
for key in super(DescriptionWidget, self)._repr_keys():
# Exclude style if it had the default value
if key == 'style':
value = getattr(self, key)
if repr(value) == '%s()' % value.__class__.__name__:
continue
yield key
| # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Contains the DOMWidget class"""
from traitlets import Unicode
from .widget import Widget, widget_serialization, register
from .trait_types import InstanceDict
from .widget_style import Style
from .widget_core import CoreWidget
from .domwidget import DOMWidget
@register
class DescriptionStyle(Style, CoreWidget, Widget):
"""Description style widget."""
_model_name = Unicode('DescriptionStyleModel').tag(sync=True)
description_width = Unicode(help="Width of the description to the side of the control.").tag(sync=True)
class DescriptionWidget(DOMWidget, CoreWidget):
    """Widget that has a description label to the side."""
    # Name of the frontend model this widget syncs with.
    _model_name = Unicode('DescriptionModel').tag(sync=True)
    # Text shown in the label next to the control; synced to the frontend.
    description = Unicode('', help="Description of the control.").tag(sync=True)
    # Hover tooltip for the label. NOTE(review): behaviour when None is not
    # visible here — presumably the frontend falls back to a default; confirm.
    description_tooltip = Unicode(None, allow_none=True, help="Tooltip for the description.").tag(sync=True)
    style = InstanceDict(DescriptionStyle, help="Styling customizations").tag(sync=True, **widget_serialization)

    def _repr_keys(self):
        # Yield the attribute names used to build repr(self).
        for key in super(DescriptionWidget, self)._repr_keys():
            # Exclude style if it had the default value
            if key == 'style':
                value = getattr(self, key)
                if repr(value) == '%s()' % value.__class__.__name__:
                    continue
            yield key
Tweak the help text for the new tooltip attribute.# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Contains the DOMWidget class"""
from traitlets import Unicode
from .widget import Widget, widget_serialization, register
from .trait_types import InstanceDict
from .widget_style import Style
from .widget_core import CoreWidget
from .domwidget import DOMWidget
@register
class DescriptionStyle(Style, CoreWidget, Widget):
"""Description style widget."""
_model_name = Unicode('DescriptionStyleModel').tag(sync=True)
description_width = Unicode(help="Width of the description to the side of the control.").tag(sync=True)
class DescriptionWidget(DOMWidget, CoreWidget):
"""Widget that has a description label to the side."""
_model_name = Unicode('DescriptionModel').tag(sync=True)
description = Unicode('', help="Description of the control.").tag(sync=True)
description_tooltip = Unicode(None, allow_none=True, help="Tooltip for the description (defaults to description).").tag(sync=True)
style = InstanceDict(DescriptionStyle, help="Styling customizations").tag(sync=True, **widget_serialization)
def _repr_keys(self):
for key in super(DescriptionWidget, self)._repr_keys():
# Exclude style if it had the default value
if key == 'style':
value = getattr(self, key)
if repr(value) == '%s()' % value.__class__.__name__:
continue
yield key
| <commit_before># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Contains the DOMWidget class"""
from traitlets import Unicode
from .widget import Widget, widget_serialization, register
from .trait_types import InstanceDict
from .widget_style import Style
from .widget_core import CoreWidget
from .domwidget import DOMWidget
@register
class DescriptionStyle(Style, CoreWidget, Widget):
"""Description style widget."""
_model_name = Unicode('DescriptionStyleModel').tag(sync=True)
description_width = Unicode(help="Width of the description to the side of the control.").tag(sync=True)
class DescriptionWidget(DOMWidget, CoreWidget):
"""Widget that has a description label to the side."""
_model_name = Unicode('DescriptionModel').tag(sync=True)
description = Unicode('', help="Description of the control.").tag(sync=True)
description_tooltip = Unicode(None, allow_none=True, help="Tooltip for the description.").tag(sync=True)
style = InstanceDict(DescriptionStyle, help="Styling customizations").tag(sync=True, **widget_serialization)
def _repr_keys(self):
for key in super(DescriptionWidget, self)._repr_keys():
# Exclude style if it had the default value
if key == 'style':
value = getattr(self, key)
if repr(value) == '%s()' % value.__class__.__name__:
continue
yield key
<commit_msg>Tweak the help text for the new tooltip attribute.<commit_after># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
"""Contains the DOMWidget class"""
from traitlets import Unicode
from .widget import Widget, widget_serialization, register
from .trait_types import InstanceDict
from .widget_style import Style
from .widget_core import CoreWidget
from .domwidget import DOMWidget
@register
class DescriptionStyle(Style, CoreWidget, Widget):
"""Description style widget."""
_model_name = Unicode('DescriptionStyleModel').tag(sync=True)
description_width = Unicode(help="Width of the description to the side of the control.").tag(sync=True)
class DescriptionWidget(DOMWidget, CoreWidget):
"""Widget that has a description label to the side."""
_model_name = Unicode('DescriptionModel').tag(sync=True)
description = Unicode('', help="Description of the control.").tag(sync=True)
description_tooltip = Unicode(None, allow_none=True, help="Tooltip for the description (defaults to description).").tag(sync=True)
style = InstanceDict(DescriptionStyle, help="Styling customizations").tag(sync=True, **widget_serialization)
def _repr_keys(self):
for key in super(DescriptionWidget, self)._repr_keys():
# Exclude style if it had the default value
if key == 'style':
value = getattr(self, key)
if repr(value) == '%s()' % value.__class__.__name__:
continue
yield key
|
0b41090984647de0e5b3228e871c26c57a005938 | omp/__init__.py | omp/__init__.py | '''
OpenMP wrapper using a (user provided) libgomp dynamically loaded library
'''
import sys
import glob
import ctypes
class omp(object):
    """Forwarding proxy for the GNU OpenMP runtime (libgomp).

    An instance of this class is installed into ``sys.modules`` at the
    bottom of the module, so that ``omp.<name>`` resolves to the
    ``omp_<name>`` symbol of the dynamically loaded libgomp library.

    Raises EnvironmentError from ``__init__`` when no libgomp shared
    library can be located.
    """

    # Extra directories probed when the loader's default search fails.
    LD_LIBRARY_PATHS = [
        "/usr/lib/x86_64-linux-gnu/",
        # MacPorts install gcc in a "non standard" path on OSX
    ] + glob.glob("/opt/local/lib/gcc*/")

    def __init__(self):
        # BUG FIX: ``import ctypes`` alone does not make ``ctypes.util``
        # available — the submodule must be imported explicitly, otherwise
        # the lookups below raise AttributeError.
        import ctypes.util

        # Paths are "non-standard" place to lookup
        paths = omp.LD_LIBRARY_PATHS
        # BUG FIX: find_library() expects the library name WITHOUT the
        # "lib" prefix ("gomp", not "libgomp") — same fix as the repo's
        # own follow-up commit.
        libgomp_path = ctypes.util.find_library("gomp")
        # Try to use custom paths if lookup failed
        for path in paths:
            if libgomp_path:
                break
            libgomp_path = ctypes.util.find_library(path + "gomp")
        if not libgomp_path:
            raise EnvironmentError("I can't find a shared library for libgomp,"
                                   " you may need to install it or adjust the "
                                   "LD_LIBRARY_PATH environment variable.")
        else:
            # Load the library (shouldn't fail with an absolute path right?)
            self.libomp = ctypes.CDLL(libgomp_path)

    def __getattribute__(self, name):
        # 'libomp' is the only real instance attribute; any other name is
        # assumed to be an OpenMP entry point and resolved as ``omp_<name>``.
        if name == 'libomp':
            return object.__getattribute__(self, 'libomp')
        else:
            return getattr(self.libomp, 'omp_' + name)
# see http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules[__name__] = omp()
| '''
OpenMP wrapper using a (user provided) libgomp dynamically loaded library
'''
import sys
import glob
import ctypes
class omp(object):
LD_LIBRARY_PATHS = [
"/usr/lib/x86_64-linux-gnu/",
# MacPorts install gcc in a "non standard" path on OSX
] + glob.glob("/opt/local/lib/gcc*/")
def __init__(self):
# Paths are "non-standard" place to lookup
paths = omp.LD_LIBRARY_PATHS
# Try to load find libgomp shared library using loader search dirs
libgomp_path = ctypes.util.find_library("gomp")
# Try to use custom paths if lookup failed
for path in paths:
if libgomp_path:
break
libgomp_path = ctypes.util.find_library(path+"gomp")
if not libgomp_path:
raise EnvironmentError("I can't find a shared library for libgomp,"
" you may need to install it or adjust the "
"LD_LIBRARY_PATH environment variable.")
else:
# Load the library (shouldn't fail with an absolute path right?)
self.libomp = ctypes.CDLL(libgomp_path)
def __getattribute__(self, name):
if name == 'libomp':
return object.__getattribute__(self, 'libomp')
else:
return getattr(self.libomp, 'omp_' + name)
# see http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules[__name__] = omp()
| Fix gomp library dynamic loading issues | Fix gomp library dynamic loading issues
* bug introduced by commit 4a4676258bfd47a7fbefc51644eb58ffc60ab6ad
| Python | bsd-3-clause | pbrunet/pythran,artas360/pythran,pbrunet/pythran,hainm/pythran,pbrunet/pythran,hainm/pythran,pombredanne/pythran,artas360/pythran,pombredanne/pythran,artas360/pythran,hainm/pythran,serge-sans-paille/pythran,pombredanne/pythran,serge-sans-paille/pythran | '''
OpenMP wrapper using a (user provided) libgomp dynamically loaded library
'''
import sys
import glob
import ctypes
class omp(object):
LD_LIBRARY_PATHS = [
"/usr/lib/x86_64-linux-gnu/",
# MacPorts install gcc in a "non standard" path on OSX
] + glob.glob("/opt/local/lib/gcc*/")
def __init__(self):
# Paths are "non-standard" place to lookup
paths = omp.LD_LIBRARY_PATHS
# Try to load find libgomp shared library using loader search dirs
libgomp_path = ctypes.util.find_library("libgomp")
# Try to use custom paths if lookup failed
for path in paths:
if libgomp_path:
break
libgomp_path = ctypes.util.find_library(path+"libgomp")
if not libgomp_path:
raise EnvironmentError("I can't find a shared library for libgomp,"
" you may need to install it or adjust the "
"LD_LIBRARY_PATH environment variable.")
else:
# Load the library (shouldn't fail with an absolute path right?)
self.libomp = ctypes.CDLL(libgomp_path)
def __getattribute__(self, name):
if name == 'libomp':
return object.__getattribute__(self, 'libomp')
else:
return getattr(self.libomp, 'omp_' + name)
# see http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules[__name__] = omp()
Fix gomp library dynamic loading issues
* bug introduce from commit : 4a4676258bfd47a7fbefc51644eb58ffc60ab6ad | '''
OpenMP wrapper using a (user provided) libgomp dynamically loaded library
'''
import sys
import glob
import ctypes
class omp(object):
    """Proxy class exposing libgomp's ``omp_*`` API as plain attributes.

    The module replaces itself with an instance of this class (see the
    ``sys.modules`` assignment at the end of the file), so attribute
    access like ``omp.get_num_threads`` is forwarded to the loaded
    shared library as ``omp_get_num_threads``.

    Raises EnvironmentError from ``__init__`` when libgomp cannot be found.
    """

    # Fallback directories searched when the default loader lookup fails.
    LD_LIBRARY_PATHS = [
        "/usr/lib/x86_64-linux-gnu/",
        # MacPorts install gcc in a "non standard" path on OSX
    ] + glob.glob("/opt/local/lib/gcc*/")

    def __init__(self):
        # BUG FIX: the ``ctypes.util`` submodule is not imported as a side
        # effect of ``import ctypes``; without this explicit import the
        # calls below fail with AttributeError.
        import ctypes.util

        # Paths are "non-standard" place to lookup
        paths = omp.LD_LIBRARY_PATHS
        # find_library() takes the name without the "lib" prefix.
        libgomp_path = ctypes.util.find_library("gomp")
        # Try to use custom paths if lookup failed
        for path in paths:
            if libgomp_path:
                break
            libgomp_path = ctypes.util.find_library(path + "gomp")
        if not libgomp_path:
            raise EnvironmentError("I can't find a shared library for libgomp,"
                                   " you may need to install it or adjust the "
                                   "LD_LIBRARY_PATH environment variable.")
        else:
            # Load the library (shouldn't fail with an absolute path right?)
            self.libomp = ctypes.CDLL(libgomp_path)

    def __getattribute__(self, name):
        # 'libomp' is the one real attribute; everything else is resolved
        # against the loaded library with the ``omp_`` prefix prepended.
        if name == 'libomp':
            return object.__getattribute__(self, 'libomp')
        else:
            return getattr(self.libomp, 'omp_' + name)
# see http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
# Replace this module object in sys.modules with an omp() instance, so that
# attribute access on the imported module is forwarded to libgomp.
sys.modules[__name__] = omp()
| <commit_before>'''
OpenMP wrapper using a (user provided) libgomp dynamically loaded library
'''
import sys
import glob
import ctypes
class omp(object):
LD_LIBRARY_PATHS = [
"/usr/lib/x86_64-linux-gnu/",
# MacPorts install gcc in a "non standard" path on OSX
] + glob.glob("/opt/local/lib/gcc*/")
def __init__(self):
# Paths are "non-standard" place to lookup
paths = omp.LD_LIBRARY_PATHS
# Try to load find libgomp shared library using loader search dirs
libgomp_path = ctypes.util.find_library("libgomp")
# Try to use custom paths if lookup failed
for path in paths:
if libgomp_path:
break
libgomp_path = ctypes.util.find_library(path+"libgomp")
if not libgomp_path:
raise EnvironmentError("I can't find a shared library for libgomp,"
" you may need to install it or adjust the "
"LD_LIBRARY_PATH environment variable.")
else:
# Load the library (shouldn't fail with an absolute path right?)
self.libomp = ctypes.CDLL(libgomp_path)
def __getattribute__(self, name):
if name == 'libomp':
return object.__getattribute__(self, 'libomp')
else:
return getattr(self.libomp, 'omp_' + name)
# see http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules[__name__] = omp()
<commit_msg>Fix gomp library dynamic loading issues
* bug introduce from commit : 4a4676258bfd47a7fbefc51644eb58ffc60ab6ad<commit_after> | '''
OpenMP wrapper using a (user provided) libgomp dynamically loaded library
'''
import sys
import glob
import ctypes
class omp(object):
LD_LIBRARY_PATHS = [
"/usr/lib/x86_64-linux-gnu/",
# MacPorts install gcc in a "non standard" path on OSX
] + glob.glob("/opt/local/lib/gcc*/")
def __init__(self):
# Paths are "non-standard" place to lookup
paths = omp.LD_LIBRARY_PATHS
# Try to load find libgomp shared library using loader search dirs
libgomp_path = ctypes.util.find_library("gomp")
# Try to use custom paths if lookup failed
for path in paths:
if libgomp_path:
break
libgomp_path = ctypes.util.find_library(path+"gomp")
if not libgomp_path:
raise EnvironmentError("I can't find a shared library for libgomp,"
" you may need to install it or adjust the "
"LD_LIBRARY_PATH environment variable.")
else:
# Load the library (shouldn't fail with an absolute path right?)
self.libomp = ctypes.CDLL(libgomp_path)
def __getattribute__(self, name):
if name == 'libomp':
return object.__getattribute__(self, 'libomp')
else:
return getattr(self.libomp, 'omp_' + name)
# see http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules[__name__] = omp()
| '''
OpenMP wrapper using a (user provided) libgomp dynamically loaded library
'''
import sys
import glob
import ctypes
class omp(object):
LD_LIBRARY_PATHS = [
"/usr/lib/x86_64-linux-gnu/",
# MacPorts install gcc in a "non standard" path on OSX
] + glob.glob("/opt/local/lib/gcc*/")
def __init__(self):
# Paths are "non-standard" place to lookup
paths = omp.LD_LIBRARY_PATHS
# Try to load find libgomp shared library using loader search dirs
libgomp_path = ctypes.util.find_library("libgomp")
# Try to use custom paths if lookup failed
for path in paths:
if libgomp_path:
break
libgomp_path = ctypes.util.find_library(path+"libgomp")
if not libgomp_path:
raise EnvironmentError("I can't find a shared library for libgomp,"
" you may need to install it or adjust the "
"LD_LIBRARY_PATH environment variable.")
else:
# Load the library (shouldn't fail with an absolute path right?)
self.libomp = ctypes.CDLL(libgomp_path)
def __getattribute__(self, name):
if name == 'libomp':
return object.__getattribute__(self, 'libomp')
else:
return getattr(self.libomp, 'omp_' + name)
# see http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules[__name__] = omp()
Fix gomp library dynamic loading issues
* bug introduce from commit : 4a4676258bfd47a7fbefc51644eb58ffc60ab6ad'''
OpenMP wrapper using a (user provided) libgomp dynamically loaded library
'''
import sys
import glob
import ctypes
class omp(object):
LD_LIBRARY_PATHS = [
"/usr/lib/x86_64-linux-gnu/",
# MacPorts install gcc in a "non standard" path on OSX
] + glob.glob("/opt/local/lib/gcc*/")
def __init__(self):
# Paths are "non-standard" place to lookup
paths = omp.LD_LIBRARY_PATHS
# Try to load find libgomp shared library using loader search dirs
libgomp_path = ctypes.util.find_library("gomp")
# Try to use custom paths if lookup failed
for path in paths:
if libgomp_path:
break
libgomp_path = ctypes.util.find_library(path+"gomp")
if not libgomp_path:
raise EnvironmentError("I can't find a shared library for libgomp,"
" you may need to install it or adjust the "
"LD_LIBRARY_PATH environment variable.")
else:
# Load the library (shouldn't fail with an absolute path right?)
self.libomp = ctypes.CDLL(libgomp_path)
def __getattribute__(self, name):
if name == 'libomp':
return object.__getattribute__(self, 'libomp')
else:
return getattr(self.libomp, 'omp_' + name)
# see http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules[__name__] = omp()
| <commit_before>'''
OpenMP wrapper using a (user provided) libgomp dynamically loaded library
'''
import sys
import glob
import ctypes
class omp(object):
LD_LIBRARY_PATHS = [
"/usr/lib/x86_64-linux-gnu/",
# MacPorts install gcc in a "non standard" path on OSX
] + glob.glob("/opt/local/lib/gcc*/")
def __init__(self):
# Paths are "non-standard" place to lookup
paths = omp.LD_LIBRARY_PATHS
# Try to load find libgomp shared library using loader search dirs
libgomp_path = ctypes.util.find_library("libgomp")
# Try to use custom paths if lookup failed
for path in paths:
if libgomp_path:
break
libgomp_path = ctypes.util.find_library(path+"libgomp")
if not libgomp_path:
raise EnvironmentError("I can't find a shared library for libgomp,"
" you may need to install it or adjust the "
"LD_LIBRARY_PATH environment variable.")
else:
# Load the library (shouldn't fail with an absolute path right?)
self.libomp = ctypes.CDLL(libgomp_path)
def __getattribute__(self, name):
if name == 'libomp':
return object.__getattribute__(self, 'libomp')
else:
return getattr(self.libomp, 'omp_' + name)
# see http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules[__name__] = omp()
<commit_msg>Fix gomp library dynamic loading issues
* bug introduce from commit : 4a4676258bfd47a7fbefc51644eb58ffc60ab6ad<commit_after>'''
OpenMP wrapper using a (user provided) libgomp dynamically loaded library
'''
import sys
import glob
import ctypes
class omp(object):
LD_LIBRARY_PATHS = [
"/usr/lib/x86_64-linux-gnu/",
# MacPorts install gcc in a "non standard" path on OSX
] + glob.glob("/opt/local/lib/gcc*/")
def __init__(self):
# Paths are "non-standard" place to lookup
paths = omp.LD_LIBRARY_PATHS
# Try to load find libgomp shared library using loader search dirs
libgomp_path = ctypes.util.find_library("gomp")
# Try to use custom paths if lookup failed
for path in paths:
if libgomp_path:
break
libgomp_path = ctypes.util.find_library(path+"gomp")
if not libgomp_path:
raise EnvironmentError("I can't find a shared library for libgomp,"
" you may need to install it or adjust the "
"LD_LIBRARY_PATH environment variable.")
else:
# Load the library (shouldn't fail with an absolute path right?)
self.libomp = ctypes.CDLL(libgomp_path)
def __getattribute__(self, name):
if name == 'libomp':
return object.__getattribute__(self, 'libomp')
else:
return getattr(self.libomp, 'omp_' + name)
# see http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules[__name__] = omp()
|
3fcb69fbb623184a30d1d5ecb41e4c3c33128f1a | src/lyra/tests/dictionary/dict_descr_example.py | src/lyra/tests/dictionary/dict_descr_example.py |
important: Set[str] = {"Albert Einstein" , "Alan Turing"}
texts: Dict[str, str] = input() # {"<author >" : "<t e x t >"}
freqdict: Dict[str, int] = {} # defaultdict(int) err: int recognized as varId #initialized to 0
a: str = "" #necessary?
b: str = ""
for a, b in texts.items():
if a in important: #texts of important authors weighted twice
weight: int = 2
else:
weight: int = 1
words: List[str] = a.split() #Bug A: Should be `b' (values)
for word in words: #and Bug B: Wrong indentation
word: str = word.lower()
freqdict[word]: int = freqdict[word] + weight
print(freqdict) #outputs <word>:<count>, ... |
important: Set[str] = {"Albert Einstein" , "Alan Turing"}
texts: Dict[str, str] = input() # {"<author >" : "<t e x t >"}
freqdict: Dict[str, int] = {} # defaultdict(int) err: int recognized as varId #initialized to 0
a: str = "" #necessary?
b: str = ""
for a, b in texts.items():
if a in important: #texts of important authors weighted twice
weight: int = 2
else:
weight: int = 1
words: List[str] = a.split() #Bug A: Should be `b' (values)
word: str = ""
for word in words: #and Bug B: Wrong indentation
word: str = word.lower()
freqdict[word]: int = freqdict[word] + weight
print(freqdict) #outputs <word>:<count>, ... | Fix to typing of loop variables | Fix to typing of loop variables
| Python | mpl-2.0 | caterinaurban/Lyra |
important: Set[str] = {"Albert Einstein" , "Alan Turing"}
texts: Dict[str, str] = input() # {"<author >" : "<t e x t >"}
freqdict: Dict[str, int] = {} # defaultdict(int) err: int recognized as varId #initialized to 0
a: str = "" #necessary?
b: str = ""
for a, b in texts.items():
if a in important: #texts of important authors weighted twice
weight: int = 2
else:
weight: int = 1
words: List[str] = a.split() #Bug A: Should be `b' (values)
for word in words: #and Bug B: Wrong indentation
word: str = word.lower()
freqdict[word]: int = freqdict[word] + weight
print(freqdict) #outputs <word>:<count>, ...Fix to typing of loop variables |
important: Set[str] = {"Albert Einstein" , "Alan Turing"}
texts: Dict[str, str] = input() # {"<author >" : "<t e x t >"}
freqdict: Dict[str, int] = {} # defaultdict(int) err: int recognized as varId #initialized to 0
a: str = "" #necessary?
b: str = ""
for a, b in texts.items():
if a in important: #texts of important authors weighted twice
weight: int = 2
else:
weight: int = 1
words: List[str] = a.split() #Bug A: Should be `b' (values)
word: str = ""
for word in words: #and Bug B: Wrong indentation
word: str = word.lower()
freqdict[word]: int = freqdict[word] + weight
print(freqdict) #outputs <word>:<count>, ... | <commit_before>
important: Set[str] = {"Albert Einstein" , "Alan Turing"}
texts: Dict[str, str] = input() # {"<author >" : "<t e x t >"}
freqdict: Dict[str, int] = {} # defaultdict(int) err: int recognized as varId #initialized to 0
a: str = "" #necessary?
b: str = ""
for a, b in texts.items():
if a in important: #texts of important authors weighted twice
weight: int = 2
else:
weight: int = 1
words: List[str] = a.split() #Bug A: Should be `b' (values)
for word in words: #and Bug B: Wrong indentation
word: str = word.lower()
freqdict[word]: int = freqdict[word] + weight
print(freqdict) #outputs <word>:<count>, ...<commit_msg>Fix to typing of loop variables<commit_after> |
important: Set[str] = {"Albert Einstein" , "Alan Turing"}
texts: Dict[str, str] = input() # {"<author >" : "<t e x t >"}
freqdict: Dict[str, int] = {} # defaultdict(int) err: int recognized as varId #initialized to 0
a: str = "" #necessary?
b: str = ""
for a, b in texts.items():
if a in important: #texts of important authors weighted twice
weight: int = 2
else:
weight: int = 1
words: List[str] = a.split() #Bug A: Should be `b' (values)
word: str = ""
for word in words: #and Bug B: Wrong indentation
word: str = word.lower()
freqdict[word]: int = freqdict[word] + weight
print(freqdict) #outputs <word>:<count>, ... |
important: Set[str] = {"Albert Einstein" , "Alan Turing"}
texts: Dict[str, str] = input() # {"<author >" : "<t e x t >"}
freqdict: Dict[str, int] = {} # defaultdict(int) err: int recognized as varId #initialized to 0
a: str = "" #necessary?
b: str = ""
for a, b in texts.items():
if a in important: #texts of important authors weighted twice
weight: int = 2
else:
weight: int = 1
words: List[str] = a.split() #Bug A: Should be `b' (values)
for word in words: #and Bug B: Wrong indentation
word: str = word.lower()
freqdict[word]: int = freqdict[word] + weight
print(freqdict) #outputs <word>:<count>, ...Fix to typing of loop variables
important: Set[str] = {"Albert Einstein" , "Alan Turing"}
texts: Dict[str, str] = input() # {"<author >" : "<t e x t >"}
freqdict: Dict[str, int] = {} # defaultdict(int) err: int recognized as varId #initialized to 0
a: str = "" #necessary?
b: str = ""
for a, b in texts.items():
if a in important: #texts of important authors weighted twice
weight: int = 2
else:
weight: int = 1
words: List[str] = a.split() #Bug A: Should be `b' (values)
word: str = ""
for word in words: #and Bug B: Wrong indentation
word: str = word.lower()
freqdict[word]: int = freqdict[word] + weight
print(freqdict) #outputs <word>:<count>, ... | <commit_before>
important: Set[str] = {"Albert Einstein" , "Alan Turing"}
texts: Dict[str, str] = input() # {"<author >" : "<t e x t >"}
freqdict: Dict[str, int] = {} # defaultdict(int) err: int recognized as varId #initialized to 0
a: str = "" #necessary?
b: str = ""
for a, b in texts.items():
if a in important: #texts of important authors weighted twice
weight: int = 2
else:
weight: int = 1
words: List[str] = a.split() #Bug A: Should be `b' (values)
for word in words: #and Bug B: Wrong indentation
word: str = word.lower()
freqdict[word]: int = freqdict[word] + weight
print(freqdict) #outputs <word>:<count>, ...<commit_msg>Fix to typing of loop variables<commit_after>
important: Set[str] = {"Albert Einstein" , "Alan Turing"}
texts: Dict[str, str] = input() # {"<author >" : "<t e x t >"}
freqdict: Dict[str, int] = {} # defaultdict(int) err: int recognized as varId #initialized to 0
a: str = "" #necessary?
b: str = ""
for a, b in texts.items():
if a in important: #texts of important authors weighted twice
weight: int = 2
else:
weight: int = 1
words: List[str] = a.split() #Bug A: Should be `b' (values)
word: str = ""
for word in words: #and Bug B: Wrong indentation
word: str = word.lower()
freqdict[word]: int = freqdict[word] + weight
print(freqdict) #outputs <word>:<count>, ... |
e99a0bdde697a0508bc17a8dd66943cdf97bdc3d | Main_program/src/main_program.py | Main_program/src/main_program.py | # To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
if __name__ == "__main__":
print "Hello World"
| # To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
# Smoke-test output — per the commit subject, this snapshot was used to
# verify the repository's commit/push/pull workflow.
print("Hello World")
print("Hessel is een home")
| Python | apache-2.0 | HesselTjeerdsma/Cyber-Physical-Pacman-Game,HesselTjeerdsma/Cyber-Physical-Pacman-Game,HesselTjeerdsma/Cyber-Physical-Pacman-Game,HesselTjeerdsma/Cyber-Physical-Pacman-Game,HesselTjeerdsma/Cyber-Physical-Pacman-Game,HesselTjeerdsma/Cyber-Physical-Pacman-Game | # To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
if __name__ == "__main__":
print "Hello World"
Test commit push and pull | # To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
print("Hello World")
print("Hessel is een home") | <commit_before># To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
if __name__ == "__main__":
print "Hello World"
<commit_msg>Test commit push and pull<commit_after> | # To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
print("Hello World")
print("Hessel is een home") | # To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
if __name__ == "__main__":
print "Hello World"
Test commit push and pull# To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
print("Hello World")
print("Hessel is een home") | <commit_before># To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
if __name__ == "__main__":
print "Hello World"
<commit_msg>Test commit push and pull<commit_after># To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
print("Hello World")
print("Hessel is een home") |
70cb8045b00445089db6aaa710a0899e1f4cbab1 | porick/settings.py | porick/settings.py | SITE_NAME = 'Porick'
# User-facing text constants (HTML allowed in WELCOME_TEXT).
WELCOME_TEXT = '''Porick is yet another IRC Quotes web application, designed to replace <i>Chirpy!</i>'''
HOMEPAGE_BUTTON_TEXT = 'Start browsing'
# Pagination size for quote listings.
QUOTES_PER_PAGE = 10
# Mapping of logical table names to the legacy Chirpy! database tables.
TABLES = {
    'accounts': 'chirpy_accounts',
    'event_metadata': 'chirpy_event_metadata',
    'events': 'chirpy_events',
    'news': 'chirpy_news',
    'quote_to_tag': 'chirpy_quote_tag',
    'quotes': 'chirpy_quotes',
    'sessions': 'chirpy_sessions',
    'tags': 'chirpy_tags',
    'vars': 'chirpy_vars'
}
| SITE_NAME = 'Porick.'
WELCOME_TEXT = '''Porick is yet another IRC Quotes web application, designed to replace <i>Chirpy!</i>'''
HOMEPAGE_BUTTON_TEXT = 'Start browsing'
QUOTES_PER_PAGE = 10
TABLES = {
'accounts': 'chirpy_accounts',
'event_metadata': 'chirpy_event_metadata',
'events': 'chirpy_events',
'news': 'chirpy_news',
'quote_to_tag': 'chirpy_quote_tag',
'quotes': 'chirpy_quotes',
'sessions': 'chirpy_sessions',
'tags': 'chirpy_tags',
'vars': 'chirpy_vars'
}
| Add hipster full-stop to the front-page | Add hipster full-stop to the front-page
| Python | apache-2.0 | kopf/porick,kopf/porick,kopf/porick | SITE_NAME = 'Porick'
WELCOME_TEXT = '''Porick is yet another IRC Quotes web application, designed to replace <i>Chirpy!</i>'''
HOMEPAGE_BUTTON_TEXT = 'Start browsing'
QUOTES_PER_PAGE = 10
TABLES = {
'accounts': 'chirpy_accounts',
'event_metadata': 'chirpy_event_metadata',
'events': 'chirpy_events',
'news': 'chirpy_news',
'quote_to_tag': 'chirpy_quote_tag',
'quotes': 'chirpy_quotes',
'sessions': 'chirpy_sessions',
'tags': 'chirpy_tags',
'vars': 'chirpy_vars'
}
Add hipster full-stop to the front-page | SITE_NAME = 'Porick.'
WELCOME_TEXT = '''Porick is yet another IRC Quotes web application, designed to replace <i>Chirpy!</i>'''
HOMEPAGE_BUTTON_TEXT = 'Start browsing'
QUOTES_PER_PAGE = 10
TABLES = {
'accounts': 'chirpy_accounts',
'event_metadata': 'chirpy_event_metadata',
'events': 'chirpy_events',
'news': 'chirpy_news',
'quote_to_tag': 'chirpy_quote_tag',
'quotes': 'chirpy_quotes',
'sessions': 'chirpy_sessions',
'tags': 'chirpy_tags',
'vars': 'chirpy_vars'
}
| <commit_before>SITE_NAME = 'Porick'
WELCOME_TEXT = '''Porick is yet another IRC Quotes web application, designed to replace <i>Chirpy!</i>'''
HOMEPAGE_BUTTON_TEXT = 'Start browsing'
QUOTES_PER_PAGE = 10
TABLES = {
'accounts': 'chirpy_accounts',
'event_metadata': 'chirpy_event_metadata',
'events': 'chirpy_events',
'news': 'chirpy_news',
'quote_to_tag': 'chirpy_quote_tag',
'quotes': 'chirpy_quotes',
'sessions': 'chirpy_sessions',
'tags': 'chirpy_tags',
'vars': 'chirpy_vars'
}
<commit_msg>Add hipster full-stop to the front-page<commit_after> | SITE_NAME = 'Porick.'
WELCOME_TEXT = '''Porick is yet another IRC Quotes web application, designed to replace <i>Chirpy!</i>'''
HOMEPAGE_BUTTON_TEXT = 'Start browsing'
QUOTES_PER_PAGE = 10
TABLES = {
'accounts': 'chirpy_accounts',
'event_metadata': 'chirpy_event_metadata',
'events': 'chirpy_events',
'news': 'chirpy_news',
'quote_to_tag': 'chirpy_quote_tag',
'quotes': 'chirpy_quotes',
'sessions': 'chirpy_sessions',
'tags': 'chirpy_tags',
'vars': 'chirpy_vars'
}
| SITE_NAME = 'Porick'
WELCOME_TEXT = '''Porick is yet another IRC Quotes web application, designed to replace <i>Chirpy!</i>'''
HOMEPAGE_BUTTON_TEXT = 'Start browsing'
QUOTES_PER_PAGE = 10
TABLES = {
'accounts': 'chirpy_accounts',
'event_metadata': 'chirpy_event_metadata',
'events': 'chirpy_events',
'news': 'chirpy_news',
'quote_to_tag': 'chirpy_quote_tag',
'quotes': 'chirpy_quotes',
'sessions': 'chirpy_sessions',
'tags': 'chirpy_tags',
'vars': 'chirpy_vars'
}
Add hipster full-stop to the front-pageSITE_NAME = 'Porick.'
WELCOME_TEXT = '''Porick is yet another IRC Quotes web application, designed to replace <i>Chirpy!</i>'''
HOMEPAGE_BUTTON_TEXT = 'Start browsing'
QUOTES_PER_PAGE = 10
TABLES = {
'accounts': 'chirpy_accounts',
'event_metadata': 'chirpy_event_metadata',
'events': 'chirpy_events',
'news': 'chirpy_news',
'quote_to_tag': 'chirpy_quote_tag',
'quotes': 'chirpy_quotes',
'sessions': 'chirpy_sessions',
'tags': 'chirpy_tags',
'vars': 'chirpy_vars'
}
| <commit_before>SITE_NAME = 'Porick'
WELCOME_TEXT = '''Porick is yet another IRC Quotes web application, designed to replace <i>Chirpy!</i>'''
HOMEPAGE_BUTTON_TEXT = 'Start browsing'
QUOTES_PER_PAGE = 10
TABLES = {
'accounts': 'chirpy_accounts',
'event_metadata': 'chirpy_event_metadata',
'events': 'chirpy_events',
'news': 'chirpy_news',
'quote_to_tag': 'chirpy_quote_tag',
'quotes': 'chirpy_quotes',
'sessions': 'chirpy_sessions',
'tags': 'chirpy_tags',
'vars': 'chirpy_vars'
}
<commit_msg>Add hipster full-stop to the front-page<commit_after>SITE_NAME = 'Porick.'
WELCOME_TEXT = '''Porick is yet another IRC Quotes web application, designed to replace <i>Chirpy!</i>'''
HOMEPAGE_BUTTON_TEXT = 'Start browsing'
QUOTES_PER_PAGE = 10
TABLES = {
'accounts': 'chirpy_accounts',
'event_metadata': 'chirpy_event_metadata',
'events': 'chirpy_events',
'news': 'chirpy_news',
'quote_to_tag': 'chirpy_quote_tag',
'quotes': 'chirpy_quotes',
'sessions': 'chirpy_sessions',
'tags': 'chirpy_tags',
'vars': 'chirpy_vars'
}
|
43a672574d13ce3ce267f5e4773c5d26827a4b9a | app/sso/backends.py | app/sso/backends.py | from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
| from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
return user
else:
if user.password == sha1(password).hexdigest():
user.set_password(password)
return user
return None
| Switch authenticator to migrate back to Django style passwords | Switch authenticator to migrate back to Django style passwords
| Python | bsd-3-clause | nikdoof/test-auth | from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
Switch authenticator to migrate back to Django style passwords | from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
return user
else:
if user.password == sha1(password).hexdigest():
user.set_password(password)
return user
return None
| <commit_before>from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
<commit_msg>Switch authenticator to migrate back to Django style passwords<commit_after> | from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
return user
else:
if user.password == sha1(password).hexdigest():
user.set_password(password)
return user
return None
| from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
Switch authenticator to migrate back to Django style passwordsfrom django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
return user
else:
if user.password == sha1(password).hexdigest():
user.set_password(password)
return user
return None
| <commit_before>from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
<commit_msg>Switch authenticator to migrate back to Django style passwords<commit_after>from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
return user
else:
if user.password == sha1(password).hexdigest():
user.set_password(password)
return user
return None
|
74cd91babf7466c35fbb680c10d5f2451b1409b0 | appengine_config.py | appengine_config.py | import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath(os.environ.get('ISB_CGC_COMMON_MODULE_PATH'))
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
| import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath('./ISB-CGC-Common')
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
| Use hardcoded path for ISB-CGC-Common | Use hardcoded path for ISB-CGC-Common
| Python | apache-2.0 | isb-cgc/ISB-CGC-API,isb-cgc/ISB-CGC-API,isb-cgc/ISB-CGC-API | import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath(os.environ.get('ISB_CGC_COMMON_MODULE_PATH'))
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
Use hardcoded path for ISB-CGC-Common | import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath('./ISB-CGC-Common')
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
| <commit_before>import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath(os.environ.get('ISB_CGC_COMMON_MODULE_PATH'))
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
<commit_msg>Use hardcoded path for ISB-CGC-Common<commit_after> | import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath('./ISB-CGC-Common')
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
| import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath(os.environ.get('ISB_CGC_COMMON_MODULE_PATH'))
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
Use hardcoded path for ISB-CGC-Commonimport os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath('./ISB-CGC-Common')
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
| <commit_before>import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath(os.environ.get('ISB_CGC_COMMON_MODULE_PATH'))
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
<commit_msg>Use hardcoded path for ISB-CGC-Common<commit_after>import os
import sys
from google.appengine.ext import vendor
# Add any libraries installed in the "lib" folder.
vendor.add('lib')
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
os.path.abspath('./ISB-CGC-Common')
]
# Add the shared Django application subdirectory to the Python module search path
for path in SHARED_SOURCE_DIRECTORIES:
sys.path.append(path)
|
edfc43f4c6041166845e5e4ffd2db58802d3e8c6 | ml/pytorch/image_classification/architectures.py | ml/pytorch/image_classification/architectures.py | import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
################################################################################
# SUPPORT
################################################################################
class Flatten(nn.Module):
""" Module to Flatten a layer """
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(x):
return x.view(x.size(0), -1)
################################################################################
# LAYERS
################################################################################
def conv(fin, out, k=3, s=1, d=1, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Convolutional module
By default uses same padding
CONV > BatchNorm > Activation > Dropout
"""
# naive calculation of padding
p = (k-1)//2
# Conv
sq = nn.Sequential()
sq.add_module("conv", nn.Conv2d(fin, out, k, stride=s, padding=p, dilation=d, bias=bias))
# Optional components
if bn:
sq.add_module("bn", nn.BatchNorm2d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout2d(p=dropout))
return sq
| import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
################################################################################
# SUPPORT
################################################################################
class Flatten(nn.Module):
""" Module to Flatten a layer """
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(x):
return x.view(x.size(0), -1)
################################################################################
# LAYERS
################################################################################
def conv(fin, out, k=3, s=1, d=1, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Convolutional module
By default uses same padding
CONV > BatchNorm > Activation > Dropout
"""
# naive calculation of padding
p = (k-1)//2
# Conv
sq = nn.Sequential()
sq.add_module("conv", nn.Conv2d(fin, out, k, stride=s, padding=p, dilation=d, bias=bias))
# Optional components
if bn:
sq.add_module("bn", nn.BatchNorm2d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout2d(p=dropout))
return sq
def fc(fin, out, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Fully connected module
FC > BatchNorm > Activation > Dropout
"""
sq = nn.Sequential()
sq.add_module("fc", nn.Linear(fin, out, bias=bias))
if bn is not None:
sq.add_module("bn", nn.BatchNorm1d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout(p=dropout))
return sq
| Add custom FC layer module | FEAT: Add custom FC layer module
| Python | apache-2.0 | ronrest/convenience_py,ronrest/convenience_py | import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
################################################################################
# SUPPORT
################################################################################
class Flatten(nn.Module):
""" Module to Flatten a layer """
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(x):
return x.view(x.size(0), -1)
################################################################################
# LAYERS
################################################################################
def conv(fin, out, k=3, s=1, d=1, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Convolutional module
By default uses same padding
CONV > BatchNorm > Activation > Dropout
"""
# naive calculation of padding
p = (k-1)//2
# Conv
sq = nn.Sequential()
sq.add_module("conv", nn.Conv2d(fin, out, k, stride=s, padding=p, dilation=d, bias=bias))
# Optional components
if bn:
sq.add_module("bn", nn.BatchNorm2d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout2d(p=dropout))
return sq
FEAT: Add custom FC layer module | import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
################################################################################
# SUPPORT
################################################################################
class Flatten(nn.Module):
""" Module to Flatten a layer """
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(x):
return x.view(x.size(0), -1)
################################################################################
# LAYERS
################################################################################
def conv(fin, out, k=3, s=1, d=1, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Convolutional module
By default uses same padding
CONV > BatchNorm > Activation > Dropout
"""
# naive calculation of padding
p = (k-1)//2
# Conv
sq = nn.Sequential()
sq.add_module("conv", nn.Conv2d(fin, out, k, stride=s, padding=p, dilation=d, bias=bias))
# Optional components
if bn:
sq.add_module("bn", nn.BatchNorm2d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout2d(p=dropout))
return sq
def fc(fin, out, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Fully connected module
FC > BatchNorm > Activation > Dropout
"""
sq = nn.Sequential()
sq.add_module("fc", nn.Linear(fin, out, bias=bias))
if bn is not None:
sq.add_module("bn", nn.BatchNorm1d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout(p=dropout))
return sq
| <commit_before>import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
################################################################################
# SUPPORT
################################################################################
class Flatten(nn.Module):
""" Module to Flatten a layer """
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(x):
return x.view(x.size(0), -1)
################################################################################
# LAYERS
################################################################################
def conv(fin, out, k=3, s=1, d=1, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Convolutional module
By default uses same padding
CONV > BatchNorm > Activation > Dropout
"""
# naive calculation of padding
p = (k-1)//2
# Conv
sq = nn.Sequential()
sq.add_module("conv", nn.Conv2d(fin, out, k, stride=s, padding=p, dilation=d, bias=bias))
# Optional components
if bn:
sq.add_module("bn", nn.BatchNorm2d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout2d(p=dropout))
return sq
<commit_msg>FEAT: Add custom FC layer module<commit_after> | import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
################################################################################
# SUPPORT
################################################################################
class Flatten(nn.Module):
""" Module to Flatten a layer """
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(x):
return x.view(x.size(0), -1)
################################################################################
# LAYERS
################################################################################
def conv(fin, out, k=3, s=1, d=1, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Convolutional module
By default uses same padding
CONV > BatchNorm > Activation > Dropout
"""
# naive calculation of padding
p = (k-1)//2
# Conv
sq = nn.Sequential()
sq.add_module("conv", nn.Conv2d(fin, out, k, stride=s, padding=p, dilation=d, bias=bias))
# Optional components
if bn:
sq.add_module("bn", nn.BatchNorm2d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout2d(p=dropout))
return sq
def fc(fin, out, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Fully connected module
FC > BatchNorm > Activation > Dropout
"""
sq = nn.Sequential()
sq.add_module("fc", nn.Linear(fin, out, bias=bias))
if bn is not None:
sq.add_module("bn", nn.BatchNorm1d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout(p=dropout))
return sq
| import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
################################################################################
# SUPPORT
################################################################################
class Flatten(nn.Module):
""" Module to Flatten a layer """
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(x):
return x.view(x.size(0), -1)
################################################################################
# LAYERS
################################################################################
def conv(fin, out, k=3, s=1, d=1, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Convolutional module
By default uses same padding
CONV > BatchNorm > Activation > Dropout
"""
# naive calculation of padding
p = (k-1)//2
# Conv
sq = nn.Sequential()
sq.add_module("conv", nn.Conv2d(fin, out, k, stride=s, padding=p, dilation=d, bias=bias))
# Optional components
if bn:
sq.add_module("bn", nn.BatchNorm2d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout2d(p=dropout))
return sq
FEAT: Add custom FC layer moduleimport torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
################################################################################
# SUPPORT
################################################################################
class Flatten(nn.Module):
""" Module to Flatten a layer """
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(x):
return x.view(x.size(0), -1)
################################################################################
# LAYERS
################################################################################
def conv(fin, out, k=3, s=1, d=1, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Convolutional module
By default uses same padding
CONV > BatchNorm > Activation > Dropout
"""
# naive calculation of padding
p = (k-1)//2
# Conv
sq = nn.Sequential()
sq.add_module("conv", nn.Conv2d(fin, out, k, stride=s, padding=p, dilation=d, bias=bias))
# Optional components
if bn:
sq.add_module("bn", nn.BatchNorm2d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout2d(p=dropout))
return sq
def fc(fin, out, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Fully connected module
FC > BatchNorm > Activation > Dropout
"""
sq = nn.Sequential()
sq.add_module("fc", nn.Linear(fin, out, bias=bias))
if bn is not None:
sq.add_module("bn", nn.BatchNorm1d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout(p=dropout))
return sq
| <commit_before>import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
################################################################################
# SUPPORT
################################################################################
class Flatten(nn.Module):
""" Module to Flatten a layer """
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(x):
return x.view(x.size(0), -1)
################################################################################
# LAYERS
################################################################################
def conv(fin, out, k=3, s=1, d=1, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Convolutional module
By default uses same padding
CONV > BatchNorm > Activation > Dropout
"""
# naive calculation of padding
p = (k-1)//2
# Conv
sq = nn.Sequential()
sq.add_module("conv", nn.Conv2d(fin, out, k, stride=s, padding=p, dilation=d, bias=bias))
# Optional components
if bn:
sq.add_module("bn", nn.BatchNorm2d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout2d(p=dropout))
return sq
<commit_msg>FEAT: Add custom FC layer module<commit_after>import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
################################################################################
# SUPPORT
################################################################################
class Flatten(nn.Module):
""" Module to Flatten a layer """
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(x):
return x.view(x.size(0), -1)
################################################################################
# LAYERS
################################################################################
def conv(fin, out, k=3, s=1, d=1, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Convolutional module
By default uses same padding
CONV > BatchNorm > Activation > Dropout
"""
# naive calculation of padding
p = (k-1)//2
# Conv
sq = nn.Sequential()
sq.add_module("conv", nn.Conv2d(fin, out, k, stride=s, padding=p, dilation=d, bias=bias))
# Optional components
if bn:
sq.add_module("bn", nn.BatchNorm2d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout2d(p=dropout))
return sq
def fc(fin, out, bn=True, bias=False, dropout=None, activation=nn.ReLU):
""" Fully connected module
FC > BatchNorm > Activation > Dropout
"""
sq = nn.Sequential()
sq.add_module("fc", nn.Linear(fin, out, bias=bias))
if bn is not None:
sq.add_module("bn", nn.BatchNorm1d(out))
if activation is not None:
sq.add_module("activation", activation())
if dropout is not None:
sq.add_module("dropout", nn.Dropout(p=dropout))
return sq
|
23606cec326b75cb73ba31b93410770659481d41 | test_echo.py | test_echo.py | # -*- coding: utf-8 -*-
import subprocess
import pytest
def test_basic(string="This is a test."):
"""Test function to test echo server and client with inputted string."""
process = subprocess.Popen(['./echo_client.py', string],
stdout=subprocess.PIPE)
assert string == process.stdout.readline().rstrip()
def test_exact32():
"""Test echo server and client with string length 32, the buffer size."""
test_basic("12345678901234567890123456789012")
def test_unicode():
"""Test that server and client handle decoding and encoding of unicode."""
with pytest.raises(AssertionError):
inp = 'Testing «ταБЬℓσ»: 1<2 & 4+1>3, now 20 off!'
inp = inp.decode('utf-8')
process = subprocess.Popen(['./echo_client.py', inp],
stdout=subprocess.PIPE)
assert inp == process.stdout.readline().rstrip()
def test_long():
"""Test server and client can handle long messages."""
test_basic("Running the server script in one terminal should allow you to \
run the client script in a separate terminal. The client script should\
take an argument which is the message to send. Upon completing, the \
response from the server should be printed to stdout.")
| # -*- coding: utf-8 -*-
import subprocess
def test_basic(string=u"This is a test."):
"""Test function to test echo server and client with inputted string."""
process = subprocess.Popen(['./echo_client.py', string],
stdout=subprocess.PIPE)
assert string == process.stdout.readline().rstrip()
def test_exact32():
"""Test echo server and client with string length 32, the buffer size."""
test_basic(u"12345678901234567890123456789012")
def test_unicode():
"""Test that server and client handle encoding and decoding of unicode."""
inp = u'Testing «ταБЬℓσ»: 1<2 & 4+1>3, now 20 off!'
process = subprocess.Popen(['./echo_client.py', inp],
stdout=subprocess.PIPE)
assert inp == process.stdout.readline().rstrip().decode('utf-8')
def test_long():
"""Test server and client can handle long messages."""
test_basic(u"Running the server script in one terminal should allow you to \
run the client script in a separate terminal. The client script should\
take an argument which is the message to send. Upon completing, the \
response from the server should be printed to stdout.")
| Change unicode test, to test decoding and encoding is done correctly. | Change unicode test, to test decoding and encoding is done correctly.
| Python | mit | bm5w/network_tools | # -*- coding: utf-8 -*-
import subprocess
import pytest
def test_basic(string="This is a test."):
"""Test function to test echo server and client with inputted string."""
process = subprocess.Popen(['./echo_client.py', string],
stdout=subprocess.PIPE)
assert string == process.stdout.readline().rstrip()
def test_exact32():
"""Test echo server and client with string length 32, the buffer size."""
test_basic("12345678901234567890123456789012")
def test_unicode():
"""Test that server and client handle decoding and encoding of unicode."""
with pytest.raises(AssertionError):
inp = 'Testing «ταБЬℓσ»: 1<2 & 4+1>3, now 20 off!'
inp = inp.decode('utf-8')
process = subprocess.Popen(['./echo_client.py', inp],
stdout=subprocess.PIPE)
assert inp == process.stdout.readline().rstrip()
def test_long():
"""Test server and client can handle long messages."""
test_basic("Running the server script in one terminal should allow you to \
run the client script in a separate terminal. The client script should\
take an argument which is the message to send. Upon completing, the \
response from the server should be printed to stdout.")
Change unicode test, to test decoding and encoding is done correctly. | # -*- coding: utf-8 -*-
import subprocess
def test_basic(string=u"This is a test."):
"""Test function to test echo server and client with inputted string."""
process = subprocess.Popen(['./echo_client.py', string],
stdout=subprocess.PIPE)
assert string == process.stdout.readline().rstrip()
def test_exact32():
"""Test echo server and client with string length 32, the buffer size."""
test_basic(u"12345678901234567890123456789012")
def test_unicode():
"""Test that server and client handle encoding and decoding of unicode."""
inp = u'Testing «ταБЬℓσ»: 1<2 & 4+1>3, now 20 off!'
process = subprocess.Popen(['./echo_client.py', inp],
stdout=subprocess.PIPE)
assert inp == process.stdout.readline().rstrip().decode('utf-8')
def test_long():
"""Test server and client can handle long messages."""
test_basic(u"Running the server script in one terminal should allow you to \
run the client script in a separate terminal. The client script should\
take an argument which is the message to send. Upon completing, the \
response from the server should be printed to stdout.")
| <commit_before># -*- coding: utf-8 -*-
import subprocess
import pytest
def test_basic(string="This is a test."):
"""Test function to test echo server and client with inputted string."""
process = subprocess.Popen(['./echo_client.py', string],
stdout=subprocess.PIPE)
assert string == process.stdout.readline().rstrip()
def test_exact32():
"""Test echo server and client with string length 32, the buffer size."""
test_basic("12345678901234567890123456789012")
def test_unicode():
"""Test that server and client handle decoding and encoding of unicode."""
with pytest.raises(AssertionError):
inp = 'Testing «ταБЬℓσ»: 1<2 & 4+1>3, now 20 off!'
inp = inp.decode('utf-8')
process = subprocess.Popen(['./echo_client.py', inp],
stdout=subprocess.PIPE)
assert inp == process.stdout.readline().rstrip()
def test_long():
"""Test server and client can handle long messages."""
test_basic("Running the server script in one terminal should allow you to \
run the client script in a separate terminal. The client script should\
take an argument which is the message to send. Upon completing, the \
response from the server should be printed to stdout.")
<commit_msg>Change unicode test, to test decoding and encoding is done correctly.<commit_after> | # -*- coding: utf-8 -*-
import subprocess
def test_basic(string=u"This is a test."):
"""Test function to test echo server and client with inputted string."""
process = subprocess.Popen(['./echo_client.py', string],
stdout=subprocess.PIPE)
assert string == process.stdout.readline().rstrip()
def test_exact32():
"""Test echo server and client with string length 32, the buffer size."""
test_basic(u"12345678901234567890123456789012")
def test_unicode():
"""Test that server and client handle encoding and decoding of unicode."""
inp = u'Testing «ταБЬℓσ»: 1<2 & 4+1>3, now 20 off!'
process = subprocess.Popen(['./echo_client.py', inp],
stdout=subprocess.PIPE)
assert inp == process.stdout.readline().rstrip().decode('utf-8')
def test_long():
"""Test server and client can handle long messages."""
test_basic(u"Running the server script in one terminal should allow you to \
run the client script in a separate terminal. The client script should\
take an argument which is the message to send. Upon completing, the \
response from the server should be printed to stdout.")
| # -*- coding: utf-8 -*-
import subprocess
import pytest
def test_basic(string="This is a test."):
"""Test function to test echo server and client with inputted string."""
process = subprocess.Popen(['./echo_client.py', string],
stdout=subprocess.PIPE)
assert string == process.stdout.readline().rstrip()
def test_exact32():
"""Test echo server and client with string length 32, the buffer size."""
test_basic("12345678901234567890123456789012")
def test_unicode():
"""Test that server and client handle decoding and encoding of unicode."""
with pytest.raises(AssertionError):
inp = 'Testing «ταБЬℓσ»: 1<2 & 4+1>3, now 20 off!'
inp = inp.decode('utf-8')
process = subprocess.Popen(['./echo_client.py', inp],
stdout=subprocess.PIPE)
assert inp == process.stdout.readline().rstrip()
def test_long():
"""Test server and client can handle long messages."""
test_basic("Running the server script in one terminal should allow you to \
run the client script in a separate terminal. The client script should\
take an argument which is the message to send. Upon completing, the \
response from the server should be printed to stdout.")
Change unicode test, to test decoding and encoding is done correctly.# -*- coding: utf-8 -*-
import subprocess
def test_basic(string=u"This is a test."):
"""Test function to test echo server and client with inputted string."""
process = subprocess.Popen(['./echo_client.py', string],
stdout=subprocess.PIPE)
assert string == process.stdout.readline().rstrip()
def test_exact32():
"""Test echo server and client with string length 32, the buffer size."""
test_basic(u"12345678901234567890123456789012")
def test_unicode():
"""Test that server and client handle encoding and decoding of unicode."""
inp = u'Testing «ταБЬℓσ»: 1<2 & 4+1>3, now 20 off!'
process = subprocess.Popen(['./echo_client.py', inp],
stdout=subprocess.PIPE)
assert inp == process.stdout.readline().rstrip().decode('utf-8')
def test_long():
"""Test server and client can handle long messages."""
test_basic(u"Running the server script in one terminal should allow you to \
run the client script in a separate terminal. The client script should\
take an argument which is the message to send. Upon completing, the \
response from the server should be printed to stdout.")
| <commit_before># -*- coding: utf-8 -*-
import subprocess
import pytest
def test_basic(string="This is a test."):
"""Test function to test echo server and client with inputted string."""
process = subprocess.Popen(['./echo_client.py', string],
stdout=subprocess.PIPE)
assert string == process.stdout.readline().rstrip()
def test_exact32():
"""Test echo server and client with string length 32, the buffer size."""
test_basic("12345678901234567890123456789012")
def test_unicode():
"""Test that server and client handle decoding and encoding of unicode."""
with pytest.raises(AssertionError):
inp = 'Testing «ταБЬℓσ»: 1<2 & 4+1>3, now 20 off!'
inp = inp.decode('utf-8')
process = subprocess.Popen(['./echo_client.py', inp],
stdout=subprocess.PIPE)
assert inp == process.stdout.readline().rstrip()
def test_long():
"""Test server and client can handle long messages."""
test_basic("Running the server script in one terminal should allow you to \
run the client script in a separate terminal. The client script should\
take an argument which is the message to send. Upon completing, the \
response from the server should be printed to stdout.")
<commit_msg>Change unicode test, to test decoding and encoding is done correctly.<commit_after># -*- coding: utf-8 -*-
import subprocess
def test_basic(string=u"This is a test."):
"""Test function to test echo server and client with inputted string."""
process = subprocess.Popen(['./echo_client.py', string],
stdout=subprocess.PIPE)
assert string == process.stdout.readline().rstrip()
def test_exact32():
"""Test echo server and client with string length 32, the buffer size."""
test_basic(u"12345678901234567890123456789012")
def test_unicode():
"""Test that server and client handle encoding and decoding of unicode."""
inp = u'Testing «ταБЬℓσ»: 1<2 & 4+1>3, now 20 off!'
process = subprocess.Popen(['./echo_client.py', inp],
stdout=subprocess.PIPE)
assert inp == process.stdout.readline().rstrip().decode('utf-8')
def test_long():
"""Test server and client can handle long messages."""
test_basic(u"Running the server script in one terminal should allow you to \
run the client script in a separate terminal. The client script should\
take an argument which is the message to send. Upon completing, the \
response from the server should be printed to stdout.")
|
54461c89b61ba118ef98389c09b45138b32224ab | stagecraft/apps/datasets/admin/backdrop_user.py | stagecraft/apps/datasets/admin/backdrop_user.py | from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
| from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
filter_horizontal = ('data_sets',)
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
| Add filter_horizontal to backdrop user admin | Add filter_horizontal to backdrop user admin
- Provides a much more usable interface to filter and add data_sets for backdrop admin users
| Python | mit | alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft | from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
Add filter_horizontal to backdrop user admin
- Provides a much more usable interface to filter and add data_sets for backdrop admin users | from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
filter_horizontal = ('data_sets',)
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
| <commit_before>from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
<commit_msg>Add filter_horizontal to backdrop user admin
- Provides a much more usable interface to filter and add data_sets for backdrop admin users<commit_after> | from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
filter_horizontal = ('data_sets',)
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
| from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
Add filter_horizontal to backdrop user admin
- Provides a much more usable interface to filter and add data_sets for backdrop admin usersfrom __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
filter_horizontal = ('data_sets',)
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
| <commit_before>from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
<commit_msg>Add filter_horizontal to backdrop user admin
- Provides a much more usable interface to filter and add data_sets for backdrop admin users<commit_after>from __future__ import unicode_literals
from django.contrib import admin
from django.db import models
import reversion
from stagecraft.apps.datasets.models.backdrop_user import BackdropUser
from stagecraft.apps.datasets.models.data_set import DataSet
class DataSetInline(admin.StackedInline):
model = DataSet
fields = ('name',)
extra = 0
class BackdropUserAdmin(reversion.VersionAdmin):
search_fields = ['email', 'data_sets']
list_display = ('email', 'numer_of_datasets_user_has_access_to',)
list_per_page = 30
filter_horizontal = ('data_sets',)
def queryset(self, request):
return BackdropUser.objects.annotate(
dataset_count=models.Count('data_sets')
)
def numer_of_datasets_user_has_access_to(self, obj):
return obj.dataset_count
numer_of_datasets_user_has_access_to.admin_order_field = 'dataset_count'
admin.site.register(BackdropUser, BackdropUserAdmin)
|
3af95029c3f784e17247abcd0123156ff9384513 | pronto/serializers/base.py | pronto/serializers/base.py | import abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
format: ClassVar[str] = NotImplemented
def __init__(self, ont: Ontology):
self.ont = ont
@abc.abstractmethod
def dump(self, file: BinaryIO, encoding: str = "utf-8") -> None:
return NotImplemented
def dumps(self) -> str:
s = io.BytesIO()
self.dump(s)
return s.getvalue().decode('utf-8')
| import abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
format: ClassVar[str] = NotImplemented
def __init__(self, ont: Ontology):
self.ont = ont
@abc.abstractmethod
def dump(self, file: BinaryIO) -> None:
return NotImplemented
def dumps(self) -> str:
s = io.BytesIO()
self.dump(s)
return s.getvalue().decode('utf-8')
| Fix signature of `BaseSerializer.dump` to remove `encoding` argument | Fix signature of `BaseSerializer.dump` to remove `encoding` argument
| Python | mit | althonos/pronto | import abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
format: ClassVar[str] = NotImplemented
def __init__(self, ont: Ontology):
self.ont = ont
@abc.abstractmethod
def dump(self, file: BinaryIO, encoding: str = "utf-8") -> None:
return NotImplemented
def dumps(self) -> str:
s = io.BytesIO()
self.dump(s)
return s.getvalue().decode('utf-8')
Fix signature of `BaseSerializer.dump` to remove `encoding` argument | import abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
format: ClassVar[str] = NotImplemented
def __init__(self, ont: Ontology):
self.ont = ont
@abc.abstractmethod
def dump(self, file: BinaryIO) -> None:
return NotImplemented
def dumps(self) -> str:
s = io.BytesIO()
self.dump(s)
return s.getvalue().decode('utf-8')
| <commit_before>import abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
format: ClassVar[str] = NotImplemented
def __init__(self, ont: Ontology):
self.ont = ont
@abc.abstractmethod
def dump(self, file: BinaryIO, encoding: str = "utf-8") -> None:
return NotImplemented
def dumps(self) -> str:
s = io.BytesIO()
self.dump(s)
return s.getvalue().decode('utf-8')
<commit_msg>Fix signature of `BaseSerializer.dump` to remove `encoding` argument<commit_after> | import abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
format: ClassVar[str] = NotImplemented
def __init__(self, ont: Ontology):
self.ont = ont
@abc.abstractmethod
def dump(self, file: BinaryIO) -> None:
return NotImplemented
def dumps(self) -> str:
s = io.BytesIO()
self.dump(s)
return s.getvalue().decode('utf-8')
| import abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
format: ClassVar[str] = NotImplemented
def __init__(self, ont: Ontology):
self.ont = ont
@abc.abstractmethod
def dump(self, file: BinaryIO, encoding: str = "utf-8") -> None:
return NotImplemented
def dumps(self) -> str:
s = io.BytesIO()
self.dump(s)
return s.getvalue().decode('utf-8')
Fix signature of `BaseSerializer.dump` to remove `encoding` argumentimport abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
format: ClassVar[str] = NotImplemented
def __init__(self, ont: Ontology):
self.ont = ont
@abc.abstractmethod
def dump(self, file: BinaryIO) -> None:
return NotImplemented
def dumps(self) -> str:
s = io.BytesIO()
self.dump(s)
return s.getvalue().decode('utf-8')
| <commit_before>import abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
format: ClassVar[str] = NotImplemented
def __init__(self, ont: Ontology):
self.ont = ont
@abc.abstractmethod
def dump(self, file: BinaryIO, encoding: str = "utf-8") -> None:
return NotImplemented
def dumps(self) -> str:
s = io.BytesIO()
self.dump(s)
return s.getvalue().decode('utf-8')
<commit_msg>Fix signature of `BaseSerializer.dump` to remove `encoding` argument<commit_after>import abc
import io
import typing
from typing import BinaryIO, ClassVar
from ..ontology import Ontology
class BaseSerializer(abc.ABC):
format: ClassVar[str] = NotImplemented
def __init__(self, ont: Ontology):
self.ont = ont
@abc.abstractmethod
def dump(self, file: BinaryIO) -> None:
return NotImplemented
def dumps(self) -> str:
s = io.BytesIO()
self.dump(s)
return s.getvalue().decode('utf-8')
|
dbc234df7541f6aab32bce0c8f8ba149f9e4ad22 | speeches/management/commands/populatespeakers.py | speeches/management/commands/populatespeakers.py | from django.core.management.base import NoArgsCommand
import pprint
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
pp = pprint.PrettyPrinter(indent=4)
# Do populating
api = PopIt(instance = 'ukcabinet', hostname = 'ukcabinet.popit.mysociety.org', api_version = 'v1')
results = api.person.get()
self.stdout.write('Names will be:\n')
for person in results['results']:
self.stdout.write('Processing person: ' + pp.pformat(person) + '\n')
try:
speaker = Speaker.objects.get(popit_id=person['_id'])
except Speaker.DoesNotExist:
speaker = Speaker()
speaker.popit_id = person['_id']
speaker.name = person['name']
speaker.save();
| from django.core.management.base import NoArgsCommand
import pprint
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
pp = pprint.PrettyPrinter(indent=4)
# Do populating
api = PopIt(instance = 'ukcabinet', hostname = 'ukcabinet.popit.mysociety.org', api_version = 'v1')
results = api.person.get()
for person in results['results']:
speaker, created = Speaker.objects.get_or_create(popit_id=person['_id'])
# we ignore created for now, just always set the name
speaker.name = person['name']
speaker.save();
| Make speaker population code use get_or_create instead of exception | Make speaker population code use get_or_create instead of exception
| Python | agpl-3.0 | opencorato/sayit,opencorato/sayit,opencorato/sayit,opencorato/sayit | from django.core.management.base import NoArgsCommand
import pprint
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
pp = pprint.PrettyPrinter(indent=4)
# Do populating
api = PopIt(instance = 'ukcabinet', hostname = 'ukcabinet.popit.mysociety.org', api_version = 'v1')
results = api.person.get()
self.stdout.write('Names will be:\n')
for person in results['results']:
self.stdout.write('Processing person: ' + pp.pformat(person) + '\n')
try:
speaker = Speaker.objects.get(popit_id=person['_id'])
except Speaker.DoesNotExist:
speaker = Speaker()
speaker.popit_id = person['_id']
speaker.name = person['name']
speaker.save();
Make speaker population code use get_or_create instead of exception | from django.core.management.base import NoArgsCommand
import pprint
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
pp = pprint.PrettyPrinter(indent=4)
# Do populating
api = PopIt(instance = 'ukcabinet', hostname = 'ukcabinet.popit.mysociety.org', api_version = 'v1')
results = api.person.get()
for person in results['results']:
speaker, created = Speaker.objects.get_or_create(popit_id=person['_id'])
# we ignore created for now, just always set the name
speaker.name = person['name']
speaker.save();
| <commit_before>from django.core.management.base import NoArgsCommand
import pprint
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
pp = pprint.PrettyPrinter(indent=4)
# Do populating
api = PopIt(instance = 'ukcabinet', hostname = 'ukcabinet.popit.mysociety.org', api_version = 'v1')
results = api.person.get()
self.stdout.write('Names will be:\n')
for person in results['results']:
self.stdout.write('Processing person: ' + pp.pformat(person) + '\n')
try:
speaker = Speaker.objects.get(popit_id=person['_id'])
except Speaker.DoesNotExist:
speaker = Speaker()
speaker.popit_id = person['_id']
speaker.name = person['name']
speaker.save();
<commit_msg>Make speaker population code use get_or_create instead of exception<commit_after> | from django.core.management.base import NoArgsCommand
import pprint
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
pp = pprint.PrettyPrinter(indent=4)
# Do populating
api = PopIt(instance = 'ukcabinet', hostname = 'ukcabinet.popit.mysociety.org', api_version = 'v1')
results = api.person.get()
for person in results['results']:
speaker, created = Speaker.objects.get_or_create(popit_id=person['_id'])
# we ignore created for now, just always set the name
speaker.name = person['name']
speaker.save();
| from django.core.management.base import NoArgsCommand
import pprint
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
pp = pprint.PrettyPrinter(indent=4)
# Do populating
api = PopIt(instance = 'ukcabinet', hostname = 'ukcabinet.popit.mysociety.org', api_version = 'v1')
results = api.person.get()
self.stdout.write('Names will be:\n')
for person in results['results']:
self.stdout.write('Processing person: ' + pp.pformat(person) + '\n')
try:
speaker = Speaker.objects.get(popit_id=person['_id'])
except Speaker.DoesNotExist:
speaker = Speaker()
speaker.popit_id = person['_id']
speaker.name = person['name']
speaker.save();
Make speaker population code use get_or_create instead of exceptionfrom django.core.management.base import NoArgsCommand
import pprint
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
pp = pprint.PrettyPrinter(indent=4)
# Do populating
api = PopIt(instance = 'ukcabinet', hostname = 'ukcabinet.popit.mysociety.org', api_version = 'v1')
results = api.person.get()
for person in results['results']:
speaker, created = Speaker.objects.get_or_create(popit_id=person['_id'])
# we ignore created for now, just always set the name
speaker.name = person['name']
speaker.save();
| <commit_before>from django.core.management.base import NoArgsCommand
import pprint
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
pp = pprint.PrettyPrinter(indent=4)
# Do populating
api = PopIt(instance = 'ukcabinet', hostname = 'ukcabinet.popit.mysociety.org', api_version = 'v1')
results = api.person.get()
self.stdout.write('Names will be:\n')
for person in results['results']:
self.stdout.write('Processing person: ' + pp.pformat(person) + '\n')
try:
speaker = Speaker.objects.get(popit_id=person['_id'])
except Speaker.DoesNotExist:
speaker = Speaker()
speaker.popit_id = person['_id']
speaker.name = person['name']
speaker.save();
<commit_msg>Make speaker population code use get_or_create instead of exception<commit_after>from django.core.management.base import NoArgsCommand
import pprint
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
pp = pprint.PrettyPrinter(indent=4)
# Do populating
api = PopIt(instance = 'ukcabinet', hostname = 'ukcabinet.popit.mysociety.org', api_version = 'v1')
results = api.person.get()
for person in results['results']:
speaker, created = Speaker.objects.get_or_create(popit_id=person['_id'])
# we ignore created for now, just always set the name
speaker.name = person['name']
speaker.save();
|
93e7703e6b7c9115e441c1a285e508a7d0738639 | mica/starcheck/__init__.py | mica/starcheck/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from .starcheck import get_starcheck_catalog, get_starcheck_catalog_at_date, get_mp_dir, get_monitor_windows
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from .starcheck import (get_starcheck_catalog, get_starcheck_catalog_at_date,
get_mp_dir, get_monitor_windows, get_dither, get_att, get_starcat)
| Add new get_ starcheck methods to exported/init methods | Add new get_ starcheck methods to exported/init methods
| Python | bsd-3-clause | sot/mica,sot/mica | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from .starcheck import get_starcheck_catalog, get_starcheck_catalog_at_date, get_mp_dir, get_monitor_windows
Add new get_ starcheck methods to exported/init methods | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from .starcheck import (get_starcheck_catalog, get_starcheck_catalog_at_date,
get_mp_dir, get_monitor_windows, get_dither, get_att, get_starcat)
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from .starcheck import get_starcheck_catalog, get_starcheck_catalog_at_date, get_mp_dir, get_monitor_windows
<commit_msg>Add new get_ starcheck methods to exported/init methods<commit_after> | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from .starcheck import (get_starcheck_catalog, get_starcheck_catalog_at_date,
get_mp_dir, get_monitor_windows, get_dither, get_att, get_starcat)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from .starcheck import get_starcheck_catalog, get_starcheck_catalog_at_date, get_mp_dir, get_monitor_windows
Add new get_ starcheck methods to exported/init methods# Licensed under a 3-clause BSD style license - see LICENSE.rst
from .starcheck import (get_starcheck_catalog, get_starcheck_catalog_at_date,
get_mp_dir, get_monitor_windows, get_dither, get_att, get_starcat)
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from .starcheck import get_starcheck_catalog, get_starcheck_catalog_at_date, get_mp_dir, get_monitor_windows
<commit_msg>Add new get_ starcheck methods to exported/init methods<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
from .starcheck import (get_starcheck_catalog, get_starcheck_catalog_at_date,
get_mp_dir, get_monitor_windows, get_dither, get_att, get_starcat)
|
ab16cd72a2f2ed093f206b48379fb9f03f8d2f36 | tests/example.py | tests/example.py | import unittest
from src.dojo import Dojo
"""
Call "python -m tests.example" while being in parent directory to run tests in this file.
"""
class ExampleTest(unittest.TestCase):
def test_example(self):
dojo = Dojo()
self.assertEqual(dojo.get_random_number(), 4)
if __name__ == '__main__':
unittest.main()
| import unittest
from src.dojo import Dojo
"""
Call "python -m tests.example" while being in parent directory to run tests in this file.
"""
class ExampleTest(unittest.TestCase):
def setUp(self):
""" This method will be called *before* each test run. """
pass
def tearDown(self):
""" This method will be called *after* each test run. """
pass
def test_example(self):
dojo = Dojo()
self.assertEqual(dojo.get_random_number(), 4)
if __name__ == '__main__':
unittest.main()
| Add setUp and tearDown to test file | Add setUp and tearDown to test file
| Python | mit | pawel-lewtak/coding-dojo-template-python | import unittest
from src.dojo import Dojo
"""
Call "python -m tests.example" while being in parent directory to run tests in this file.
"""
class ExampleTest(unittest.TestCase):
def test_example(self):
dojo = Dojo()
self.assertEqual(dojo.get_random_number(), 4)
if __name__ == '__main__':
unittest.main()
Add setUp and tearDown to test file | import unittest
from src.dojo import Dojo
"""
Call "python -m tests.example" while being in parent directory to run tests in this file.
"""
class ExampleTest(unittest.TestCase):
def setUp(self):
""" This method will be called *before* each test run. """
pass
def tearDown(self):
""" This method will be called *after* each test run. """
pass
def test_example(self):
dojo = Dojo()
self.assertEqual(dojo.get_random_number(), 4)
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
from src.dojo import Dojo
"""
Call "python -m tests.example" while being in parent directory to run tests in this file.
"""
class ExampleTest(unittest.TestCase):
def test_example(self):
dojo = Dojo()
self.assertEqual(dojo.get_random_number(), 4)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add setUp and tearDown to test file<commit_after> | import unittest
from src.dojo import Dojo
"""
Call "python -m tests.example" while being in parent directory to run tests in this file.
"""
class ExampleTest(unittest.TestCase):
def setUp(self):
""" This method will be called *before* each test run. """
pass
def tearDown(self):
""" This method will be called *after* each test run. """
pass
def test_example(self):
dojo = Dojo()
self.assertEqual(dojo.get_random_number(), 4)
if __name__ == '__main__':
unittest.main()
| import unittest
from src.dojo import Dojo
"""
Call "python -m tests.example" while being in parent directory to run tests in this file.
"""
class ExampleTest(unittest.TestCase):
def test_example(self):
dojo = Dojo()
self.assertEqual(dojo.get_random_number(), 4)
if __name__ == '__main__':
unittest.main()
Add setUp and tearDown to test fileimport unittest
from src.dojo import Dojo
"""
Call "python -m tests.example" while being in parent directory to run tests in this file.
"""
class ExampleTest(unittest.TestCase):
def setUp(self):
""" This method will be called *before* each test run. """
pass
def tearDown(self):
""" This method will be called *after* each test run. """
pass
def test_example(self):
dojo = Dojo()
self.assertEqual(dojo.get_random_number(), 4)
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
from src.dojo import Dojo
"""
Call "python -m tests.example" while being in parent directory to run tests in this file.
"""
class ExampleTest(unittest.TestCase):
def test_example(self):
dojo = Dojo()
self.assertEqual(dojo.get_random_number(), 4)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add setUp and tearDown to test file<commit_after>import unittest
from src.dojo import Dojo
"""
Call "python -m tests.example" while being in parent directory to run tests in this file.
"""
class ExampleTest(unittest.TestCase):
def setUp(self):
""" This method will be called *before* each test run. """
pass
def tearDown(self):
""" This method will be called *after* each test run. """
pass
def test_example(self):
dojo = Dojo()
self.assertEqual(dojo.get_random_number(), 4)
if __name__ == '__main__':
unittest.main()
|
0ff0a3137ea938b7db8167d132b08b9e8620e864 | contrib/internal/run-pyflakes.py | contrib/internal/run-pyflakes.py | #!/usr/bin/env python
#
# Utility script to run pyflakes with the modules we care about and
# exclude errors we know to be fine.
import os
import subprocess
import sys
module_exclusions = [
'djblets',
'django_evolution',
'dist',
'ez_setup.py',
'settings_local.py',
'ReviewBoard.egg-info',
]
def scan_for_modules():
return [entry
for entry in os.listdir(os.getcwd())
if ((os.path.isdir(entry) or entry.endswith(".py")) and
entry not in module_exclusions)]
def main():
cur_dir = os.path.dirname(__file__)
os.chdir(os.path.join(cur_dir, "..", ".."))
modules = sys.argv[1:]
if not modules:
# The user didn't specify anything specific. Scan for modules.
modules = scan_for_modules()
p = subprocess.Popen(['pyflakes'] + modules,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
contents = p.stdout.readlines()
# Read in the exclusions file
exclusions = {}
fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")
for line in fp.readlines():
exclusions[line.rstrip()] = 1
fp.close()
# Now filter thin
for line in contents:
line = line.rstrip()
if line not in exclusions:
print line
if __name__ == "__main__":
main()
| #!/usr/bin/env python
#
# Utility script to run pyflakes with the modules we care about and
# exclude errors we know to be fine.
import os
import subprocess
import sys
module_exclusions = [
'djblets',
'django_evolution',
'dist',
'ez_setup.py',
'htdocs',
'settings_local.py',
'ReviewBoard.egg-info',
]
def scan_for_modules():
return [entry
for entry in os.listdir(os.getcwd())
if ((os.path.isdir(entry) or entry.endswith(".py")) and
entry not in module_exclusions)]
def main():
cur_dir = os.path.dirname(__file__)
os.chdir(os.path.join(cur_dir, "..", ".."))
modules = sys.argv[1:]
if not modules:
# The user didn't specify anything specific. Scan for modules.
modules = scan_for_modules()
p = subprocess.Popen(['pyflakes'] + modules,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
contents = p.stdout.readlines()
# Read in the exclusions file
exclusions = {}
fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")
for line in fp.readlines():
exclusions[line.rstrip()] = 1
fp.close()
# Now filter thin
for line in contents:
line = line.rstrip()
if line not in exclusions:
print line
if __name__ == "__main__":
main()
| Exclude htdocs, because that just takes way too long to scan. | Exclude htdocs, because that just takes way too long to scan.
| Python | mit | atagar/ReviewBoard,atagar/ReviewBoard,sgallagher/reviewboard,chazy/reviewboard,chipx86/reviewboard,Khan/reviewboard,custode/reviewboard,atagar/ReviewBoard,Khan/reviewboard,chazy/reviewboard,atagar/ReviewBoard,bkochendorfer/reviewboard,davidt/reviewboard,asutherland/opc-reviewboard,Khan/reviewboard,Khan/reviewboard,beol/reviewboard,beol/reviewboard,Khan/reviewboard,sgallagher/reviewboard,Khan/reviewboard,brennie/reviewboard,chipx86/reviewboard,brennie/reviewboard,atagar/ReviewBoard,KnowNo/reviewboard,atagar/ReviewBoard,1tush/reviewboard,1tush/reviewboard,chazy/reviewboard,atagar/ReviewBoard,asutherland/opc-reviewboard,chazy/reviewboard,chipx86/reviewboard,asutherland/opc-reviewboard,Khan/reviewboard,chazy/reviewboard,beol/reviewboard,chipx86/reviewboard,reviewboard/reviewboard,custode/reviewboard,bkochendorfer/reviewboard,KnowNo/reviewboard,Khan/reviewboard,1tush/reviewboard,chazy/reviewboard,atagar/ReviewBoard,asutherland/opc-reviewboard,brennie/reviewboard,sgallagher/reviewboard,custode/reviewboard,sgallagher/reviewboard,KnowNo/reviewboard,atagar/ReviewBoard,beol/reviewboard,brennie/reviewboard,Khan/reviewboard,KnowNo/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard,1tush/reviewboard,davidt/reviewboard,1tush/reviewboard,reviewboard/reviewboard,custode/reviewboard,reviewboard/reviewboard,reviewboard/reviewboard,bkochendorfer/reviewboard,chazy/reviewboard,chazy/reviewboard,davidt/reviewboard,1tush/reviewboard,1tush/reviewboard,1tush/reviewboard,chazy/reviewboard,davidt/reviewboard | #!/usr/bin/env python
#
# Utility script to run pyflakes with the modules we care about and
# exclude errors we know to be fine.
import os
import subprocess
import sys
module_exclusions = [
'djblets',
'django_evolution',
'dist',
'ez_setup.py',
'settings_local.py',
'ReviewBoard.egg-info',
]
def scan_for_modules():
return [entry
for entry in os.listdir(os.getcwd())
if ((os.path.isdir(entry) or entry.endswith(".py")) and
entry not in module_exclusions)]
def main():
cur_dir = os.path.dirname(__file__)
os.chdir(os.path.join(cur_dir, "..", ".."))
modules = sys.argv[1:]
if not modules:
# The user didn't specify anything specific. Scan for modules.
modules = scan_for_modules()
p = subprocess.Popen(['pyflakes'] + modules,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
contents = p.stdout.readlines()
# Read in the exclusions file
exclusions = {}
fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")
for line in fp.readlines():
exclusions[line.rstrip()] = 1
fp.close()
# Now filter thin
for line in contents:
line = line.rstrip()
if line not in exclusions:
print line
if __name__ == "__main__":
main()
Exclude htdocs, because that just takes way too long to scan. | #!/usr/bin/env python
#
# Utility script to run pyflakes with the modules we care about and
# exclude errors we know to be fine.
import os
import subprocess
import sys
module_exclusions = [
'djblets',
'django_evolution',
'dist',
'ez_setup.py',
'htdocs',
'settings_local.py',
'ReviewBoard.egg-info',
]
def scan_for_modules():
return [entry
for entry in os.listdir(os.getcwd())
if ((os.path.isdir(entry) or entry.endswith(".py")) and
entry not in module_exclusions)]
def main():
cur_dir = os.path.dirname(__file__)
os.chdir(os.path.join(cur_dir, "..", ".."))
modules = sys.argv[1:]
if not modules:
# The user didn't specify anything specific. Scan for modules.
modules = scan_for_modules()
p = subprocess.Popen(['pyflakes'] + modules,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
contents = p.stdout.readlines()
# Read in the exclusions file
exclusions = {}
fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")
for line in fp.readlines():
exclusions[line.rstrip()] = 1
fp.close()
# Now filter thin
for line in contents:
line = line.rstrip()
if line not in exclusions:
print line
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
#
# Utility script to run pyflakes with the modules we care about and
# exclude errors we know to be fine.
import os
import subprocess
import sys
module_exclusions = [
'djblets',
'django_evolution',
'dist',
'ez_setup.py',
'settings_local.py',
'ReviewBoard.egg-info',
]
def scan_for_modules():
return [entry
for entry in os.listdir(os.getcwd())
if ((os.path.isdir(entry) or entry.endswith(".py")) and
entry not in module_exclusions)]
def main():
cur_dir = os.path.dirname(__file__)
os.chdir(os.path.join(cur_dir, "..", ".."))
modules = sys.argv[1:]
if not modules:
# The user didn't specify anything specific. Scan for modules.
modules = scan_for_modules()
p = subprocess.Popen(['pyflakes'] + modules,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
contents = p.stdout.readlines()
# Read in the exclusions file
exclusions = {}
fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")
for line in fp.readlines():
exclusions[line.rstrip()] = 1
fp.close()
# Now filter thin
for line in contents:
line = line.rstrip()
if line not in exclusions:
print line
if __name__ == "__main__":
main()
<commit_msg>Exclude htdocs, because that just takes way too long to scan.<commit_after> | #!/usr/bin/env python
#
# Utility script to run pyflakes with the modules we care about and
# exclude errors we know to be fine.
import os
import subprocess
import sys
module_exclusions = [
'djblets',
'django_evolution',
'dist',
'ez_setup.py',
'htdocs',
'settings_local.py',
'ReviewBoard.egg-info',
]
def scan_for_modules():
return [entry
for entry in os.listdir(os.getcwd())
if ((os.path.isdir(entry) or entry.endswith(".py")) and
entry not in module_exclusions)]
def main():
cur_dir = os.path.dirname(__file__)
os.chdir(os.path.join(cur_dir, "..", ".."))
modules = sys.argv[1:]
if not modules:
# The user didn't specify anything specific. Scan for modules.
modules = scan_for_modules()
p = subprocess.Popen(['pyflakes'] + modules,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
contents = p.stdout.readlines()
# Read in the exclusions file
exclusions = {}
fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")
for line in fp.readlines():
exclusions[line.rstrip()] = 1
fp.close()
# Now filter thin
for line in contents:
line = line.rstrip()
if line not in exclusions:
print line
if __name__ == "__main__":
main()
| #!/usr/bin/env python
#
# Utility script to run pyflakes with the modules we care about and
# exclude errors we know to be fine.
import os
import subprocess
import sys
module_exclusions = [
'djblets',
'django_evolution',
'dist',
'ez_setup.py',
'settings_local.py',
'ReviewBoard.egg-info',
]
def scan_for_modules():
return [entry
for entry in os.listdir(os.getcwd())
if ((os.path.isdir(entry) or entry.endswith(".py")) and
entry not in module_exclusions)]
def main():
cur_dir = os.path.dirname(__file__)
os.chdir(os.path.join(cur_dir, "..", ".."))
modules = sys.argv[1:]
if not modules:
# The user didn't specify anything specific. Scan for modules.
modules = scan_for_modules()
p = subprocess.Popen(['pyflakes'] + modules,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
contents = p.stdout.readlines()
# Read in the exclusions file
exclusions = {}
fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")
for line in fp.readlines():
exclusions[line.rstrip()] = 1
fp.close()
# Now filter thin
for line in contents:
line = line.rstrip()
if line not in exclusions:
print line
if __name__ == "__main__":
main()
Exclude htdocs, because that just takes way too long to scan.#!/usr/bin/env python
#
# Utility script to run pyflakes with the modules we care about and
# exclude errors we know to be fine.
import os
import subprocess
import sys
module_exclusions = [
'djblets',
'django_evolution',
'dist',
'ez_setup.py',
'htdocs',
'settings_local.py',
'ReviewBoard.egg-info',
]
def scan_for_modules():
return [entry
for entry in os.listdir(os.getcwd())
if ((os.path.isdir(entry) or entry.endswith(".py")) and
entry not in module_exclusions)]
def main():
cur_dir = os.path.dirname(__file__)
os.chdir(os.path.join(cur_dir, "..", ".."))
modules = sys.argv[1:]
if not modules:
# The user didn't specify anything specific. Scan for modules.
modules = scan_for_modules()
p = subprocess.Popen(['pyflakes'] + modules,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
contents = p.stdout.readlines()
# Read in the exclusions file
exclusions = {}
fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")
for line in fp.readlines():
exclusions[line.rstrip()] = 1
fp.close()
# Now filter thin
for line in contents:
line = line.rstrip()
if line not in exclusions:
print line
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
#
# Utility script to run pyflakes with the modules we care about and
# exclude errors we know to be fine.
import os
import subprocess
import sys
module_exclusions = [
'djblets',
'django_evolution',
'dist',
'ez_setup.py',
'settings_local.py',
'ReviewBoard.egg-info',
]
def scan_for_modules():
return [entry
for entry in os.listdir(os.getcwd())
if ((os.path.isdir(entry) or entry.endswith(".py")) and
entry not in module_exclusions)]
def main():
cur_dir = os.path.dirname(__file__)
os.chdir(os.path.join(cur_dir, "..", ".."))
modules = sys.argv[1:]
if not modules:
# The user didn't specify anything specific. Scan for modules.
modules = scan_for_modules()
p = subprocess.Popen(['pyflakes'] + modules,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
contents = p.stdout.readlines()
# Read in the exclusions file
exclusions = {}
fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")
for line in fp.readlines():
exclusions[line.rstrip()] = 1
fp.close()
# Now filter thin
for line in contents:
line = line.rstrip()
if line not in exclusions:
print line
if __name__ == "__main__":
main()
<commit_msg>Exclude htdocs, because that just takes way too long to scan.<commit_after>#!/usr/bin/env python
#
# Utility script to run pyflakes with the modules we care about and
# exclude errors we know to be fine.
import os
import subprocess
import sys
module_exclusions = [
'djblets',
'django_evolution',
'dist',
'ez_setup.py',
'htdocs',
'settings_local.py',
'ReviewBoard.egg-info',
]
def scan_for_modules():
return [entry
for entry in os.listdir(os.getcwd())
if ((os.path.isdir(entry) or entry.endswith(".py")) and
entry not in module_exclusions)]
def main():
cur_dir = os.path.dirname(__file__)
os.chdir(os.path.join(cur_dir, "..", ".."))
modules = sys.argv[1:]
if not modules:
# The user didn't specify anything specific. Scan for modules.
modules = scan_for_modules()
p = subprocess.Popen(['pyflakes'] + modules,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
contents = p.stdout.readlines()
# Read in the exclusions file
exclusions = {}
fp = open(os.path.join(cur_dir, "pyflakes.exclude"), "r")
for line in fp.readlines():
exclusions[line.rstrip()] = 1
fp.close()
# Now filter thin
for line in contents:
line = line.rstrip()
if line not in exclusions:
print line
if __name__ == "__main__":
main()
|
16d928d67b76843d8eeea4e145e43e3d073b1410 | qiskit/__init__.py | qiskit/__init__.py | from ._classicalregister import ClassicalRegister
from ._quantumregister import QuantumRegister
from ._quantumcircuit import QuantumCircuit
from ._gate import Gate
from ._compositegate import CompositeGate
from ._instruction import Instruction
from ._instructionset import InstructionSet
from ._qiskiterror import QISKitError
import qiskit.extensions.standard
from ._jobprocessor import JobProcessor, QuantumJob
from ._quantumprogram import QuantumProgram
from ._quantumprogram import Result
__version__ = '0.3.5'
| from ._classicalregister import ClassicalRegister
from ._quantumregister import QuantumRegister
from ._quantumcircuit import QuantumCircuit
from ._gate import Gate
from ._compositegate import CompositeGate
from ._instruction import Instruction
from ._instructionset import InstructionSet
from ._qiskiterror import QISKitError
import qiskit.extensions.standard
from ._jobprocessor import JobProcessor, QuantumJob
from ._quantumprogram import QuantumProgram
from ._quantumprogram import Result
__version__ = '0.4.0'
| Change version number to next major stable | Change version number to next major stable
Master branch is used as a development branch, so the version number should point to the next major stable version. | Python | apache-2.0 | ChristopheVuillot/qiskit-sdk-py,ChristopheVuillot/qiskit-sdk-py,QISKit/qiskit-sdk-py,ChristopheVuillot/qiskit-sdk-py,atilag/qiskit-sdk-py,atilag/qiskit-sdk-py,atilag/qiskit-sdk-py,QISKit/qiskit-sdk-py,QISKit/qiskit-sdk-py | from ._classicalregister import ClassicalRegister
from ._quantumregister import QuantumRegister
from ._quantumcircuit import QuantumCircuit
from ._gate import Gate
from ._compositegate import CompositeGate
from ._instruction import Instruction
from ._instructionset import InstructionSet
from ._qiskiterror import QISKitError
import qiskit.extensions.standard
from ._jobprocessor import JobProcessor, QuantumJob
from ._quantumprogram import QuantumProgram
from ._quantumprogram import Result
__version__ = '0.3.5'
Change version number to next major stable
Master branch is used as a development branch, so the version number should point to the next major stable version. | from ._classicalregister import ClassicalRegister
from ._quantumregister import QuantumRegister
from ._quantumcircuit import QuantumCircuit
from ._gate import Gate
from ._compositegate import CompositeGate
from ._instruction import Instruction
from ._instructionset import InstructionSet
from ._qiskiterror import QISKitError
import qiskit.extensions.standard
from ._jobprocessor import JobProcessor, QuantumJob
from ._quantumprogram import QuantumProgram
from ._quantumprogram import Result
__version__ = '0.4.0'
| <commit_before>from ._classicalregister import ClassicalRegister
from ._quantumregister import QuantumRegister
from ._quantumcircuit import QuantumCircuit
from ._gate import Gate
from ._compositegate import CompositeGate
from ._instruction import Instruction
from ._instructionset import InstructionSet
from ._qiskiterror import QISKitError
import qiskit.extensions.standard
from ._jobprocessor import JobProcessor, QuantumJob
from ._quantumprogram import QuantumProgram
from ._quantumprogram import Result
__version__ = '0.3.5'
<commit_msg>Change version number to next major stable
Master branch is used as a development branch, so the version number should point to the next major stable version.<commit_after> | from ._classicalregister import ClassicalRegister
from ._quantumregister import QuantumRegister
from ._quantumcircuit import QuantumCircuit
from ._gate import Gate
from ._compositegate import CompositeGate
from ._instruction import Instruction
from ._instructionset import InstructionSet
from ._qiskiterror import QISKitError
import qiskit.extensions.standard
from ._jobprocessor import JobProcessor, QuantumJob
from ._quantumprogram import QuantumProgram
from ._quantumprogram import Result
__version__ = '0.4.0'
| from ._classicalregister import ClassicalRegister
from ._quantumregister import QuantumRegister
from ._quantumcircuit import QuantumCircuit
from ._gate import Gate
from ._compositegate import CompositeGate
from ._instruction import Instruction
from ._instructionset import InstructionSet
from ._qiskiterror import QISKitError
import qiskit.extensions.standard
from ._jobprocessor import JobProcessor, QuantumJob
from ._quantumprogram import QuantumProgram
from ._quantumprogram import Result
__version__ = '0.3.5'
Change version number to next major stable
Master branch is used as a development branch, so the version number should point to the next major stable version.from ._classicalregister import ClassicalRegister
from ._quantumregister import QuantumRegister
from ._quantumcircuit import QuantumCircuit
from ._gate import Gate
from ._compositegate import CompositeGate
from ._instruction import Instruction
from ._instructionset import InstructionSet
from ._qiskiterror import QISKitError
import qiskit.extensions.standard
from ._jobprocessor import JobProcessor, QuantumJob
from ._quantumprogram import QuantumProgram
from ._quantumprogram import Result
__version__ = '0.4.0'
| <commit_before>from ._classicalregister import ClassicalRegister
from ._quantumregister import QuantumRegister
from ._quantumcircuit import QuantumCircuit
from ._gate import Gate
from ._compositegate import CompositeGate
from ._instruction import Instruction
from ._instructionset import InstructionSet
from ._qiskiterror import QISKitError
import qiskit.extensions.standard
from ._jobprocessor import JobProcessor, QuantumJob
from ._quantumprogram import QuantumProgram
from ._quantumprogram import Result
__version__ = '0.3.5'
<commit_msg>Change version number to next major stable
Master branch is used as a development branch, so the version number should point to the next major stable version.<commit_after>from ._classicalregister import ClassicalRegister
from ._quantumregister import QuantumRegister
from ._quantumcircuit import QuantumCircuit
from ._gate import Gate
from ._compositegate import CompositeGate
from ._instruction import Instruction
from ._instructionset import InstructionSet
from ._qiskiterror import QISKitError
import qiskit.extensions.standard
from ._jobprocessor import JobProcessor, QuantumJob
from ._quantumprogram import QuantumProgram
from ._quantumprogram import Result
__version__ = '0.4.0'
|
84257aaf865a913a0ed1888cdf6ce6f22d63f4e5 | tests/functional/firefox/os/test_fxos_navigation.py | tests/functional/firefox/os/test_fxos_navigation.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.fxos_navigation import FirefoxPage
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_active_nav(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
assert page.fxos_navigation.active_primary_nav_id == slug.replace('/', '-')
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_adjunct_menu(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
page.fxos_navigation.open_adjunct_menu()
assert page.fxos_navigation.is_adjunct_menu_displayed
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.family_navigation import FirefoxPage
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_active_nav(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
assert page.fxos_navigation.active_primary_nav_id == slug.replace('/', '-')
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_adjunct_menu(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
page.fxos_navigation.open_adjunct_menu()
assert page.fxos_navigation.is_adjunct_menu_displayed
| Fix incorrect import for FxOS nav tests. | Fix incorrect import for FxOS nav tests.
| Python | mpl-2.0 | mozilla/bedrock,Sancus/bedrock,TheJJ100100/bedrock,flodolo/bedrock,MichaelKohler/bedrock,Sancus/bedrock,ericawright/bedrock,sgarrity/bedrock,CSCI-462-01-2017/bedrock,jgmize/bedrock,TheoChevalier/bedrock,sylvestre/bedrock,analytics-pros/mozilla-bedrock,gerv/bedrock,mkmelin/bedrock,analytics-pros/mozilla-bedrock,kyoshino/bedrock,craigcook/bedrock,Sancus/bedrock,CSCI-462-01-2017/bedrock,MichaelKohler/bedrock,flodolo/bedrock,kyoshino/bedrock,ericawright/bedrock,craigcook/bedrock,mozilla/bedrock,sylvestre/bedrock,TheJJ100100/bedrock,TheJJ100100/bedrock,TheoChevalier/bedrock,sgarrity/bedrock,flodolo/bedrock,craigcook/bedrock,sgarrity/bedrock,schalkneethling/bedrock,gerv/bedrock,glogiotatidis/bedrock,pascalchevrel/bedrock,mozilla/bedrock,hoosteeno/bedrock,alexgibson/bedrock,glogiotatidis/bedrock,MichaelKohler/bedrock,mkmelin/bedrock,Sancus/bedrock,ericawright/bedrock,flodolo/bedrock,craigcook/bedrock,hoosteeno/bedrock,kyoshino/bedrock,TheJJ100100/bedrock,alexgibson/bedrock,gerv/bedrock,analytics-pros/mozilla-bedrock,schalkneethling/bedrock,kyoshino/bedrock,CSCI-462-01-2017/bedrock,mkmelin/bedrock,jgmize/bedrock,analytics-pros/mozilla-bedrock,MichaelKohler/bedrock,ericawright/bedrock,sylvestre/bedrock,pascalchevrel/bedrock,schalkneethling/bedrock,TheoChevalier/bedrock,CSCI-462-01-2017/bedrock,glogiotatidis/bedrock,mozilla/bedrock,pascalchevrel/bedrock,schalkneethling/bedrock,sgarrity/bedrock,alexgibson/bedrock,hoosteeno/bedrock,sylvestre/bedrock,alexgibson/bedrock,pascalchevrel/bedrock,glogiotatidis/bedrock,TheoChevalier/bedrock,gerv/bedrock,jgmize/bedrock,mkmelin/bedrock,hoosteeno/bedrock,jgmize/bedrock | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.fxos_navigation import FirefoxPage
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_active_nav(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
assert page.fxos_navigation.active_primary_nav_id == slug.replace('/', '-')
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_adjunct_menu(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
page.fxos_navigation.open_adjunct_menu()
assert page.fxos_navigation.is_adjunct_menu_displayed
Fix incorrect import for FxOS nav tests. | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.family_navigation import FirefoxPage
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_active_nav(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
assert page.fxos_navigation.active_primary_nav_id == slug.replace('/', '-')
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_adjunct_menu(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
page.fxos_navigation.open_adjunct_menu()
assert page.fxos_navigation.is_adjunct_menu_displayed
| <commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.fxos_navigation import FirefoxPage
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_active_nav(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
assert page.fxos_navigation.active_primary_nav_id == slug.replace('/', '-')
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_adjunct_menu(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
page.fxos_navigation.open_adjunct_menu()
assert page.fxos_navigation.is_adjunct_menu_displayed
<commit_msg>Fix incorrect import for FxOS nav tests.<commit_after> | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.family_navigation import FirefoxPage
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_active_nav(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
assert page.fxos_navigation.active_primary_nav_id == slug.replace('/', '-')
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_adjunct_menu(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
page.fxos_navigation.open_adjunct_menu()
assert page.fxos_navigation.is_adjunct_menu_displayed
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.fxos_navigation import FirefoxPage
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_active_nav(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
assert page.fxos_navigation.active_primary_nav_id == slug.replace('/', '-')
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_adjunct_menu(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
page.fxos_navigation.open_adjunct_menu()
assert page.fxos_navigation.is_adjunct_menu_displayed
Fix incorrect import for FxOS nav tests.# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.family_navigation import FirefoxPage
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_active_nav(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
assert page.fxos_navigation.active_primary_nav_id == slug.replace('/', '-')
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_adjunct_menu(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
page.fxos_navigation.open_adjunct_menu()
assert page.fxos_navigation.is_adjunct_menu_displayed
| <commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.fxos_navigation import FirefoxPage
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_active_nav(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
assert page.fxos_navigation.active_primary_nav_id == slug.replace('/', '-')
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_adjunct_menu(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
page.fxos_navigation.open_adjunct_menu()
assert page.fxos_navigation.is_adjunct_menu_displayed
<commit_msg>Fix incorrect import for FxOS nav tests.<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.family_navigation import FirefoxPage
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_active_nav(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
assert page.fxos_navigation.active_primary_nav_id == slug.replace('/', '-')
@pytest.mark.nondestructive
@pytest.mark.parametrize('slug', [
pytest.mark.smoke(('os')),
pytest.mark.smoke(('os/devices')),
pytest.mark.smoke(('os/devices/tv'))])
def test_fxos_navigation_adjunct_menu(slug, base_url, selenium):
page = FirefoxPage(base_url, selenium, slug=slug).open()
page.fxos_navigation.open_adjunct_menu()
assert page.fxos_navigation.is_adjunct_menu_displayed
|
63146c651817ebad688bbcd154fefbf109a39ba0 | server.py | server.py | from igc.util import cache, util
util.setupLog()
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///./sqllite.db"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
cache.initalizeCache()
for x in range(5):
print "Starting Cache Thread: " + str(x)
thread = cache.CacheThread()
thread.start()
thread = cache.CacheSchedulerThread()
thread.start()
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('static', path)
app.run(debug=False, port=5000) | from igc.util import cache, util
util.setupLog()
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///./sqllite.db"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
cache.initalizeCache()
for x in range(3):
print "Starting Cache Thread: " + str(x)
thread = cache.CacheThread()
thread.start()
thread = cache.CacheSchedulerThread()
thread.start()
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('static', path)
app.run(debug=False, port=5000) | Scale back threading, beacause Rasp Pi doesn't have enough threads | Scale back threading, beacause Rasp Pi doesn't have enough threads
| Python | agpl-3.0 | pac-club-2017/instant-grade-checker,pac-club-2017/instant-grade-checker,pac-club-2017/instant-grade-checker,pac-club-2017/instant-grade-checker | from igc.util import cache, util
util.setupLog()
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///./sqllite.db"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
cache.initalizeCache()
for x in range(5):
print "Starting Cache Thread: " + str(x)
thread = cache.CacheThread()
thread.start()
thread = cache.CacheSchedulerThread()
thread.start()
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('static', path)
app.run(debug=False, port=5000)Scale back threading, beacause Rasp Pi doesn't have enough threads | from igc.util import cache, util
util.setupLog()
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///./sqllite.db"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
cache.initalizeCache()
for x in range(3):
print "Starting Cache Thread: " + str(x)
thread = cache.CacheThread()
thread.start()
thread = cache.CacheSchedulerThread()
thread.start()
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('static', path)
app.run(debug=False, port=5000) | <commit_before>from igc.util import cache, util
util.setupLog()
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///./sqllite.db"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
cache.initalizeCache()
for x in range(5):
print "Starting Cache Thread: " + str(x)
thread = cache.CacheThread()
thread.start()
thread = cache.CacheSchedulerThread()
thread.start()
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('static', path)
app.run(debug=False, port=5000)<commit_msg>Scale back threading, beacause Rasp Pi doesn't have enough threads<commit_after> | from igc.util import cache, util
util.setupLog()
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///./sqllite.db"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
cache.initalizeCache()
for x in range(3):
print "Starting Cache Thread: " + str(x)
thread = cache.CacheThread()
thread.start()
thread = cache.CacheSchedulerThread()
thread.start()
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('static', path)
app.run(debug=False, port=5000) | from igc.util import cache, util
util.setupLog()
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///./sqllite.db"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
cache.initalizeCache()
for x in range(5):
print "Starting Cache Thread: " + str(x)
thread = cache.CacheThread()
thread.start()
thread = cache.CacheSchedulerThread()
thread.start()
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('static', path)
app.run(debug=False, port=5000)Scale back threading, beacause Rasp Pi doesn't have enough threadsfrom igc.util import cache, util
util.setupLog()
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///./sqllite.db"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
cache.initalizeCache()
for x in range(3):
print "Starting Cache Thread: " + str(x)
thread = cache.CacheThread()
thread.start()
thread = cache.CacheSchedulerThread()
thread.start()
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('static', path)
app.run(debug=False, port=5000) | <commit_before>from igc.util import cache, util
util.setupLog()
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///./sqllite.db"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
cache.initalizeCache()
for x in range(5):
print "Starting Cache Thread: " + str(x)
thread = cache.CacheThread()
thread.start()
thread = cache.CacheSchedulerThread()
thread.start()
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('static', path)
app.run(debug=False, port=5000)<commit_msg>Scale back threading, beacause Rasp Pi doesn't have enough threads<commit_after>from igc.util import cache, util
util.setupLog()
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///./sqllite.db"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
cache.initalizeCache()
for x in range(3):
print "Starting Cache Thread: " + str(x)
thread = cache.CacheThread()
thread.start()
thread = cache.CacheSchedulerThread()
thread.start()
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('static', path)
app.run(debug=False, port=5000) |
f996038681aed05645164642c8fed7d46735ca4b | deferrable/backend/sqs.py | deferrable/backend/sqs.py | from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_or_thunk, visibility_timeout=30, wait_time=10, create_if_missing=False):
if callable(sqs_connection_or_thunk):
self.sqs_connection_thunk = sqs_connection_or_thunk
else:
self.sqs_connection_thunk = lambda: sqs_connection_or_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
self.create_if_missing = create_if_missing
def _create_backend_for_group(self, group):
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
| from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10, create_if_missing=False):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
self.create_if_missing = create_if_missing
def _create_backend_for_group(self, group):
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
| Remove option for non-thunk SQS initialization | Remove option for non-thunk SQS initialization
| Python | mit | gamechanger/deferrable | from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_or_thunk, visibility_timeout=30, wait_time=10, create_if_missing=False):
if callable(sqs_connection_or_thunk):
self.sqs_connection_thunk = sqs_connection_or_thunk
else:
self.sqs_connection_thunk = lambda: sqs_connection_or_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
self.create_if_missing = create_if_missing
def _create_backend_for_group(self, group):
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
Remove option for non-thunk SQS initialization | from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10, create_if_missing=False):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
self.create_if_missing = create_if_missing
def _create_backend_for_group(self, group):
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
| <commit_before>from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_or_thunk, visibility_timeout=30, wait_time=10, create_if_missing=False):
if callable(sqs_connection_or_thunk):
self.sqs_connection_thunk = sqs_connection_or_thunk
else:
self.sqs_connection_thunk = lambda: sqs_connection_or_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
self.create_if_missing = create_if_missing
def _create_backend_for_group(self, group):
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
<commit_msg>Remove option for non-thunk SQS initialization<commit_after> | from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10, create_if_missing=False):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
self.create_if_missing = create_if_missing
def _create_backend_for_group(self, group):
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
| from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_or_thunk, visibility_timeout=30, wait_time=10, create_if_missing=False):
if callable(sqs_connection_or_thunk):
self.sqs_connection_thunk = sqs_connection_or_thunk
else:
self.sqs_connection_thunk = lambda: sqs_connection_or_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
self.create_if_missing = create_if_missing
def _create_backend_for_group(self, group):
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
Remove option for non-thunk SQS initializationfrom .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10, create_if_missing=False):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
self.create_if_missing = create_if_missing
def _create_backend_for_group(self, group):
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
| <commit_before>from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_or_thunk, visibility_timeout=30, wait_time=10, create_if_missing=False):
if callable(sqs_connection_or_thunk):
self.sqs_connection_thunk = sqs_connection_or_thunk
else:
self.sqs_connection_thunk = lambda: sqs_connection_or_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
self.create_if_missing = create_if_missing
def _create_backend_for_group(self, group):
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
<commit_msg>Remove option for non-thunk SQS initialization<commit_after>from .base import BackendFactory, Backend
from ..queue.sqs import SQSQueue
class SQSBackendFactory(BackendFactory):
def __init__(self, sqs_connection_thunk, visibility_timeout=30, wait_time=10, create_if_missing=False):
"""To allow backends to be initialized lazily, this factory requires a thunk
(parameter-less closure) which returns an initialized SQS connection. This thunk
is called as late as possible to initialize the connection and perform operations
against the SQS API. We do this so that backends can be made available at import time
without requiring a connection to be created at import time as well."""
self.sqs_connection_thunk = sqs_connection_thunk
self.visibility_timeout = visibility_timeout
self.wait_time = wait_time
self.create_if_missing = create_if_missing
def _create_backend_for_group(self, group):
queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name(group),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
error_queue = SQSQueue(self.sqs_connection_thunk,
self._queue_name('{}_error'.format(group)),
self.visibility_timeout,
self.wait_time,
create_if_missing=self.create_if_missing)
return SQSBackend(group, queue, error_queue)
class SQSBackend(Backend):
pass
|
48f6329a74bde4b045aff31e6b2def11c151d294 | couchdb/tests/testutil.py | couchdb/tests/testutil.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import uuid
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
name = 'couchdb-python/' + uuid.uuid4().hex
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import random
import sys
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
# Find an unused database name
while True:
name = 'couchdb-python/%d' % random.randint(0, sys.maxint)
if name not in self.temp_dbs:
break
print '%s already used' % name
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
| Use a random number instead of uuid for temp database name. | Use a random number instead of uuid for temp database name.
| Python | bsd-3-clause | gcarranza/couchdb-python,jur9526/couchdb-python | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import uuid
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
name = 'couchdb-python/' + uuid.uuid4().hex
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
Use a random number instead of uuid for temp database name. | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import random
import sys
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
# Find an unused database name
while True:
name = 'couchdb-python/%d' % random.randint(0, sys.maxint)
if name not in self.temp_dbs:
break
print '%s already used' % name
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import uuid
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
name = 'couchdb-python/' + uuid.uuid4().hex
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
<commit_msg>Use a random number instead of uuid for temp database name.<commit_after> | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import random
import sys
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
# Find an unused database name
while True:
name = 'couchdb-python/%d' % random.randint(0, sys.maxint)
if name not in self.temp_dbs:
break
print '%s already used' % name
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import uuid
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
name = 'couchdb-python/' + uuid.uuid4().hex
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
Use a random number instead of uuid for temp database name.# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import random
import sys
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
# Find an unused database name
while True:
name = 'couchdb-python/%d' % random.randint(0, sys.maxint)
if name not in self.temp_dbs:
break
print '%s already used' % name
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import uuid
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
name = 'couchdb-python/' + uuid.uuid4().hex
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
<commit_msg>Use a random number instead of uuid for temp database name.<commit_after># -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import random
import sys
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
# Find an unused database name
while True:
name = 'couchdb-python/%d' % random.randint(0, sys.maxint)
if name not in self.temp_dbs:
break
print '%s already used' % name
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
|
f87585d51493157e89c0117a25bf7a6d4b2b135f | runtests.py | runtests.py | #!/usr/bin/env python
import os
import sys
from django.conf import settings
import django
DEFAULT_SETTINGS = {
'INSTALLED_APPS': (
'spillway',
'tests',
),
'DATABASES': {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
}
},
}
def runtests():
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
failures = runner_class(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
| #!/usr/bin/env python
import os
import sys
from django.conf import settings
import django
DEFAULT_SETTINGS = {
'INSTALLED_APPS': (
'django.contrib.gis',
'spillway',
'tests',
),
'DATABASES': {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
}
},
}
def runtests():
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
failures = runner_class(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
| Enable contrib.gis for test runs so templates are found | Enable contrib.gis for test runs so templates are found
| Python | bsd-3-clause | kuzmich/django-spillway,barseghyanartur/django-spillway,bkg/django-spillway | #!/usr/bin/env python
import os
import sys
from django.conf import settings
import django
DEFAULT_SETTINGS = {
'INSTALLED_APPS': (
'spillway',
'tests',
),
'DATABASES': {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
}
},
}
def runtests():
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
failures = runner_class(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
Enable contrib.gis for test runs so templates are found | #!/usr/bin/env python
import os
import sys
from django.conf import settings
import django
DEFAULT_SETTINGS = {
'INSTALLED_APPS': (
'django.contrib.gis',
'spillway',
'tests',
),
'DATABASES': {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
}
},
}
def runtests():
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
failures = runner_class(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
| <commit_before>#!/usr/bin/env python
import os
import sys
from django.conf import settings
import django
DEFAULT_SETTINGS = {
'INSTALLED_APPS': (
'spillway',
'tests',
),
'DATABASES': {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
}
},
}
def runtests():
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
failures = runner_class(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
<commit_msg>Enable contrib.gis for test runs so templates are found<commit_after> | #!/usr/bin/env python
import os
import sys
from django.conf import settings
import django
DEFAULT_SETTINGS = {
'INSTALLED_APPS': (
'django.contrib.gis',
'spillway',
'tests',
),
'DATABASES': {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
}
},
}
def runtests():
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
failures = runner_class(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
| #!/usr/bin/env python
import os
import sys
from django.conf import settings
import django
DEFAULT_SETTINGS = {
'INSTALLED_APPS': (
'spillway',
'tests',
),
'DATABASES': {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
}
},
}
def runtests():
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
failures = runner_class(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
Enable contrib.gis for test runs so templates are found#!/usr/bin/env python
import os
import sys
from django.conf import settings
import django
DEFAULT_SETTINGS = {
'INSTALLED_APPS': (
'django.contrib.gis',
'spillway',
'tests',
),
'DATABASES': {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
}
},
}
def runtests():
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
failures = runner_class(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
| <commit_before>#!/usr/bin/env python
import os
import sys
from django.conf import settings
import django
DEFAULT_SETTINGS = {
'INSTALLED_APPS': (
'spillway',
'tests',
),
'DATABASES': {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
}
},
}
def runtests():
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
failures = runner_class(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
<commit_msg>Enable contrib.gis for test runs so templates are found<commit_after>#!/usr/bin/env python
import os
import sys
from django.conf import settings
import django
DEFAULT_SETTINGS = {
'INSTALLED_APPS': (
'django.contrib.gis',
'spillway',
'tests',
),
'DATABASES': {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
}
},
}
def runtests():
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
failures = runner_class(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
e01140053a2a906084d0ba50801b17d4ae7ce850 | samples/unmanage_node.py | samples/unmanage_node.py | import requests
from orionsdk import SwisClient
from datetime import datetime, timedelta
def main():
hostname = 'localhost'
username = 'admin'
password = ''
swis = SwisClient(hostname, username, password)
results = swis.query('SELECT TOP 1 NodeID FROM Orion.Nodes')
interfaceId = results['results'][0]['NodeID']
netObjectId = 'N:{}'.format(interfaceId)
now = datetime.utcnow()
tomorrow = now + timedelta(days=1)
swis.invoke('Orion.Nodes', 'Unmanage', netObjectId, now, tomorrow, False)
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
| import requests
from orionsdk import SwisClient
from datetime import datetime, timedelta
def main():
hostname = 'localhost'
username = 'admin'
password = ''
swis = SwisClient(hostname, username, password)
results = swis.query('SELECT NodeID, Caption FROM Orion.Nodes WHERE IPAddress = @ip_addr', ip_addr='127.0.0.1')
if results['results']:
nodeId = results['results'][0]['NodeID']
caption = results['results'][0]['Caption']
netObjectId = 'N:{}'.format(nodeId)
now = datetime.utcnow()
tomorrow = now + timedelta(days=1)
swis.invoke('Orion.Nodes', 'Unmanage', netObjectId, now, tomorrow, False)
print('Done...{} will be unmanaged until {}'.format(caption, tomorrow))
else:
print("Device doesn't Exist")
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
| Correct node variable name and validate results | Correct node variable name and validate results
| Python | apache-2.0 | solarwinds/orionsdk-python | import requests
from orionsdk import SwisClient
from datetime import datetime, timedelta
def main():
hostname = 'localhost'
username = 'admin'
password = ''
swis = SwisClient(hostname, username, password)
results = swis.query('SELECT TOP 1 NodeID FROM Orion.Nodes')
interfaceId = results['results'][0]['NodeID']
netObjectId = 'N:{}'.format(interfaceId)
now = datetime.utcnow()
tomorrow = now + timedelta(days=1)
swis.invoke('Orion.Nodes', 'Unmanage', netObjectId, now, tomorrow, False)
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
Correct node variable name and validate results | import requests
from orionsdk import SwisClient
from datetime import datetime, timedelta
def main():
hostname = 'localhost'
username = 'admin'
password = ''
swis = SwisClient(hostname, username, password)
results = swis.query('SELECT NodeID, Caption FROM Orion.Nodes WHERE IPAddress = @ip_addr', ip_addr='127.0.0.1')
if results['results']:
nodeId = results['results'][0]['NodeID']
caption = results['results'][0]['Caption']
netObjectId = 'N:{}'.format(nodeId)
now = datetime.utcnow()
tomorrow = now + timedelta(days=1)
swis.invoke('Orion.Nodes', 'Unmanage', netObjectId, now, tomorrow, False)
print('Done...{} will be unmanaged until {}'.format(caption, tomorrow))
else:
print("Device doesn't Exist")
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
| <commit_before>import requests
from orionsdk import SwisClient
from datetime import datetime, timedelta
def main():
hostname = 'localhost'
username = 'admin'
password = ''
swis = SwisClient(hostname, username, password)
results = swis.query('SELECT TOP 1 NodeID FROM Orion.Nodes')
interfaceId = results['results'][0]['NodeID']
netObjectId = 'N:{}'.format(interfaceId)
now = datetime.utcnow()
tomorrow = now + timedelta(days=1)
swis.invoke('Orion.Nodes', 'Unmanage', netObjectId, now, tomorrow, False)
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
<commit_msg>Correct node variable name and validate results<commit_after> | import requests
from orionsdk import SwisClient
from datetime import datetime, timedelta
def main():
hostname = 'localhost'
username = 'admin'
password = ''
swis = SwisClient(hostname, username, password)
results = swis.query('SELECT NodeID, Caption FROM Orion.Nodes WHERE IPAddress = @ip_addr', ip_addr='127.0.0.1')
if results['results']:
nodeId = results['results'][0]['NodeID']
caption = results['results'][0]['Caption']
netObjectId = 'N:{}'.format(nodeId)
now = datetime.utcnow()
tomorrow = now + timedelta(days=1)
swis.invoke('Orion.Nodes', 'Unmanage', netObjectId, now, tomorrow, False)
print('Done...{} will be unmanaged until {}'.format(caption, tomorrow))
else:
print("Device doesn't Exist")
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
| import requests
from orionsdk import SwisClient
from datetime import datetime, timedelta
def main():
hostname = 'localhost'
username = 'admin'
password = ''
swis = SwisClient(hostname, username, password)
results = swis.query('SELECT TOP 1 NodeID FROM Orion.Nodes')
interfaceId = results['results'][0]['NodeID']
netObjectId = 'N:{}'.format(interfaceId)
now = datetime.utcnow()
tomorrow = now + timedelta(days=1)
swis.invoke('Orion.Nodes', 'Unmanage', netObjectId, now, tomorrow, False)
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
Correct node variable name and validate resultsimport requests
from orionsdk import SwisClient
from datetime import datetime, timedelta
def main():
hostname = 'localhost'
username = 'admin'
password = ''
swis = SwisClient(hostname, username, password)
results = swis.query('SELECT NodeID, Caption FROM Orion.Nodes WHERE IPAddress = @ip_addr', ip_addr='127.0.0.1')
if results['results']:
nodeId = results['results'][0]['NodeID']
caption = results['results'][0]['Caption']
netObjectId = 'N:{}'.format(nodeId)
now = datetime.utcnow()
tomorrow = now + timedelta(days=1)
swis.invoke('Orion.Nodes', 'Unmanage', netObjectId, now, tomorrow, False)
print('Done...{} will be unmanaged until {}'.format(caption, tomorrow))
else:
print("Device doesn't Exist")
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
| <commit_before>import requests
from orionsdk import SwisClient
from datetime import datetime, timedelta
def main():
hostname = 'localhost'
username = 'admin'
password = ''
swis = SwisClient(hostname, username, password)
results = swis.query('SELECT TOP 1 NodeID FROM Orion.Nodes')
interfaceId = results['results'][0]['NodeID']
netObjectId = 'N:{}'.format(interfaceId)
now = datetime.utcnow()
tomorrow = now + timedelta(days=1)
swis.invoke('Orion.Nodes', 'Unmanage', netObjectId, now, tomorrow, False)
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
<commit_msg>Correct node variable name and validate results<commit_after>import requests
from orionsdk import SwisClient
from datetime import datetime, timedelta
def main():
hostname = 'localhost'
username = 'admin'
password = ''
swis = SwisClient(hostname, username, password)
results = swis.query('SELECT NodeID, Caption FROM Orion.Nodes WHERE IPAddress = @ip_addr', ip_addr='127.0.0.1')
if results['results']:
nodeId = results['results'][0]['NodeID']
caption = results['results'][0]['Caption']
netObjectId = 'N:{}'.format(nodeId)
now = datetime.utcnow()
tomorrow = now + timedelta(days=1)
swis.invoke('Orion.Nodes', 'Unmanage', netObjectId, now, tomorrow, False)
print('Done...{} will be unmanaged until {}'.format(caption, tomorrow))
else:
print("Device doesn't Exist")
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
|
fd3c24537b9af6f7c79fc17b932c01372a569856 | brake/backends/dummybe.py | brake/backends/dummybe.py | import random
from cachebe import CacheBackend
class DummyBackend(CacheBackend):
"""
A dummy rate-limiting backend that disables rate-limiting,
for testing.
"""
def get_ip(self, request):
return str(random.randrange(10e20))
| import random
from cachebe import CacheBackend
class DummyBackend(CacheBackend):
"""
A dummy rate-limiting backend that disables rate-limiting,
for testing.
"""
def get_ip(self, request):
return str(random.randrange(10e20))
def limit(self, func_name, request,
ip=True, field=None, count=5, period=None):
"""Return limit data about any keys relevant for requst."""
return []
| Make the dummy limit function always return nothing. | Make the dummy limit function always return nothing.
| Python | bsd-3-clause | SilentCircle/django-brake,SilentCircle/django-brake,skorokithakis/django-brake,skorokithakis/django-brake | import random
from cachebe import CacheBackend
class DummyBackend(CacheBackend):
"""
A dummy rate-limiting backend that disables rate-limiting,
for testing.
"""
def get_ip(self, request):
return str(random.randrange(10e20))
Make the dummy limit function always return nothing. | import random
from cachebe import CacheBackend
class DummyBackend(CacheBackend):
"""
A dummy rate-limiting backend that disables rate-limiting,
for testing.
"""
def get_ip(self, request):
return str(random.randrange(10e20))
def limit(self, func_name, request,
ip=True, field=None, count=5, period=None):
"""Return limit data about any keys relevant for requst."""
return []
| <commit_before>import random
from cachebe import CacheBackend
class DummyBackend(CacheBackend):
"""
A dummy rate-limiting backend that disables rate-limiting,
for testing.
"""
def get_ip(self, request):
return str(random.randrange(10e20))
<commit_msg>Make the dummy limit function always return nothing.<commit_after> | import random
from cachebe import CacheBackend
class DummyBackend(CacheBackend):
"""
A dummy rate-limiting backend that disables rate-limiting,
for testing.
"""
def get_ip(self, request):
return str(random.randrange(10e20))
def limit(self, func_name, request,
ip=True, field=None, count=5, period=None):
"""Return limit data about any keys relevant for requst."""
return []
| import random
from cachebe import CacheBackend
class DummyBackend(CacheBackend):
"""
A dummy rate-limiting backend that disables rate-limiting,
for testing.
"""
def get_ip(self, request):
return str(random.randrange(10e20))
Make the dummy limit function always return nothing.import random
from cachebe import CacheBackend
class DummyBackend(CacheBackend):
"""
A dummy rate-limiting backend that disables rate-limiting,
for testing.
"""
def get_ip(self, request):
return str(random.randrange(10e20))
def limit(self, func_name, request,
ip=True, field=None, count=5, period=None):
"""Return limit data about any keys relevant for requst."""
return []
| <commit_before>import random
from cachebe import CacheBackend
class DummyBackend(CacheBackend):
"""
A dummy rate-limiting backend that disables rate-limiting,
for testing.
"""
def get_ip(self, request):
return str(random.randrange(10e20))
<commit_msg>Make the dummy limit function always return nothing.<commit_after>import random
from cachebe import CacheBackend
class DummyBackend(CacheBackend):
"""
A dummy rate-limiting backend that disables rate-limiting,
for testing.
"""
def get_ip(self, request):
return str(random.randrange(10e20))
def limit(self, func_name, request,
ip=True, field=None, count=5, period=None):
"""Return limit data about any keys relevant for requst."""
return []
|
ded82a3f9f438206a5f6ffb02739ad2fe71a08ce | rtrss/config_production.py | rtrss/config_production.py | import os
SQLALCHEMY_DATABASE_URI = os.environ.get('OPENSHIFT_POSTGRESQL_DB_URL')
# directory to store runtime data, write access required
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR')
SECRET_KEY = os.environ.get('RTRSS_SECRET_KEY')
FILESTORAGE_URL = os.environ.get('RTRSS_FILESTORAGE_URL')
GCS_PRIVATEKEY_URL = os.environ.get('RTRSS_GCS_PRIVATEKEY_URL')
PORT = int(os.environ.get('OPENSHIFT_PYTHON_PORT'))
IP = os.environ.get('OPENSHIFT_PYTHON_IP')
| import os
SQLALCHEMY_DATABASE_URI = os.environ.get('OPENSHIFT_POSTGRESQL_DB_URL')
# directory to store runtime data, write access required
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR')
SECRET_KEY = os.environ.get('RTRSS_SECRET_KEY')
FILESTORAGE_URL = os.environ.get('RTRSS_FILESTORAGE_URL')
GCS_PRIVATEKEY_URL = os.environ.get('RTRSS_GCS_PRIVATEKEY_URL')
PORT = int(os.environ.get('OPENSHIFT_PYTHON_PORT'))
IP = os.environ.get('OPENSHIFT_PYTHON_IP')
DEBUG = False
| Add DABUG config value to production config | Add DABUG config value to production config
| Python | apache-2.0 | notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss | import os
SQLALCHEMY_DATABASE_URI = os.environ.get('OPENSHIFT_POSTGRESQL_DB_URL')
# directory to store runtime data, write access required
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR')
SECRET_KEY = os.environ.get('RTRSS_SECRET_KEY')
FILESTORAGE_URL = os.environ.get('RTRSS_FILESTORAGE_URL')
GCS_PRIVATEKEY_URL = os.environ.get('RTRSS_GCS_PRIVATEKEY_URL')
PORT = int(os.environ.get('OPENSHIFT_PYTHON_PORT'))
IP = os.environ.get('OPENSHIFT_PYTHON_IP')
Add DABUG config value to production config | import os
SQLALCHEMY_DATABASE_URI = os.environ.get('OPENSHIFT_POSTGRESQL_DB_URL')
# directory to store runtime data, write access required
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR')
SECRET_KEY = os.environ.get('RTRSS_SECRET_KEY')
FILESTORAGE_URL = os.environ.get('RTRSS_FILESTORAGE_URL')
GCS_PRIVATEKEY_URL = os.environ.get('RTRSS_GCS_PRIVATEKEY_URL')
PORT = int(os.environ.get('OPENSHIFT_PYTHON_PORT'))
IP = os.environ.get('OPENSHIFT_PYTHON_IP')
DEBUG = False
| <commit_before>import os
SQLALCHEMY_DATABASE_URI = os.environ.get('OPENSHIFT_POSTGRESQL_DB_URL')
# directory to store runtime data, write access required
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR')
SECRET_KEY = os.environ.get('RTRSS_SECRET_KEY')
FILESTORAGE_URL = os.environ.get('RTRSS_FILESTORAGE_URL')
GCS_PRIVATEKEY_URL = os.environ.get('RTRSS_GCS_PRIVATEKEY_URL')
PORT = int(os.environ.get('OPENSHIFT_PYTHON_PORT'))
IP = os.environ.get('OPENSHIFT_PYTHON_IP')
<commit_msg>Add DABUG config value to production config<commit_after> | import os
SQLALCHEMY_DATABASE_URI = os.environ.get('OPENSHIFT_POSTGRESQL_DB_URL')
# directory to store runtime data, write access required
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR')
SECRET_KEY = os.environ.get('RTRSS_SECRET_KEY')
FILESTORAGE_URL = os.environ.get('RTRSS_FILESTORAGE_URL')
GCS_PRIVATEKEY_URL = os.environ.get('RTRSS_GCS_PRIVATEKEY_URL')
PORT = int(os.environ.get('OPENSHIFT_PYTHON_PORT'))
IP = os.environ.get('OPENSHIFT_PYTHON_IP')
DEBUG = False
| import os
SQLALCHEMY_DATABASE_URI = os.environ.get('OPENSHIFT_POSTGRESQL_DB_URL')
# directory to store runtime data, write access required
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR')
SECRET_KEY = os.environ.get('RTRSS_SECRET_KEY')
FILESTORAGE_URL = os.environ.get('RTRSS_FILESTORAGE_URL')
GCS_PRIVATEKEY_URL = os.environ.get('RTRSS_GCS_PRIVATEKEY_URL')
PORT = int(os.environ.get('OPENSHIFT_PYTHON_PORT'))
IP = os.environ.get('OPENSHIFT_PYTHON_IP')
Add DABUG config value to production configimport os
SQLALCHEMY_DATABASE_URI = os.environ.get('OPENSHIFT_POSTGRESQL_DB_URL')
# directory to store runtime data, write access required
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR')
SECRET_KEY = os.environ.get('RTRSS_SECRET_KEY')
FILESTORAGE_URL = os.environ.get('RTRSS_FILESTORAGE_URL')
GCS_PRIVATEKEY_URL = os.environ.get('RTRSS_GCS_PRIVATEKEY_URL')
PORT = int(os.environ.get('OPENSHIFT_PYTHON_PORT'))
IP = os.environ.get('OPENSHIFT_PYTHON_IP')
DEBUG = False
| <commit_before>import os
SQLALCHEMY_DATABASE_URI = os.environ.get('OPENSHIFT_POSTGRESQL_DB_URL')
# directory to store runtime data, write access required
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR')
SECRET_KEY = os.environ.get('RTRSS_SECRET_KEY')
FILESTORAGE_URL = os.environ.get('RTRSS_FILESTORAGE_URL')
GCS_PRIVATEKEY_URL = os.environ.get('RTRSS_GCS_PRIVATEKEY_URL')
PORT = int(os.environ.get('OPENSHIFT_PYTHON_PORT'))
IP = os.environ.get('OPENSHIFT_PYTHON_IP')
<commit_msg>Add DABUG config value to production config<commit_after>import os
SQLALCHEMY_DATABASE_URI = os.environ.get('OPENSHIFT_POSTGRESQL_DB_URL')
# directory to store runtime data, write access required
DATA_DIR = os.environ.get('OPENSHIFT_DATA_DIR')
SECRET_KEY = os.environ.get('RTRSS_SECRET_KEY')
FILESTORAGE_URL = os.environ.get('RTRSS_FILESTORAGE_URL')
GCS_PRIVATEKEY_URL = os.environ.get('RTRSS_GCS_PRIVATEKEY_URL')
PORT = int(os.environ.get('OPENSHIFT_PYTHON_PORT'))
IP = os.environ.get('OPENSHIFT_PYTHON_IP')
DEBUG = False
|
f9b87aa94295473fa1a630cb812058554a1cf542 | openacademy/__openerp__.py | openacademy/__openerp__.py | # -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'description': """
Open Academy module for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "tebanep",
'website': "https://github.com/tebanep",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
#'templates.xml',
'view/openacademy_course_view',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable': True,
'auto_install': False,
}
| # -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'description': """
Open Academy module for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "tebanep",
'website': "https://github.com/tebanep",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
#'templates.xml',
'view/openacademy_course_view.xml',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable': True,
'auto_install': False,
}
| Fix missing extension in course view file | Fix missing extension in course view file
| Python | mit | tebanep/odoo_training_addons | # -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'description': """
Open Academy module for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "tebanep",
'website': "https://github.com/tebanep",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
#'templates.xml',
'view/openacademy_course_view',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable': True,
'auto_install': False,
}
Fix missing extension in course view file | # -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'description': """
Open Academy module for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "tebanep",
'website': "https://github.com/tebanep",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
#'templates.xml',
'view/openacademy_course_view.xml',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable': True,
'auto_install': False,
}
| <commit_before># -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'description': """
Open Academy module for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "tebanep",
'website': "https://github.com/tebanep",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
#'templates.xml',
'view/openacademy_course_view',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable': True,
'auto_install': False,
}
<commit_msg>Fix missing extension in course view file<commit_after> | # -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'description': """
Open Academy module for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "tebanep",
'website': "https://github.com/tebanep",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
#'templates.xml',
'view/openacademy_course_view.xml',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable': True,
'auto_install': False,
}
| # -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'description': """
Open Academy module for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "tebanep",
'website': "https://github.com/tebanep",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
#'templates.xml',
'view/openacademy_course_view',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable': True,
'auto_install': False,
}
Fix missing extension in course view file# -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'description': """
Open Academy module for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "tebanep",
'website': "https://github.com/tebanep",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
#'templates.xml',
'view/openacademy_course_view.xml',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable': True,
'auto_install': False,
}
| <commit_before># -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'description': """
Open Academy module for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "tebanep",
'website': "https://github.com/tebanep",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
#'templates.xml',
'view/openacademy_course_view',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable': True,
'auto_install': False,
}
<commit_msg>Fix missing extension in course view file<commit_after># -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'description': """
Open Academy module for managing trainings:
- training courses
- training sessions
- attendees registration
""",
'author': "tebanep",
'website': "https://github.com/tebanep",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
#'templates.xml',
'view/openacademy_course_view.xml',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
f1939ef0eadb164dfbe95bf90b3a4cd8757c8a75 | src/ovirtsdk/infrastructure/common.py | src/ovirtsdk/infrastructure/common.py | #
# Copyright (c) 2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ovirtsdk.utils.comperator import Comparator
from ovirtsdk.infrastructure.errors import ImmutableError
class Base(object):
''' Decorator base class '''
def __init__(self, context):
self.__context = context
@property
def context(self):
return self.__context
def __getattr__(self, item):
if not self.__dict__.has_key('superclass'):
return self.__getattribute__(item)
return self.superclass.__getattribute__(item)
def __eq__(self, other):
return Comparator.compare(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __setattr__(self, name, value):
if name in ['__context', 'context']:
raise ImmutableError(name)
else:
super(Base, self).__setattr__(name, value)
| #
# Copyright (c) 2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ovirtsdk.utils.comperator import Comparator
from ovirtsdk.infrastructure.errors import ImmutableError
class Base(object):
''' Decorator base class '''
def __init__(self, context):
self.__context = context
@property
def context(self):
return self.__context
def __getattr__(self, item):
if not self.__dict__.has_key('superclass'):
return self.__getattribute__(item)
return self.superclass.__getattribute__(item)
def __eq__(self, other):
return Comparator.compare(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __setattr__(self, name, value):
if name in ['__context', 'context']:
raise ImmutableError(name)
else:
super(Base, self).__setattr__(name, value)
def export(self, outfile, level, namespace_='', name_='', namespacedef_='', pretty_print=True):
# This empty method is necessary in order to avoid exceptions when the
# infrastructure tries to invoke it on a collection decorator that is
# used as a parameter.
pass
| Add empty export method to decorator base | sdk: Add empty export method to decorator base
Decorators for resources and collections extend a common base class, for
example:
class VMSnapshotDisks(Base)
The resource decorators also extend the corresponding parameter class:
class VMSnapshotDisk(params.Disk, Base)
This means that resource decorators implement the "export" method,
responsible for generating the XML representation of the entity, but
collection decorators don't implement it.
There are situations where decorators are used as parameters, for
example, when creating a VM from a snapshot one could use the following
code:
snapshot = vm.snapshots.get(id="...")
The resulting object is a decorator, and it contains references to
decorators of collections, for example to the collection of disks. Later
this object can be used as a parameter, as follows:
snapshots = ovirtsdk.xml.params.Snapshots()
snapshots.add_snapshot(snapshot)
newvm = ovirtsdk.xml.params.VM(
name="newvm",
snapshots=snapshots,
...)
api.vms.add(newvm)
When doing this the infrastructure will try to generate the XML
document, calling the "export" method on the new VM object, and this
will recursively call the "export" methods of all the referenced
objects, including the collection decorators, which will fail because
they don't have such method.
This usage is not good practice, and not efficient, it is better to
avoid using decorators as parameters:
snapshot = ovirtsdk.params.Snapshot(id="...")
snapshots = ovirtsdk.params.Snapshots()
snapshots.add_snapshot(snapshot)
newvm = ovirtsdk.xml.params.VM(
name="newvm",
snapshots=snapshots,
...)
api.vms.add(newvm)
As this is difficult to enforce this patch adds to the Base class an
empty "export" method, so that these operations won't fail.
Change-Id: I6d2e6b9a42ad1a878f8edbbd41f3bb9d60db2bc8
Bug-Url: https://bugzilla.redhat.com/1024696
Signed-off-by: Juan Hernandez <59e5b8140de97cc91c3fb6c5342dce948469af8c@redhat.com>
| Python | apache-2.0 | DragonRoman/ovirt-engine-sdk,DragonRoman/ovirt-engine-sdk,DragonRoman/ovirt-engine-sdk | #
# Copyright (c) 2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ovirtsdk.utils.comperator import Comparator
from ovirtsdk.infrastructure.errors import ImmutableError
class Base(object):
''' Decorator base class '''
def __init__(self, context):
self.__context = context
@property
def context(self):
return self.__context
def __getattr__(self, item):
if not self.__dict__.has_key('superclass'):
return self.__getattribute__(item)
return self.superclass.__getattribute__(item)
def __eq__(self, other):
return Comparator.compare(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __setattr__(self, name, value):
if name in ['__context', 'context']:
raise ImmutableError(name)
else:
super(Base, self).__setattr__(name, value)
sdk: Add empty export method to decorator base
Decorators for resources and collections extend a common base class, for
example:
class VMSnapshotDisks(Base)
The resource decorators also extend the corresponding parameter class:
class VMSnapshotDisk(params.Disk, Base)
This means that resource decorators implement the "export" method,
responsible for generating the XML representation of the entity, but
collection decorators don't implement it.
There are situations where decorators are used as parameters, for
example, when creating a VM from a snapshot one could use the following
code:
snapshot = vm.snapshots.get(id="...")
The resulting object is a decorator, and it contains references to
decorators of collections, for example to the collection of disks. Later
this object can be used as a parameter, as follows:
snapshots = ovirtsdk.xml.params.Snapshots()
snapshots.add_snapshot(snapshot)
newvm = ovirtsdk.xml.params.VM(
name="newvm",
snapshots=snapshots,
...)
api.vms.add(newvm)
When doing this the infrastructure will try to generate the XML
document, calling the "export" method on the new VM object, and this
will recursively call the "export" methods of all the referenced
objects, including the collection decorators, which will fail because
they don't have such method.
This usage is not good practice, and not efficient, it is better to
avoid using decorators as parameters:
snapshot = ovirtsdk.params.Snapshot(id="...")
snapshots = ovirtsdk.params.Snapshots()
snapshots.add_snapshot(snapshot)
newvm = ovirtsdk.xml.params.VM(
name="newvm",
snapshots=snapshots,
...)
api.vms.add(newvm)
As this is difficult to enforce this patch adds to the Base class an
empty "export" method, so that these operations won't fail.
Change-Id: I6d2e6b9a42ad1a878f8edbbd41f3bb9d60db2bc8
Bug-Url: https://bugzilla.redhat.com/1024696
Signed-off-by: Juan Hernandez <59e5b8140de97cc91c3fb6c5342dce948469af8c@redhat.com> | #
# Copyright (c) 2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ovirtsdk.utils.comperator import Comparator
from ovirtsdk.infrastructure.errors import ImmutableError
class Base(object):
''' Decorator base class '''
def __init__(self, context):
self.__context = context
@property
def context(self):
return self.__context
def __getattr__(self, item):
if not self.__dict__.has_key('superclass'):
return self.__getattribute__(item)
return self.superclass.__getattribute__(item)
def __eq__(self, other):
return Comparator.compare(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __setattr__(self, name, value):
if name in ['__context', 'context']:
raise ImmutableError(name)
else:
super(Base, self).__setattr__(name, value)
def export(self, outfile, level, namespace_='', name_='', namespacedef_='', pretty_print=True):
# This empty method is necessary in order to avoid exceptions when the
# infrastructure tries to invoke it on a collection decorator that is
# used as a parameter.
pass
| <commit_before>#
# Copyright (c) 2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ovirtsdk.utils.comperator import Comparator
from ovirtsdk.infrastructure.errors import ImmutableError
class Base(object):
''' Decorator base class '''
def __init__(self, context):
self.__context = context
@property
def context(self):
return self.__context
def __getattr__(self, item):
if not self.__dict__.has_key('superclass'):
return self.__getattribute__(item)
return self.superclass.__getattribute__(item)
def __eq__(self, other):
return Comparator.compare(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __setattr__(self, name, value):
if name in ['__context', 'context']:
raise ImmutableError(name)
else:
super(Base, self).__setattr__(name, value)
<commit_msg>sdk: Add empty export method to decorator base
Decorators for resources and collections extend a common base class, for
example:
class VMSnapshotDisks(Base)
The resource decorators also extend the corresponding parameter class:
class VMSnapshotDisk(params.Disk, Base)
This means that resource decorators implement the "export" method,
responsible for generating the XML representation of the entity, but
collection decorators don't implement it.
There are situations where decorators are used as parameters, for
example, when creating a VM from a snapshot one could use the following
code:
snapshot = vm.snapshots.get(id="...")
The resulting object is a decorator, and it contains references to
decorators of collections, for example to the collection of disks. Later
this object can be used as a parameter, as follows:
snapshots = ovirtsdk.xml.params.Snapshots()
snapshots.add_snapshot(snapshot)
newvm = ovirtsdk.xml.params.VM(
name="newvm",
snapshots=snapshots,
...)
api.vms.add(newvm)
When doing this the infrastructure will try to generate the XML
document, calling the "export" method on the new VM object, and this
will recursively call the "export" methods of all the referenced
objects, including the collection decorators, which will fail because
they don't have such method.
This usage is not good practice, and not efficient, it is better to
avoid using decorators as parameters:
snapshot = ovirtsdk.params.Snapshot(id="...")
snapshots = ovirtsdk.params.Snapshots()
snapshots.add_snapshot(snapshot)
newvm = ovirtsdk.xml.params.VM(
name="newvm",
snapshots=snapshots,
...)
api.vms.add(newvm)
As this is difficult to enforce this patch adds to the Base class an
empty "export" method, so that these operations won't fail.
Change-Id: I6d2e6b9a42ad1a878f8edbbd41f3bb9d60db2bc8
Bug-Url: https://bugzilla.redhat.com/1024696
Signed-off-by: Juan Hernandez <59e5b8140de97cc91c3fb6c5342dce948469af8c@redhat.com><commit_after> | #
# Copyright (c) 2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ovirtsdk.utils.comperator import Comparator
from ovirtsdk.infrastructure.errors import ImmutableError
class Base(object):
''' Decorator base class '''
def __init__(self, context):
self.__context = context
@property
def context(self):
return self.__context
def __getattr__(self, item):
if not self.__dict__.has_key('superclass'):
return self.__getattribute__(item)
return self.superclass.__getattribute__(item)
def __eq__(self, other):
return Comparator.compare(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __setattr__(self, name, value):
if name in ['__context', 'context']:
raise ImmutableError(name)
else:
super(Base, self).__setattr__(name, value)
def export(self, outfile, level, namespace_='', name_='', namespacedef_='', pretty_print=True):
# This empty method is necessary in order to avoid exceptions when the
# infrastructure tries to invoke it on a collection decorator that is
# used as a parameter.
pass
| #
# Copyright (c) 2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ovirtsdk.utils.comperator import Comparator
from ovirtsdk.infrastructure.errors import ImmutableError
class Base(object):
''' Decorator base class '''
def __init__(self, context):
self.__context = context
@property
def context(self):
return self.__context
def __getattr__(self, item):
if not self.__dict__.has_key('superclass'):
return self.__getattribute__(item)
return self.superclass.__getattribute__(item)
def __eq__(self, other):
return Comparator.compare(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __setattr__(self, name, value):
if name in ['__context', 'context']:
raise ImmutableError(name)
else:
super(Base, self).__setattr__(name, value)
sdk: Add empty export method to decorator base
Decorators for resources and collections extend a common base class, for
example:
class VMSnapshotDisks(Base)
The resource decorators also extend the corresponding parameter class:
class VMSnapshotDisk(params.Disk, Base)
This means that resource decorators implement the "export" method,
responsible for generating the XML representation of the entity, but
collection decorators don't implement it.
There are situations where decorators are used as parameters, for
example, when creating a VM from a snapshot one could use the following
code:
snapshot = vm.snapshots.get(id="...")
The resulting object is a decorator, and it contains references to
decorators of collections, for example to the collection of disks. Later
this object can be used as a parameter, as follows:
snapshots = ovirtsdk.xml.params.Snapshots()
snapshots.add_snapshot(snapshot)
newvm = ovirtsdk.xml.params.VM(
name="newvm",
snapshots=snapshots,
...)
api.vms.add(newvm)
When doing this the infrastructure will try to generate the XML
document, calling the "export" method on the new VM object, and this
will recursively call the "export" methods of all the referenced
objects, including the collection decorators, which will fail because
they don't have such method.
This usage is not good practice, and not efficient, it is better to
avoid using decorators as parameters:
snapshot = ovirtsdk.params.Snapshot(id="...")
snapshots = ovirtsdk.params.Snapshots()
snapshots.add_snapshot(snapshot)
newvm = ovirtsdk.xml.params.VM(
name="newvm",
snapshots=snapshots,
...)
api.vms.add(newvm)
As this is difficult to enforce this patch adds to the Base class an
empty "export" method, so that these operations won't fail.
Change-Id: I6d2e6b9a42ad1a878f8edbbd41f3bb9d60db2bc8
Bug-Url: https://bugzilla.redhat.com/1024696
Signed-off-by: Juan Hernandez <59e5b8140de97cc91c3fb6c5342dce948469af8c@redhat.com>#
# Copyright (c) 2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ovirtsdk.utils.comperator import Comparator
from ovirtsdk.infrastructure.errors import ImmutableError
class Base(object):
''' Decorator base class '''
def __init__(self, context):
self.__context = context
@property
def context(self):
return self.__context
def __getattr__(self, item):
if not self.__dict__.has_key('superclass'):
return self.__getattribute__(item)
return self.superclass.__getattribute__(item)
def __eq__(self, other):
return Comparator.compare(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __setattr__(self, name, value):
if name in ['__context', 'context']:
raise ImmutableError(name)
else:
super(Base, self).__setattr__(name, value)
def export(self, outfile, level, namespace_='', name_='', namespacedef_='', pretty_print=True):
# This empty method is necessary in order to avoid exceptions when the
# infrastructure tries to invoke it on a collection decorator that is
# used as a parameter.
pass
| <commit_before>#
# Copyright (c) 2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ovirtsdk.utils.comperator import Comparator
from ovirtsdk.infrastructure.errors import ImmutableError
class Base(object):
''' Decorator base class '''
def __init__(self, context):
self.__context = context
@property
def context(self):
return self.__context
def __getattr__(self, item):
if not self.__dict__.has_key('superclass'):
return self.__getattribute__(item)
return self.superclass.__getattribute__(item)
def __eq__(self, other):
return Comparator.compare(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __setattr__(self, name, value):
if name in ['__context', 'context']:
raise ImmutableError(name)
else:
super(Base, self).__setattr__(name, value)
<commit_msg>sdk: Add empty export method to decorator base
Decorators for resources and collections extend a common base class, for
example:
class VMSnapshotDisks(Base)
The resource decorators also extend the corresponding parameter class:
class VMSnapshotDisk(params.Disk, Base)
This means that resource decorators implement the "export" method,
responsible for generating the XML representation of the entity, but
collection decorators don't implement it.
There are situations where decorators are used as parameters, for
example, when creating a VM from a snapshot one could use the following
code:
snapshot = vm.snapshots.get(id="...")
The resulting object is a decorator, and it contains references to
decorators of collections, for example to the collection of disks. Later
this object can be used as a parameter, as follows:
snapshots = ovirtsdk.xml.params.Snapshots()
snapshots.add_snapshot(snapshot)
newvm = ovirtsdk.xml.params.VM(
name="newvm",
snapshots=snapshots,
...)
api.vms.add(newvm)
When doing this the infrastructure will try to generate the XML
document, calling the "export" method on the new VM object, and this
will recursively call the "export" methods of all the referenced
objects, including the collection decorators, which will fail because
they don't have such method.
This usage is not good practice, and not efficient, it is better to
avoid using decorators as parameters:
snapshot = ovirtsdk.params.Snapshot(id="...")
snapshots = ovirtsdk.params.Snapshots()
snapshots.add_snapshot(snapshot)
newvm = ovirtsdk.xml.params.VM(
name="newvm",
snapshots=snapshots,
...)
api.vms.add(newvm)
As this is difficult to enforce this patch adds to the Base class an
empty "export" method, so that these operations won't fail.
Change-Id: I6d2e6b9a42ad1a878f8edbbd41f3bb9d60db2bc8
Bug-Url: https://bugzilla.redhat.com/1024696
Signed-off-by: Juan Hernandez <59e5b8140de97cc91c3fb6c5342dce948469af8c@redhat.com><commit_after>#
# Copyright (c) 2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ovirtsdk.utils.comperator import Comparator
from ovirtsdk.infrastructure.errors import ImmutableError
class Base(object):
''' Decorator base class '''
def __init__(self, context):
self.__context = context
@property
def context(self):
return self.__context
def __getattr__(self, item):
if not self.__dict__.has_key('superclass'):
return self.__getattribute__(item)
return self.superclass.__getattribute__(item)
def __eq__(self, other):
return Comparator.compare(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __setattr__(self, name, value):
if name in ['__context', 'context']:
raise ImmutableError(name)
else:
super(Base, self).__setattr__(name, value)
def export(self, outfile, level, namespace_='', name_='', namespacedef_='', pretty_print=True):
# This empty method is necessary in order to avoid exceptions when the
# infrastructure tries to invoke it on a collection decorator that is
# used as a parameter.
pass
|
18e07d14cf0c0f72e1af50b55bd054d917cb346b | docs/source/parameters.py | docs/source/parameters.py |
def _getvar(var, path, default=None):
with open(path) as f:
for line in f:
if var in line:
g = {}
l = {}
exec line in g, l
return l[var]
return default
def _version():
import os
return _getvar("__version__", os.path.join(os.path.dirname(__file__),
"../../setup.py"),
"X.X")
def _repo():
return "staging"
version = _version()
repo = _repo()
install = "https://packagecloud.io/datawire/%s/install" % repo
script_rpm = "https://packagecloud.io/install/repositories/datawire/%s/script.rpm.sh" % repo
script_deb = "https://packagecloud.io/install/repositories/datawire/%s/script.deb.sh" % repo
|
def _getvar(var, path, default=None):
with open(path) as f:
for line in f:
if var in line and "=" in line and "__all__" not in line:
g = {}
l = {}
exec line in g, l
return l[var]
return default
def _version():
import os
return _getvar("__version__", os.path.join(os.path.dirname(__file__),
"../../_metadata.py"),
"X.X")
def _repo():
return "staging"
version = _version()
repo = _repo()
install = "https://packagecloud.io/datawire/%s/install" % repo
script_rpm = "https://packagecloud.io/install/repositories/datawire/%s/script.rpm.sh" % repo
script_deb = "https://packagecloud.io/install/repositories/datawire/%s/script.deb.sh" % repo
| Read doc version number from _metadata.py | Read doc version number from _metadata.py
| Python | apache-2.0 | datawire/bakerstreet,datawire/bakerstreet |
def _getvar(var, path, default=None):
with open(path) as f:
for line in f:
if var in line:
g = {}
l = {}
exec line in g, l
return l[var]
return default
def _version():
import os
return _getvar("__version__", os.path.join(os.path.dirname(__file__),
"../../setup.py"),
"X.X")
def _repo():
return "staging"
version = _version()
repo = _repo()
install = "https://packagecloud.io/datawire/%s/install" % repo
script_rpm = "https://packagecloud.io/install/repositories/datawire/%s/script.rpm.sh" % repo
script_deb = "https://packagecloud.io/install/repositories/datawire/%s/script.deb.sh" % repo
Read doc version number from _metadata.py |
def _getvar(var, path, default=None):
with open(path) as f:
for line in f:
if var in line and "=" in line and "__all__" not in line:
g = {}
l = {}
exec line in g, l
return l[var]
return default
def _version():
import os
return _getvar("__version__", os.path.join(os.path.dirname(__file__),
"../../_metadata.py"),
"X.X")
def _repo():
return "staging"
version = _version()
repo = _repo()
install = "https://packagecloud.io/datawire/%s/install" % repo
script_rpm = "https://packagecloud.io/install/repositories/datawire/%s/script.rpm.sh" % repo
script_deb = "https://packagecloud.io/install/repositories/datawire/%s/script.deb.sh" % repo
| <commit_before>
def _getvar(var, path, default=None):
with open(path) as f:
for line in f:
if var in line:
g = {}
l = {}
exec line in g, l
return l[var]
return default
def _version():
import os
return _getvar("__version__", os.path.join(os.path.dirname(__file__),
"../../setup.py"),
"X.X")
def _repo():
return "staging"
version = _version()
repo = _repo()
install = "https://packagecloud.io/datawire/%s/install" % repo
script_rpm = "https://packagecloud.io/install/repositories/datawire/%s/script.rpm.sh" % repo
script_deb = "https://packagecloud.io/install/repositories/datawire/%s/script.deb.sh" % repo
<commit_msg>Read doc version number from _metadata.py<commit_after> |
def _getvar(var, path, default=None):
with open(path) as f:
for line in f:
if var in line and "=" in line and "__all__" not in line:
g = {}
l = {}
exec line in g, l
return l[var]
return default
def _version():
import os
return _getvar("__version__", os.path.join(os.path.dirname(__file__),
"../../_metadata.py"),
"X.X")
def _repo():
return "staging"
version = _version()
repo = _repo()
install = "https://packagecloud.io/datawire/%s/install" % repo
script_rpm = "https://packagecloud.io/install/repositories/datawire/%s/script.rpm.sh" % repo
script_deb = "https://packagecloud.io/install/repositories/datawire/%s/script.deb.sh" % repo
|
def _getvar(var, path, default=None):
with open(path) as f:
for line in f:
if var in line:
g = {}
l = {}
exec line in g, l
return l[var]
return default
def _version():
import os
return _getvar("__version__", os.path.join(os.path.dirname(__file__),
"../../setup.py"),
"X.X")
def _repo():
return "staging"
version = _version()
repo = _repo()
install = "https://packagecloud.io/datawire/%s/install" % repo
script_rpm = "https://packagecloud.io/install/repositories/datawire/%s/script.rpm.sh" % repo
script_deb = "https://packagecloud.io/install/repositories/datawire/%s/script.deb.sh" % repo
Read doc version number from _metadata.py
def _getvar(var, path, default=None):
with open(path) as f:
for line in f:
if var in line and "=" in line and "__all__" not in line:
g = {}
l = {}
exec line in g, l
return l[var]
return default
def _version():
import os
return _getvar("__version__", os.path.join(os.path.dirname(__file__),
"../../_metadata.py"),
"X.X")
def _repo():
return "staging"
version = _version()
repo = _repo()
install = "https://packagecloud.io/datawire/%s/install" % repo
script_rpm = "https://packagecloud.io/install/repositories/datawire/%s/script.rpm.sh" % repo
script_deb = "https://packagecloud.io/install/repositories/datawire/%s/script.deb.sh" % repo
| <commit_before>
def _getvar(var, path, default=None):
with open(path) as f:
for line in f:
if var in line:
g = {}
l = {}
exec line in g, l
return l[var]
return default
def _version():
import os
return _getvar("__version__", os.path.join(os.path.dirname(__file__),
"../../setup.py"),
"X.X")
def _repo():
return "staging"
version = _version()
repo = _repo()
install = "https://packagecloud.io/datawire/%s/install" % repo
script_rpm = "https://packagecloud.io/install/repositories/datawire/%s/script.rpm.sh" % repo
script_deb = "https://packagecloud.io/install/repositories/datawire/%s/script.deb.sh" % repo
<commit_msg>Read doc version number from _metadata.py<commit_after>
def _getvar(var, path, default=None):
with open(path) as f:
for line in f:
if var in line and "=" in line and "__all__" not in line:
g = {}
l = {}
exec line in g, l
return l[var]
return default
def _version():
import os
return _getvar("__version__", os.path.join(os.path.dirname(__file__),
"../../_metadata.py"),
"X.X")
def _repo():
return "staging"
version = _version()
repo = _repo()
install = "https://packagecloud.io/datawire/%s/install" % repo
script_rpm = "https://packagecloud.io/install/repositories/datawire/%s/script.rpm.sh" % repo
script_deb = "https://packagecloud.io/install/repositories/datawire/%s/script.deb.sh" % repo
|
b2a56e8f731f3f0d35e8fe4d2379909a3151a667 | services/flickr.py | services/flickr.py | import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
| import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
| Move Flickr over to its newly-secured API domain | Move Flickr over to its newly-secured API domain
| Python | bsd-3-clause | foauth/foauth.org,foauth/foauth.org,foauth/foauth.org | import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
Move Flickr over to its newly-secured API domain | import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
| <commit_before>import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
<commit_msg>Move Flickr over to its newly-secured API domain<commit_after> | import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
| import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
Move Flickr over to its newly-secured API domainimport foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
| <commit_before>import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
<commit_msg>Move Flickr over to its newly-secured API domain<commit_after>import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
|
b1b4b0efd8619e1cb00ee317cc4d57e4dce00eec | projects/wsgi.py | projects/wsgi.py | """
WSGI config for projects project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "projects.settings")
application = get_wsgi_application()
| """
WSGI config for projects project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "projects.settings")
_application = get_wsgi_application()
def application(environ, start_response):
script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if path_info.startswith(script_name):
environ['PATH_INFO'] = path_info[len(script_name):]
scheme = environ.get('HTTP_X_SCHEME', '')
if scheme:
environ['wsgi.url_scheme'] = scheme
return _application(environ, start_response) | Read the script name if the proxy passes it | Read the script name if the proxy passes it
| Python | mit | cmheisel/project-status-dashboard,cmheisel/project-status-dashboard,cmheisel/project-status-dashboard | """
WSGI config for projects project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "projects.settings")
application = get_wsgi_application()
Read the script name if the proxy passes it | """
WSGI config for projects project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "projects.settings")
_application = get_wsgi_application()
def application(environ, start_response):
script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if path_info.startswith(script_name):
environ['PATH_INFO'] = path_info[len(script_name):]
scheme = environ.get('HTTP_X_SCHEME', '')
if scheme:
environ['wsgi.url_scheme'] = scheme
return _application(environ, start_response) | <commit_before>"""
WSGI config for projects project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "projects.settings")
application = get_wsgi_application()
<commit_msg>Read the script name if the proxy passes it<commit_after> | """
WSGI config for projects project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "projects.settings")
_application = get_wsgi_application()
def application(environ, start_response):
script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if path_info.startswith(script_name):
environ['PATH_INFO'] = path_info[len(script_name):]
scheme = environ.get('HTTP_X_SCHEME', '')
if scheme:
environ['wsgi.url_scheme'] = scheme
return _application(environ, start_response) | """
WSGI config for projects project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "projects.settings")
application = get_wsgi_application()
Read the script name if the proxy passes it"""
WSGI config for projects project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "projects.settings")
_application = get_wsgi_application()
def application(environ, start_response):
script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if path_info.startswith(script_name):
environ['PATH_INFO'] = path_info[len(script_name):]
scheme = environ.get('HTTP_X_SCHEME', '')
if scheme:
environ['wsgi.url_scheme'] = scheme
return _application(environ, start_response) | <commit_before>"""
WSGI config for projects project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "projects.settings")
application = get_wsgi_application()
<commit_msg>Read the script name if the proxy passes it<commit_after>"""
WSGI config for projects project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "projects.settings")
_application = get_wsgi_application()
def application(environ, start_response):
script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if path_info.startswith(script_name):
environ['PATH_INFO'] = path_info[len(script_name):]
scheme = environ.get('HTTP_X_SCHEME', '')
if scheme:
environ['wsgi.url_scheme'] = scheme
return _application(environ, start_response) |
5ab86a2456b54d5af73e15b61de95994b95be8cb | kovot/remote_mod.py | kovot/remote_mod.py | #! /usr/bin/env python
# coding:utf-8
import requests
import logging
from kovot.response import Response
class RemoteCallerMod:
def __init__(self, server, port):
self._root_url = "http://{}:{}".format(server, port)
def generate_responses(self, bot, message):
url = "{}/api/get_responses".format(self._root_url)
logging.debug("{} requests to {}".format(self.__class__.__name__, url))
json = {"message": message.dict()}
res = requests.post(url=url, json=json)
return [Response.from_dict(res_)
for res_ in res.json()["responses"]] | #! /usr/bin/env python
# coding:utf-8
import requests
import logging
from kovot.response import Response
class RemoteCallerMod:
def __init__(self, server, port):
self._root_url = "http://{}:{}".format(server, port)
def generate_responses(self, bot, message):
url = "{}/api/generate_responses".format(self._root_url)
logging.debug("{} requests to {}".format(self.__class__.__name__, url))
json = {"message": message.dict()}
res = requests.post(url=url, json=json)
return [Response.from_dict(res_)
for res_ in res.json()["responses"]] | Fix bug; no generate_responses method in RemoteCallerMod | Fix bug; no generate_responses method in RemoteCallerMod
| Python | mit | kenkov/kovot,kenkov/kovot | #! /usr/bin/env python
# coding:utf-8
import requests
import logging
from kovot.response import Response
class RemoteCallerMod:
def __init__(self, server, port):
self._root_url = "http://{}:{}".format(server, port)
def generate_responses(self, bot, message):
url = "{}/api/get_responses".format(self._root_url)
logging.debug("{} requests to {}".format(self.__class__.__name__, url))
json = {"message": message.dict()}
res = requests.post(url=url, json=json)
return [Response.from_dict(res_)
for res_ in res.json()["responses"]]Fix bug; no generate_responses method in RemoteCallerMod | #! /usr/bin/env python
# coding:utf-8
import requests
import logging
from kovot.response import Response
class RemoteCallerMod:
def __init__(self, server, port):
self._root_url = "http://{}:{}".format(server, port)
def generate_responses(self, bot, message):
url = "{}/api/generate_responses".format(self._root_url)
logging.debug("{} requests to {}".format(self.__class__.__name__, url))
json = {"message": message.dict()}
res = requests.post(url=url, json=json)
return [Response.from_dict(res_)
for res_ in res.json()["responses"]] | <commit_before>#! /usr/bin/env python
# coding:utf-8
import requests
import logging
from kovot.response import Response
class RemoteCallerMod:
def __init__(self, server, port):
self._root_url = "http://{}:{}".format(server, port)
def generate_responses(self, bot, message):
url = "{}/api/get_responses".format(self._root_url)
logging.debug("{} requests to {}".format(self.__class__.__name__, url))
json = {"message": message.dict()}
res = requests.post(url=url, json=json)
return [Response.from_dict(res_)
for res_ in res.json()["responses"]]<commit_msg>Fix bug; no generate_responses method in RemoteCallerMod<commit_after> | #! /usr/bin/env python
# coding:utf-8
import requests
import logging
from kovot.response import Response
class RemoteCallerMod:
def __init__(self, server, port):
self._root_url = "http://{}:{}".format(server, port)
def generate_responses(self, bot, message):
url = "{}/api/generate_responses".format(self._root_url)
logging.debug("{} requests to {}".format(self.__class__.__name__, url))
json = {"message": message.dict()}
res = requests.post(url=url, json=json)
return [Response.from_dict(res_)
for res_ in res.json()["responses"]] | #! /usr/bin/env python
# coding:utf-8
import requests
import logging
from kovot.response import Response
class RemoteCallerMod:
def __init__(self, server, port):
self._root_url = "http://{}:{}".format(server, port)
def generate_responses(self, bot, message):
url = "{}/api/get_responses".format(self._root_url)
logging.debug("{} requests to {}".format(self.__class__.__name__, url))
json = {"message": message.dict()}
res = requests.post(url=url, json=json)
return [Response.from_dict(res_)
for res_ in res.json()["responses"]]Fix bug; no generate_responses method in RemoteCallerMod#! /usr/bin/env python
# coding:utf-8
import requests
import logging
from kovot.response import Response
class RemoteCallerMod:
def __init__(self, server, port):
self._root_url = "http://{}:{}".format(server, port)
def generate_responses(self, bot, message):
url = "{}/api/generate_responses".format(self._root_url)
logging.debug("{} requests to {}".format(self.__class__.__name__, url))
json = {"message": message.dict()}
res = requests.post(url=url, json=json)
return [Response.from_dict(res_)
for res_ in res.json()["responses"]] | <commit_before>#! /usr/bin/env python
# coding:utf-8
import requests
import logging
from kovot.response import Response
class RemoteCallerMod:
def __init__(self, server, port):
self._root_url = "http://{}:{}".format(server, port)
def generate_responses(self, bot, message):
url = "{}/api/get_responses".format(self._root_url)
logging.debug("{} requests to {}".format(self.__class__.__name__, url))
json = {"message": message.dict()}
res = requests.post(url=url, json=json)
return [Response.from_dict(res_)
for res_ in res.json()["responses"]]<commit_msg>Fix bug; no generate_responses method in RemoteCallerMod<commit_after>#! /usr/bin/env python
# coding:utf-8
import requests
import logging
from kovot.response import Response
class RemoteCallerMod:
def __init__(self, server, port):
self._root_url = "http://{}:{}".format(server, port)
def generate_responses(self, bot, message):
url = "{}/api/generate_responses".format(self._root_url)
logging.debug("{} requests to {}".format(self.__class__.__name__, url))
json = {"message": message.dict()}
res = requests.post(url=url, json=json)
return [Response.from_dict(res_)
for res_ in res.json()["responses"]] |
d8e5dce3489817a5065c045688b03f9e85c0b9a4 | tests/data_structures/commons/binary_search_tree_unit_test.py | tests/data_structures/commons/binary_search_tree_unit_test.py | import unittest
from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree
class BinarySearchTreeUnitTest(unittest.TestCase):
def test_binarySearchTree(self):
bst = BinarySearchTree.create()
bst.put("one", 1)
bst.put("two", 2)
bst.put("three", 3)
bst.put("six", 6)
bst.put("ten", 10)
self.assertEqual(1, bst.get("one"))
self.assertEqual(2, bst.get("two"))
self.assertEqual(3, bst.get("three"))
self.assertTrue(bst.contains_key("one"))
self.assertTrue(bst.contains_key("two"))
self.assertEqual(5, bst.size())
self.assertFalse(bst.is_empty())
bst.delete("one")
self.assertFalse(bst.contains_key("one"))
self.assertEqual(4, bst.size())
bst.delete("ten")
self.assertFalse(bst.contains_key("ten"))
self.assertEqual(3, bst.size())
bst.delete("three")
self.assertFalse(bst.contains_key("three"))
self.assertEqual(2, bst.size())
if __name__ == '__main__':
unittest.main() | import unittest
from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree
class BinarySearchTreeUnitTest(unittest.TestCase):
def test_binarySearchTree(self):
bst = BinarySearchTree.create()
bst.put("one", 1)
bst.put("two", 2)
bst.put("three", 3)
bst.put("six", 6)
bst.put("ten", 10)
bst.put("ten", 10)
self.assertEqual(1, bst.get("one"))
self.assertEqual(2, bst.get("two"))
self.assertEqual(3, bst.get("three"))
self.assertTrue(bst.contains_key("one"))
self.assertTrue(bst.contains_key("two"))
self.assertEqual(5, bst.size())
self.assertFalse(bst.is_empty())
bst.delete("one")
self.assertFalse(bst.contains_key("one"))
self.assertEqual(4, bst.size())
bst.delete("ten")
self.assertFalse(bst.contains_key("ten"))
self.assertEqual(3, bst.size())
bst.delete("three")
self.assertFalse(bst.contains_key("three"))
self.assertEqual(2, bst.size())
for i in range(100):
bst.put(str(i), i)
self.assertEqual(i, bst.get(str(i)))
for i in range(100):
bst.delete(str(i))
self.assertFalse(bst.contains_key(str(i)))
if __name__ == '__main__':
unittest.main() | Increase the unit test coverage for the binary search tree | Increase the unit test coverage for the binary search tree
| Python | bsd-3-clause | chen0040/pyalgs | import unittest
from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree
class BinarySearchTreeUnitTest(unittest.TestCase):
def test_binarySearchTree(self):
bst = BinarySearchTree.create()
bst.put("one", 1)
bst.put("two", 2)
bst.put("three", 3)
bst.put("six", 6)
bst.put("ten", 10)
self.assertEqual(1, bst.get("one"))
self.assertEqual(2, bst.get("two"))
self.assertEqual(3, bst.get("three"))
self.assertTrue(bst.contains_key("one"))
self.assertTrue(bst.contains_key("two"))
self.assertEqual(5, bst.size())
self.assertFalse(bst.is_empty())
bst.delete("one")
self.assertFalse(bst.contains_key("one"))
self.assertEqual(4, bst.size())
bst.delete("ten")
self.assertFalse(bst.contains_key("ten"))
self.assertEqual(3, bst.size())
bst.delete("three")
self.assertFalse(bst.contains_key("three"))
self.assertEqual(2, bst.size())
if __name__ == '__main__':
unittest.main()Increase the unit test coverage for the binary search tree | import unittest
from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree
class BinarySearchTreeUnitTest(unittest.TestCase):
def test_binarySearchTree(self):
bst = BinarySearchTree.create()
bst.put("one", 1)
bst.put("two", 2)
bst.put("three", 3)
bst.put("six", 6)
bst.put("ten", 10)
bst.put("ten", 10)
self.assertEqual(1, bst.get("one"))
self.assertEqual(2, bst.get("two"))
self.assertEqual(3, bst.get("three"))
self.assertTrue(bst.contains_key("one"))
self.assertTrue(bst.contains_key("two"))
self.assertEqual(5, bst.size())
self.assertFalse(bst.is_empty())
bst.delete("one")
self.assertFalse(bst.contains_key("one"))
self.assertEqual(4, bst.size())
bst.delete("ten")
self.assertFalse(bst.contains_key("ten"))
self.assertEqual(3, bst.size())
bst.delete("three")
self.assertFalse(bst.contains_key("three"))
self.assertEqual(2, bst.size())
for i in range(100):
bst.put(str(i), i)
self.assertEqual(i, bst.get(str(i)))
for i in range(100):
bst.delete(str(i))
self.assertFalse(bst.contains_key(str(i)))
if __name__ == '__main__':
unittest.main() | <commit_before>import unittest
from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree
class BinarySearchTreeUnitTest(unittest.TestCase):
def test_binarySearchTree(self):
bst = BinarySearchTree.create()
bst.put("one", 1)
bst.put("two", 2)
bst.put("three", 3)
bst.put("six", 6)
bst.put("ten", 10)
self.assertEqual(1, bst.get("one"))
self.assertEqual(2, bst.get("two"))
self.assertEqual(3, bst.get("three"))
self.assertTrue(bst.contains_key("one"))
self.assertTrue(bst.contains_key("two"))
self.assertEqual(5, bst.size())
self.assertFalse(bst.is_empty())
bst.delete("one")
self.assertFalse(bst.contains_key("one"))
self.assertEqual(4, bst.size())
bst.delete("ten")
self.assertFalse(bst.contains_key("ten"))
self.assertEqual(3, bst.size())
bst.delete("three")
self.assertFalse(bst.contains_key("three"))
self.assertEqual(2, bst.size())
if __name__ == '__main__':
unittest.main()<commit_msg>Increase the unit test coverage for the binary search tree<commit_after> | import unittest
from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree
class BinarySearchTreeUnitTest(unittest.TestCase):
def test_binarySearchTree(self):
bst = BinarySearchTree.create()
bst.put("one", 1)
bst.put("two", 2)
bst.put("three", 3)
bst.put("six", 6)
bst.put("ten", 10)
bst.put("ten", 10)
self.assertEqual(1, bst.get("one"))
self.assertEqual(2, bst.get("two"))
self.assertEqual(3, bst.get("three"))
self.assertTrue(bst.contains_key("one"))
self.assertTrue(bst.contains_key("two"))
self.assertEqual(5, bst.size())
self.assertFalse(bst.is_empty())
bst.delete("one")
self.assertFalse(bst.contains_key("one"))
self.assertEqual(4, bst.size())
bst.delete("ten")
self.assertFalse(bst.contains_key("ten"))
self.assertEqual(3, bst.size())
bst.delete("three")
self.assertFalse(bst.contains_key("three"))
self.assertEqual(2, bst.size())
for i in range(100):
bst.put(str(i), i)
self.assertEqual(i, bst.get(str(i)))
for i in range(100):
bst.delete(str(i))
self.assertFalse(bst.contains_key(str(i)))
if __name__ == '__main__':
unittest.main() | import unittest
from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree
class BinarySearchTreeUnitTest(unittest.TestCase):
def test_binarySearchTree(self):
bst = BinarySearchTree.create()
bst.put("one", 1)
bst.put("two", 2)
bst.put("three", 3)
bst.put("six", 6)
bst.put("ten", 10)
self.assertEqual(1, bst.get("one"))
self.assertEqual(2, bst.get("two"))
self.assertEqual(3, bst.get("three"))
self.assertTrue(bst.contains_key("one"))
self.assertTrue(bst.contains_key("two"))
self.assertEqual(5, bst.size())
self.assertFalse(bst.is_empty())
bst.delete("one")
self.assertFalse(bst.contains_key("one"))
self.assertEqual(4, bst.size())
bst.delete("ten")
self.assertFalse(bst.contains_key("ten"))
self.assertEqual(3, bst.size())
bst.delete("three")
self.assertFalse(bst.contains_key("three"))
self.assertEqual(2, bst.size())
if __name__ == '__main__':
unittest.main()Increase the unit test coverage for the binary search treeimport unittest
from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree
class BinarySearchTreeUnitTest(unittest.TestCase):
def test_binarySearchTree(self):
bst = BinarySearchTree.create()
bst.put("one", 1)
bst.put("two", 2)
bst.put("three", 3)
bst.put("six", 6)
bst.put("ten", 10)
bst.put("ten", 10)
self.assertEqual(1, bst.get("one"))
self.assertEqual(2, bst.get("two"))
self.assertEqual(3, bst.get("three"))
self.assertTrue(bst.contains_key("one"))
self.assertTrue(bst.contains_key("two"))
self.assertEqual(5, bst.size())
self.assertFalse(bst.is_empty())
bst.delete("one")
self.assertFalse(bst.contains_key("one"))
self.assertEqual(4, bst.size())
bst.delete("ten")
self.assertFalse(bst.contains_key("ten"))
self.assertEqual(3, bst.size())
bst.delete("three")
self.assertFalse(bst.contains_key("three"))
self.assertEqual(2, bst.size())
for i in range(100):
bst.put(str(i), i)
self.assertEqual(i, bst.get(str(i)))
for i in range(100):
bst.delete(str(i))
self.assertFalse(bst.contains_key(str(i)))
if __name__ == '__main__':
unittest.main() | <commit_before>import unittest
from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree
class BinarySearchTreeUnitTest(unittest.TestCase):
def test_binarySearchTree(self):
bst = BinarySearchTree.create()
bst.put("one", 1)
bst.put("two", 2)
bst.put("three", 3)
bst.put("six", 6)
bst.put("ten", 10)
self.assertEqual(1, bst.get("one"))
self.assertEqual(2, bst.get("two"))
self.assertEqual(3, bst.get("three"))
self.assertTrue(bst.contains_key("one"))
self.assertTrue(bst.contains_key("two"))
self.assertEqual(5, bst.size())
self.assertFalse(bst.is_empty())
bst.delete("one")
self.assertFalse(bst.contains_key("one"))
self.assertEqual(4, bst.size())
bst.delete("ten")
self.assertFalse(bst.contains_key("ten"))
self.assertEqual(3, bst.size())
bst.delete("three")
self.assertFalse(bst.contains_key("three"))
self.assertEqual(2, bst.size())
if __name__ == '__main__':
unittest.main()<commit_msg>Increase the unit test coverage for the binary search tree<commit_after>import unittest
from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree
class BinarySearchTreeUnitTest(unittest.TestCase):
def test_binarySearchTree(self):
bst = BinarySearchTree.create()
bst.put("one", 1)
bst.put("two", 2)
bst.put("three", 3)
bst.put("six", 6)
bst.put("ten", 10)
bst.put("ten", 10)
self.assertEqual(1, bst.get("one"))
self.assertEqual(2, bst.get("two"))
self.assertEqual(3, bst.get("three"))
self.assertTrue(bst.contains_key("one"))
self.assertTrue(bst.contains_key("two"))
self.assertEqual(5, bst.size())
self.assertFalse(bst.is_empty())
bst.delete("one")
self.assertFalse(bst.contains_key("one"))
self.assertEqual(4, bst.size())
bst.delete("ten")
self.assertFalse(bst.contains_key("ten"))
self.assertEqual(3, bst.size())
bst.delete("three")
self.assertFalse(bst.contains_key("three"))
self.assertEqual(2, bst.size())
for i in range(100):
bst.put(str(i), i)
self.assertEqual(i, bst.get(str(i)))
for i in range(100):
bst.delete(str(i))
self.assertFalse(bst.contains_key(str(i)))
if __name__ == '__main__':
unittest.main() |
c8e7400008f19f89519cbfd067c8e82f41fc503a | signac/__init__.py | signac/__init__.py | """
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE']
| """
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
from . import contrib
from . import db
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE', 'contrib', 'db']
| Put contrib and db into global API. | Put contrib and db into global API.
| Python | bsd-3-clause | csadorf/signac,csadorf/signac | """
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE']
Put contrib and db into global API. | """
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
from . import contrib
from . import db
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE', 'contrib', 'db']
| <commit_before>"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE']
<commit_msg>Put contrib and db into global API.<commit_after> | """
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
from . import contrib
from . import db
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE', 'contrib', 'db']
| """
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE']
Put contrib and db into global API."""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
from . import contrib
from . import db
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE', 'contrib', 'db']
| <commit_before>"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE']
<commit_msg>Put contrib and db into global API.<commit_after>"""
signac aids in the management, access and analysis of large-scale
computational investigations.
The framework provides a simple data model, which helps to organize
data production and post-processing as well as distribution among collaboratos.
"""
from . import contrib
from . import db
# The VERSION string represents the actual (development) version of the
# package.
VERSION = '0.1.7'
# The VERSION_TUPLE is used to identify whether signac projects, are
# required to be updated and can therefore lag behind the actual version.
VERSION_TUPLE = 0, 1, 7
__all__ = ['VERSION', 'VERSION_TUPLE', 'contrib', 'db']
|
17198f73f66190711a2df3c7b47008b2a0c50f8e | transaction_downloader/transaction_downloader.py | transaction_downloader/transaction_downloader.py | """Transaction Downloader.
Usage:
transaction-downloader auth --account=<account-name>
transaction-downloader -h | --help
transaction-downloader --version
Options:
-h --help Show this screen.
--version Show version.
--account=<account-name> Account to work with.
"""
from docopt import docopt
from pkg_resources import require
def main():
version = require("transaction-downloader")[0].version
arguments = docopt(__doc__, version=version)
print(arguments)
if __name__ == '__main__':
main()
| """Transaction Downloader.
Usage:
transaction-downloader auth --account=<account-name>
transaction-downloader -h | --help
transaction-downloader --version
Options:
-h --help Show this screen.
--version Show version.
--account=<account-name> Account to work with.
"""
import json
from docopt import docopt
from pkg_resources import require
def read_credentials(account):
credentials = {}
with open('plaid-credentials.json') as json_data:
credentials = json.load(json_data)
with open('cfg/%s.json'%account) as json_data:
credentials["account"] = {};
credentials["account"]["name"] = account
credentials["account"]["credentials"] = json.load(json_data)
return credentials
def main():
version = require("transaction-downloader")[0].version
arguments = docopt(__doc__, version=version)
print(arguments)
if __name__ == '__main__':
main()
| Add function to read in plaid + account credentials into one structure. | Add function to read in plaid + account credentials into one structure.
| Python | mit | ebridges/plaid2qif,ebridges/plaid2qif,ebridges/plaid2qif | """Transaction Downloader.
Usage:
transaction-downloader auth --account=<account-name>
transaction-downloader -h | --help
transaction-downloader --version
Options:
-h --help Show this screen.
--version Show version.
--account=<account-name> Account to work with.
"""
from docopt import docopt
from pkg_resources import require
def main():
version = require("transaction-downloader")[0].version
arguments = docopt(__doc__, version=version)
print(arguments)
if __name__ == '__main__':
main()
Add function to read in plaid + account credentials into one structure. | """Transaction Downloader.
Usage:
transaction-downloader auth --account=<account-name>
transaction-downloader -h | --help
transaction-downloader --version
Options:
-h --help Show this screen.
--version Show version.
--account=<account-name> Account to work with.
"""
import json
from docopt import docopt
from pkg_resources import require
def read_credentials(account):
    """Load Plaid API credentials merged with per-account credentials.

    Reads the shared ``plaid-credentials.json`` from the working directory
    and the account-specific ``cfg/<account>.json``, returning one dict:
    the Plaid credentials with the account data nested under ``"account"``.

    :param account: account name; selects ``cfg/<account>.json``
    :return: dict of Plaid credentials plus an ``"account"`` entry holding
             ``"name"`` and ``"credentials"``
    :raises IOError: if either credentials file cannot be opened
    """
    # No need to pre-initialize: json.load provides the base dict directly.
    with open('plaid-credentials.json') as json_data:
        credentials = json.load(json_data)
    with open('cfg/%s.json' % account) as json_data:
        # Nest the per-account data so it cannot collide with Plaid keys.
        credentials["account"] = {
            "name": account,
            "credentials": json.load(json_data),
        }
    return credentials
def main():
    """CLI entry point: parse the command line with docopt.

    The usage grammar comes from the module docstring; the string shown
    for ``--version`` is read from the installed package's metadata.
    Currently only prints the parsed arguments (subcommands not yet wired).
    """
    version = require("transaction-downloader")[0].version
    arguments = docopt(__doc__, version=version)
    print(arguments)
if __name__ == '__main__':
    main()
| <commit_before>"""Transaction Downloader.
Usage:
transaction-downloader auth --account=<account-name>
transaction-downloader -h | --help
transaction-downloader --version
Options:
-h --help Show this screen.
--version Show version.
--account=<account-name> Account to work with.
"""
from docopt import docopt
from pkg_resources import require
def main():
version = require("transaction-downloader")[0].version
arguments = docopt(__doc__, version=version)
print(arguments)
if __name__ == '__main__':
main()
<commit_msg>Add function to read in plaid + account credentials into one structure.<commit_after> | """Transaction Downloader.
Usage:
transaction-downloader auth --account=<account-name>
transaction-downloader -h | --help
transaction-downloader --version
Options:
-h --help Show this screen.
--version Show version.
--account=<account-name> Account to work with.
"""
import json
from docopt import docopt
from pkg_resources import require
def read_credentials(account):
credentials = {}
with open('plaid-credentials.json') as json_data:
credentials = json.load(json_data)
with open('cfg/%s.json'%account) as json_data:
credentials["account"] = {};
credentials["account"]["name"] = account
credentials["account"]["credentials"] = json.load(json_data)
return credentials
def main():
version = require("transaction-downloader")[0].version
arguments = docopt(__doc__, version=version)
print(arguments)
if __name__ == '__main__':
main()
| """Transaction Downloader.
Usage:
transaction-downloader auth --account=<account-name>
transaction-downloader -h | --help
transaction-downloader --version
Options:
-h --help Show this screen.
--version Show version.
--account=<account-name> Account to work with.
"""
from docopt import docopt
from pkg_resources import require
def main():
version = require("transaction-downloader")[0].version
arguments = docopt(__doc__, version=version)
print(arguments)
if __name__ == '__main__':
main()
Add function to read in plaid + account credentials into one structure."""Transaction Downloader.
Usage:
transaction-downloader auth --account=<account-name>
transaction-downloader -h | --help
transaction-downloader --version
Options:
-h --help Show this screen.
--version Show version.
--account=<account-name> Account to work with.
"""
import json
from docopt import docopt
from pkg_resources import require
def read_credentials(account):
credentials = {}
with open('plaid-credentials.json') as json_data:
credentials = json.load(json_data)
with open('cfg/%s.json'%account) as json_data:
credentials["account"] = {};
credentials["account"]["name"] = account
credentials["account"]["credentials"] = json.load(json_data)
return credentials
def main():
version = require("transaction-downloader")[0].version
arguments = docopt(__doc__, version=version)
print(arguments)
if __name__ == '__main__':
main()
| <commit_before>"""Transaction Downloader.
Usage:
transaction-downloader auth --account=<account-name>
transaction-downloader -h | --help
transaction-downloader --version
Options:
-h --help Show this screen.
--version Show version.
--account=<account-name> Account to work with.
"""
from docopt import docopt
from pkg_resources import require
def main():
version = require("transaction-downloader")[0].version
arguments = docopt(__doc__, version=version)
print(arguments)
if __name__ == '__main__':
main()
<commit_msg>Add function to read in plaid + account credentials into one structure.<commit_after>"""Transaction Downloader.
Usage:
transaction-downloader auth --account=<account-name>
transaction-downloader -h | --help
transaction-downloader --version
Options:
-h --help Show this screen.
--version Show version.
--account=<account-name> Account to work with.
"""
import json
from docopt import docopt
from pkg_resources import require
def read_credentials(account):
credentials = {}
with open('plaid-credentials.json') as json_data:
credentials = json.load(json_data)
with open('cfg/%s.json'%account) as json_data:
credentials["account"] = {};
credentials["account"]["name"] = account
credentials["account"]["credentials"] = json.load(json_data)
return credentials
def main():
version = require("transaction-downloader")[0].version
arguments = docopt(__doc__, version=version)
print(arguments)
if __name__ == '__main__':
main()
|
c9372c38fb86494f1b7d0ba32ecfafa073d6da77 | tohu/v6/utils.py | tohu/v6/utils.py | from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
| from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
    """
    Return the argument `x` itself, unchanged.

    Handy as a no-op default wherever a transformation callable
    is expected.
    """
    return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
    """
    Helper function which prints a sequence of `num` items
    produced by the random generator `gen`.

    Parameters
    ----------
    gen :
        Generator-like object supporting ``next()`` and a
        ``reset(seed)`` method.
    num : int
        Number of items to draw and print.
    sep : str
        Separator placed between items; if it contains a newline the
        header is printed on its own line.
    fmt : str
        Format spec applied to each item via ``format()``.
    seed :
        If not None (including a seed of 0), reset `gen` with it first.
    """
    # Compare against None explicitly so a legitimate seed of 0
    # (which is falsy) still triggers a reset.
    if seed is not None:
        gen.reset(seed)
    elems = [format(next(gen), fmt) for _ in range(num)]
    sep_initial = "\n\n" if '\n' in sep else " "
    print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
    """
    Build a list of simple namedtuples, useful for testing and
    debugging (especially of custom generators).

    Example
    -------
    >>> make_dummy_tuples(chars='abcd')
    [Quux(x='AA', y='aa'),
     Quux(x='BB', y='bb'),
     Quux(x='CC', y='cc'),
     Quux(x='DD', y='dd')]
    """
    Quux = namedtuple('Quux', ['x', 'y'])
    dummies = []
    for ch in chars:
        doubled = ch * 2
        dummies.append(Quux(doubled.upper(), doubled))
    return dummies
| Add helper function to create a list of namedtuples useful for testing and debugging | Add helper function to create a list of namedtuples useful for testing and debugging
| Python | mit | maxalbert/tohu | from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
Add helper function to create a list of namedtuples useful for testing and debugging | from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
"""
Helper function to create a list of namedtuples which are useful
for testing and debugging (especially of custom generators).
Example
-------
>>> make_dummy_tuples(chars='abcd')
[Quux(x='AA', y='aa'),
Quux(x='BB', y='bb'),
Quux(x='CC', y='cc'),
Quux(x='DD', y='dd')]
"""
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
| <commit_before>from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
<commit_msg>Add helper function to create a list of namedtuples useful for testing and debugging<commit_after> | from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
"""
Helper function to create a list of namedtuples which are useful
for testing and debugging (especially of custom generators).
Example
-------
>>> make_dummy_tuples(chars='abcd')
[Quux(x='AA', y='aa'),
Quux(x='BB', y='bb'),
Quux(x='CC', y='cc'),
Quux(x='DD', y='dd')]
"""
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
| from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
Add helper function to create a list of namedtuples useful for testing and debuggingfrom collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
"""
Helper function to create a list of namedtuples which are useful
for testing and debugging (especially of custom generators).
Example
-------
>>> make_dummy_tuples(chars='abcd')
[Quux(x='AA', y='aa'),
Quux(x='BB', y='bb'),
Quux(x='CC', y='cc'),
Quux(x='DD', y='dd')]
"""
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
| <commit_before>from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
<commit_msg>Add helper function to create a list of namedtuples useful for testing and debugging<commit_after>from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
"""
Helper function to create a list of namedtuples which are useful
for testing and debugging (especially of custom generators).
Example
-------
>>> make_dummy_tuples(chars='abcd')
[Quux(x='AA', y='aa'),
Quux(x='BB', y='bb'),
Quux(x='CC', y='cc'),
Quux(x='DD', y='dd')]
"""
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
|
d9adcad2b67b7e25e5997f3bfafb0208ab225fa9 | tests/integration/cattletest/core/test_proxy.py | tests/integration/cattletest/core/test_proxy.py | from common_fixtures import * # NOQA
import requests
def test_proxy(client, admin_user_client):
domain = 'releases.rancher.com'
s = admin_user_client.by_id_setting('api.proxy.whitelist')
if domain not in s.value:
s.value += ',{}'.format(domain)
admin_user_client.update(s, value=s.value)
def func():
s = admin_user_client.by_id_setting('api.proxy.whitelist')
return domain in s.activeValue
wait_for(func)
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
r = requests.get(base_url + '/proxy/{}/{}'.format(domain,
'ui/latest/humans.txt'),
headers=auth_header_map(client))
assert r.status_code == 200
assert 'Darren' in r.text
def test_aws_proxy(client):
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
host = 'ec2.us-west-2.amazonaws.com'
r = requests.post(base_url + '/proxy/{}'.format(host),
headers=auth_header_map(client))
assert r.status_code == 400
| from common_fixtures import * # NOQA
import requests
def test_proxy(client, admin_user_client):
    """Integration test: the API proxy endpoint forwards a GET to a
    whitelisted external domain and returns its response."""
    domain = 'releases.rancher.com'
    # Ensure the target domain is on the proxy whitelist setting.
    s = admin_user_client.by_id_setting('api.proxy.whitelist')
    if domain not in s.value:
        s.value += ',{}'.format(domain)
        admin_user_client.update(s, value=s.value)
    def func():
        # Re-read the setting until the updated whitelist is active.
        s = admin_user_client.by_id_setting('api.proxy.whitelist')
        return domain in s.activeValue
    wait_for(func)
    # Derive the API base URL from the schema collection link.
    base_url = client.schema.types['schema'].links['collection']
    base_url = base_url.replace('/schemas', '')
    # Proxy target is given as an explicit http:// URL in the path.
    r = requests.get(base_url + '/proxy/http://{}/{}'
                     .format(domain, 'ui/latest/humans.txt'),
                     headers=auth_header_map(client))
    assert r.status_code == 200
    assert 'Darren' in r.text
def test_aws_proxy(client):
    """Proxying a POST to this AWS host must be rejected with HTTP 400
    (presumably because the host is not on the whitelist — confirm
    against the proxy implementation)."""
    base_url = client.schema.types['schema'].links['collection']
    base_url = base_url.replace('/schemas', '')
    host = 'ec2.us-west-2.amazonaws.com'
    r = requests.post(base_url + '/proxy/{}'.format(host),
                      headers=auth_header_map(client))
    assert r.status_code == 400
| Use http for proxy test | Use http for proxy test
| Python | apache-2.0 | cjellick/cattle,vincent99/cattle,vincent99/cattle,cloudnautique/cattle,jimengliu/cattle,wlan0/cattle,rancher/cattle,cjellick/cattle,Cerfoglg/cattle,rancherio/cattle,rancherio/cattle,rancherio/cattle,wlan0/cattle,cloudnautique/cattle,jimengliu/cattle,rancher/cattle,cjellick/cattle,cloudnautique/cattle,Cerfoglg/cattle,Cerfoglg/cattle,vincent99/cattle,rancher/cattle,cloudnautique/cattle,cjellick/cattle,jimengliu/cattle,wlan0/cattle | from common_fixtures import * # NOQA
import requests
def test_proxy(client, admin_user_client):
domain = 'releases.rancher.com'
s = admin_user_client.by_id_setting('api.proxy.whitelist')
if domain not in s.value:
s.value += ',{}'.format(domain)
admin_user_client.update(s, value=s.value)
def func():
s = admin_user_client.by_id_setting('api.proxy.whitelist')
return domain in s.activeValue
wait_for(func)
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
r = requests.get(base_url + '/proxy/{}/{}'.format(domain,
'ui/latest/humans.txt'),
headers=auth_header_map(client))
assert r.status_code == 200
assert 'Darren' in r.text
def test_aws_proxy(client):
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
host = 'ec2.us-west-2.amazonaws.com'
r = requests.post(base_url + '/proxy/{}'.format(host),
headers=auth_header_map(client))
assert r.status_code == 400
Use http for proxy test | from common_fixtures import * # NOQA
import requests
def test_proxy(client, admin_user_client):
domain = 'releases.rancher.com'
s = admin_user_client.by_id_setting('api.proxy.whitelist')
if domain not in s.value:
s.value += ',{}'.format(domain)
admin_user_client.update(s, value=s.value)
def func():
s = admin_user_client.by_id_setting('api.proxy.whitelist')
return domain in s.activeValue
wait_for(func)
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
r = requests.get(base_url + '/proxy/http://{}/{}'
.format(domain, 'ui/latest/humans.txt'),
headers=auth_header_map(client))
assert r.status_code == 200
assert 'Darren' in r.text
def test_aws_proxy(client):
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
host = 'ec2.us-west-2.amazonaws.com'
r = requests.post(base_url + '/proxy/{}'.format(host),
headers=auth_header_map(client))
assert r.status_code == 400
| <commit_before>from common_fixtures import * # NOQA
import requests
def test_proxy(client, admin_user_client):
domain = 'releases.rancher.com'
s = admin_user_client.by_id_setting('api.proxy.whitelist')
if domain not in s.value:
s.value += ',{}'.format(domain)
admin_user_client.update(s, value=s.value)
def func():
s = admin_user_client.by_id_setting('api.proxy.whitelist')
return domain in s.activeValue
wait_for(func)
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
r = requests.get(base_url + '/proxy/{}/{}'.format(domain,
'ui/latest/humans.txt'),
headers=auth_header_map(client))
assert r.status_code == 200
assert 'Darren' in r.text
def test_aws_proxy(client):
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
host = 'ec2.us-west-2.amazonaws.com'
r = requests.post(base_url + '/proxy/{}'.format(host),
headers=auth_header_map(client))
assert r.status_code == 400
<commit_msg>Use http for proxy test<commit_after> | from common_fixtures import * # NOQA
import requests
def test_proxy(client, admin_user_client):
domain = 'releases.rancher.com'
s = admin_user_client.by_id_setting('api.proxy.whitelist')
if domain not in s.value:
s.value += ',{}'.format(domain)
admin_user_client.update(s, value=s.value)
def func():
s = admin_user_client.by_id_setting('api.proxy.whitelist')
return domain in s.activeValue
wait_for(func)
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
r = requests.get(base_url + '/proxy/http://{}/{}'
.format(domain, 'ui/latest/humans.txt'),
headers=auth_header_map(client))
assert r.status_code == 200
assert 'Darren' in r.text
def test_aws_proxy(client):
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
host = 'ec2.us-west-2.amazonaws.com'
r = requests.post(base_url + '/proxy/{}'.format(host),
headers=auth_header_map(client))
assert r.status_code == 400
| from common_fixtures import * # NOQA
import requests
def test_proxy(client, admin_user_client):
domain = 'releases.rancher.com'
s = admin_user_client.by_id_setting('api.proxy.whitelist')
if domain not in s.value:
s.value += ',{}'.format(domain)
admin_user_client.update(s, value=s.value)
def func():
s = admin_user_client.by_id_setting('api.proxy.whitelist')
return domain in s.activeValue
wait_for(func)
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
r = requests.get(base_url + '/proxy/{}/{}'.format(domain,
'ui/latest/humans.txt'),
headers=auth_header_map(client))
assert r.status_code == 200
assert 'Darren' in r.text
def test_aws_proxy(client):
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
host = 'ec2.us-west-2.amazonaws.com'
r = requests.post(base_url + '/proxy/{}'.format(host),
headers=auth_header_map(client))
assert r.status_code == 400
Use http for proxy testfrom common_fixtures import * # NOQA
import requests
def test_proxy(client, admin_user_client):
domain = 'releases.rancher.com'
s = admin_user_client.by_id_setting('api.proxy.whitelist')
if domain not in s.value:
s.value += ',{}'.format(domain)
admin_user_client.update(s, value=s.value)
def func():
s = admin_user_client.by_id_setting('api.proxy.whitelist')
return domain in s.activeValue
wait_for(func)
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
r = requests.get(base_url + '/proxy/http://{}/{}'
.format(domain, 'ui/latest/humans.txt'),
headers=auth_header_map(client))
assert r.status_code == 200
assert 'Darren' in r.text
def test_aws_proxy(client):
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
host = 'ec2.us-west-2.amazonaws.com'
r = requests.post(base_url + '/proxy/{}'.format(host),
headers=auth_header_map(client))
assert r.status_code == 400
| <commit_before>from common_fixtures import * # NOQA
import requests
def test_proxy(client, admin_user_client):
domain = 'releases.rancher.com'
s = admin_user_client.by_id_setting('api.proxy.whitelist')
if domain not in s.value:
s.value += ',{}'.format(domain)
admin_user_client.update(s, value=s.value)
def func():
s = admin_user_client.by_id_setting('api.proxy.whitelist')
return domain in s.activeValue
wait_for(func)
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
r = requests.get(base_url + '/proxy/{}/{}'.format(domain,
'ui/latest/humans.txt'),
headers=auth_header_map(client))
assert r.status_code == 200
assert 'Darren' in r.text
def test_aws_proxy(client):
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
host = 'ec2.us-west-2.amazonaws.com'
r = requests.post(base_url + '/proxy/{}'.format(host),
headers=auth_header_map(client))
assert r.status_code == 400
<commit_msg>Use http for proxy test<commit_after>from common_fixtures import * # NOQA
import requests
def test_proxy(client, admin_user_client):
domain = 'releases.rancher.com'
s = admin_user_client.by_id_setting('api.proxy.whitelist')
if domain not in s.value:
s.value += ',{}'.format(domain)
admin_user_client.update(s, value=s.value)
def func():
s = admin_user_client.by_id_setting('api.proxy.whitelist')
return domain in s.activeValue
wait_for(func)
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
r = requests.get(base_url + '/proxy/http://{}/{}'
.format(domain, 'ui/latest/humans.txt'),
headers=auth_header_map(client))
assert r.status_code == 200
assert 'Darren' in r.text
def test_aws_proxy(client):
base_url = client.schema.types['schema'].links['collection']
base_url = base_url.replace('/schemas', '')
host = 'ec2.us-west-2.amazonaws.com'
r = requests.post(base_url + '/proxy/{}'.format(host),
headers=auth_header_map(client))
assert r.status_code == 400
|
e39bcde813d35c8079743fbed7e77f2c8e4b4596 | examples/mainwindow.py | examples/mainwindow.py | import sys
from os.path import join, dirname, abspath
from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox
import qtmodern.styles
import qtmodern.windows
_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi(_UI, self)
@Slot()
def on_pushButton_clicked(self):
self.close()
@Slot()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
qtmodern.styles.dark(app)
mw = qtmodern.windows.ModernWindow(MainWindow())
mw.show()
sys.exit(app.exec_())
| import sys
from os.path import join, dirname, abspath
from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox
import qtmodern.styles
import qtmodern.windows
_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
self.ui = uic.loadUi(_UI, self)
self.ui.actionLight.triggered.connect(self.lightTheme)
self.ui.actionDark.triggered.connect(self.darkTheme)
def lightTheme(self):
qtmodern.styles.light(QApplication.instance())
def darkTheme(self):
qtmodern.styles.dark(QApplication.instance())
@Slot()
def on_pushButton_clicked(self):
self.close()
@Slot()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
qtmodern.styles.dark(app)
mw = qtmodern.windows.ModernWindow(MainWindow())
mw.show()
sys.exit(app.exec_())
| Update example to switch between light and dark themes | Update example to switch between light and dark themes | Python | mit | gmarull/qtmodern | import sys
from os.path import join, dirname, abspath
from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox
import qtmodern.styles
import qtmodern.windows
_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi(_UI, self)
@Slot()
def on_pushButton_clicked(self):
self.close()
@Slot()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
qtmodern.styles.dark(app)
mw = qtmodern.windows.ModernWindow(MainWindow())
mw.show()
sys.exit(app.exec_())
Update example to switch between light and dark themes | import sys
from os.path import join, dirname, abspath
from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox
import qtmodern.styles
import qtmodern.windows
_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
self.ui = uic.loadUi(_UI, self)
self.ui.actionLight.triggered.connect(self.lightTheme)
self.ui.actionDark.triggered.connect(self.darkTheme)
def lightTheme(self):
qtmodern.styles.light(QApplication.instance())
def darkTheme(self):
qtmodern.styles.dark(QApplication.instance())
@Slot()
def on_pushButton_clicked(self):
self.close()
@Slot()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
qtmodern.styles.dark(app)
mw = qtmodern.windows.ModernWindow(MainWindow())
mw.show()
sys.exit(app.exec_())
| <commit_before>import sys
from os.path import join, dirname, abspath
from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox
import qtmodern.styles
import qtmodern.windows
_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi(_UI, self)
@Slot()
def on_pushButton_clicked(self):
self.close()
@Slot()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
qtmodern.styles.dark(app)
mw = qtmodern.windows.ModernWindow(MainWindow())
mw.show()
sys.exit(app.exec_())
<commit_msg>Update example to switch between light and dark themes<commit_after> | import sys
from os.path import join, dirname, abspath
from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox
import qtmodern.styles
import qtmodern.windows
_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
self.ui = uic.loadUi(_UI, self)
self.ui.actionLight.triggered.connect(self.lightTheme)
self.ui.actionDark.triggered.connect(self.darkTheme)
def lightTheme(self):
qtmodern.styles.light(QApplication.instance())
def darkTheme(self):
qtmodern.styles.dark(QApplication.instance())
@Slot()
def on_pushButton_clicked(self):
self.close()
@Slot()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
qtmodern.styles.dark(app)
mw = qtmodern.windows.ModernWindow(MainWindow())
mw.show()
sys.exit(app.exec_())
| import sys
from os.path import join, dirname, abspath
from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox
import qtmodern.styles
import qtmodern.windows
_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi(_UI, self)
@Slot()
def on_pushButton_clicked(self):
self.close()
@Slot()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
qtmodern.styles.dark(app)
mw = qtmodern.windows.ModernWindow(MainWindow())
mw.show()
sys.exit(app.exec_())
Update example to switch between light and dark themesimport sys
from os.path import join, dirname, abspath
from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox
import qtmodern.styles
import qtmodern.windows
_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
self.ui = uic.loadUi(_UI, self)
self.ui.actionLight.triggered.connect(self.lightTheme)
self.ui.actionDark.triggered.connect(self.darkTheme)
def lightTheme(self):
qtmodern.styles.light(QApplication.instance())
def darkTheme(self):
qtmodern.styles.dark(QApplication.instance())
@Slot()
def on_pushButton_clicked(self):
self.close()
@Slot()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
qtmodern.styles.dark(app)
mw = qtmodern.windows.ModernWindow(MainWindow())
mw.show()
sys.exit(app.exec_())
| <commit_before>import sys
from os.path import join, dirname, abspath
from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox
import qtmodern.styles
import qtmodern.windows
_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi(_UI, self)
@Slot()
def on_pushButton_clicked(self):
self.close()
@Slot()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
qtmodern.styles.dark(app)
mw = qtmodern.windows.ModernWindow(MainWindow())
mw.show()
sys.exit(app.exec_())
<commit_msg>Update example to switch between light and dark themes<commit_after>import sys
from os.path import join, dirname, abspath
from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox
import qtmodern.styles
import qtmodern.windows
_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
self.ui = uic.loadUi(_UI, self)
self.ui.actionLight.triggered.connect(self.lightTheme)
self.ui.actionDark.triggered.connect(self.darkTheme)
def lightTheme(self):
qtmodern.styles.light(QApplication.instance())
def darkTheme(self):
qtmodern.styles.dark(QApplication.instance())
@Slot()
def on_pushButton_clicked(self):
self.close()
@Slot()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
qtmodern.styles.dark(app)
mw = qtmodern.windows.ModernWindow(MainWindow())
mw.show()
sys.exit(app.exec_())
|
990d98b323e21d8824b2aead8700f56d66fe6ba3 | plasmapy/utils/__init__.py | plasmapy/utils/__init__.py | from .checks import (check_quantity,
check_relativistic,
_check_quantity,
_check_relativistic)
from .exceptions import (PlasmaPyError,
PhysicsError,
RelativityError,
AtomicError,
MissingAtomicDataError,
ChargeError,
InvalidIonError,
InvalidIsotopeError,
InvalidElementError,
InvalidParticleError,
DataStandardError,
PlasmaPyWarning,
PhysicsWarning,
CouplingWarning,
RelativityWarning,
AtomicWarning,
MissingAtomicDataWarning)
from .pytest_helpers import (
run_test,
run_test_equivalent_calls,
call_string,
InconsistentTypeError,
UnexpectedResultError,
UnexpectedExceptionError,
RunTestError,
IncorrectResultError,
MissingExceptionError,
MissingWarningError,
assert_can_handle_nparray,
)
import roman
| from .checks import (check_quantity,
check_relativistic,
_check_quantity,
_check_relativistic)
from .exceptions import (PlasmaPyError,
PhysicsError,
RelativityError,
AtomicError,
MissingAtomicDataError,
ChargeError,
InvalidIonError,
InvalidIsotopeError,
InvalidElementError,
InvalidParticleError,
DataStandardError,
PlasmaPyWarning,
PhysicsWarning,
CouplingWarning,
RelativityWarning,
AtomicWarning,
MissingAtomicDataWarning)
from .pytest_helpers import (
run_test,
run_test_equivalent_calls,
call_string,
InconsistentTypeError,
UnexpectedResultError,
UnexpectedExceptionError,
RunTestError,
IncorrectResultError,
MissingExceptionError,
MissingWarningError,
assert_can_handle_nparray,
)
from . import roman
| Fix AppVeyor build or break it in a different way | Fix AppVeyor build or break it in a different way
| Python | bsd-3-clause | StanczakDominik/PlasmaPy | from .checks import (check_quantity,
check_relativistic,
_check_quantity,
_check_relativistic)
from .exceptions import (PlasmaPyError,
PhysicsError,
RelativityError,
AtomicError,
MissingAtomicDataError,
ChargeError,
InvalidIonError,
InvalidIsotopeError,
InvalidElementError,
InvalidParticleError,
DataStandardError,
PlasmaPyWarning,
PhysicsWarning,
CouplingWarning,
RelativityWarning,
AtomicWarning,
MissingAtomicDataWarning)
from .pytest_helpers import (
run_test,
run_test_equivalent_calls,
call_string,
InconsistentTypeError,
UnexpectedResultError,
UnexpectedExceptionError,
RunTestError,
IncorrectResultError,
MissingExceptionError,
MissingWarningError,
assert_can_handle_nparray,
)
import roman
Fix AppVeyor build or break it in a different way | from .checks import (check_quantity,
check_relativistic,
_check_quantity,
_check_relativistic)
from .exceptions import (PlasmaPyError,
PhysicsError,
RelativityError,
AtomicError,
MissingAtomicDataError,
ChargeError,
InvalidIonError,
InvalidIsotopeError,
InvalidElementError,
InvalidParticleError,
DataStandardError,
PlasmaPyWarning,
PhysicsWarning,
CouplingWarning,
RelativityWarning,
AtomicWarning,
MissingAtomicDataWarning)
from .pytest_helpers import (
run_test,
run_test_equivalent_calls,
call_string,
InconsistentTypeError,
UnexpectedResultError,
UnexpectedExceptionError,
RunTestError,
IncorrectResultError,
MissingExceptionError,
MissingWarningError,
assert_can_handle_nparray,
)
from . import roman
| <commit_before>from .checks import (check_quantity,
check_relativistic,
_check_quantity,
_check_relativistic)
from .exceptions import (PlasmaPyError,
PhysicsError,
RelativityError,
AtomicError,
MissingAtomicDataError,
ChargeError,
InvalidIonError,
InvalidIsotopeError,
InvalidElementError,
InvalidParticleError,
DataStandardError,
PlasmaPyWarning,
PhysicsWarning,
CouplingWarning,
RelativityWarning,
AtomicWarning,
MissingAtomicDataWarning)
from .pytest_helpers import (
run_test,
run_test_equivalent_calls,
call_string,
InconsistentTypeError,
UnexpectedResultError,
UnexpectedExceptionError,
RunTestError,
IncorrectResultError,
MissingExceptionError,
MissingWarningError,
assert_can_handle_nparray,
)
import roman
<commit_msg>Fix AppVeyor build or break it in a different way<commit_after> | from .checks import (check_quantity,
check_relativistic,
_check_quantity,
_check_relativistic)
from .exceptions import (PlasmaPyError,
PhysicsError,
RelativityError,
AtomicError,
MissingAtomicDataError,
ChargeError,
InvalidIonError,
InvalidIsotopeError,
InvalidElementError,
InvalidParticleError,
DataStandardError,
PlasmaPyWarning,
PhysicsWarning,
CouplingWarning,
RelativityWarning,
AtomicWarning,
MissingAtomicDataWarning)
from .pytest_helpers import (
run_test,
run_test_equivalent_calls,
call_string,
InconsistentTypeError,
UnexpectedResultError,
UnexpectedExceptionError,
RunTestError,
IncorrectResultError,
MissingExceptionError,
MissingWarningError,
assert_can_handle_nparray,
)
from . import roman
| from .checks import (check_quantity,
check_relativistic,
_check_quantity,
_check_relativistic)
from .exceptions import (PlasmaPyError,
PhysicsError,
RelativityError,
AtomicError,
MissingAtomicDataError,
ChargeError,
InvalidIonError,
InvalidIsotopeError,
InvalidElementError,
InvalidParticleError,
DataStandardError,
PlasmaPyWarning,
PhysicsWarning,
CouplingWarning,
RelativityWarning,
AtomicWarning,
MissingAtomicDataWarning)
from .pytest_helpers import (
run_test,
run_test_equivalent_calls,
call_string,
InconsistentTypeError,
UnexpectedResultError,
UnexpectedExceptionError,
RunTestError,
IncorrectResultError,
MissingExceptionError,
MissingWarningError,
assert_can_handle_nparray,
)
import roman
Fix AppVeyor build or break it in a different wayfrom .checks import (check_quantity,
check_relativistic,
_check_quantity,
_check_relativistic)
from .exceptions import (PlasmaPyError,
PhysicsError,
RelativityError,
AtomicError,
MissingAtomicDataError,
ChargeError,
InvalidIonError,
InvalidIsotopeError,
InvalidElementError,
InvalidParticleError,
DataStandardError,
PlasmaPyWarning,
PhysicsWarning,
CouplingWarning,
RelativityWarning,
AtomicWarning,
MissingAtomicDataWarning)
from .pytest_helpers import (
run_test,
run_test_equivalent_calls,
call_string,
InconsistentTypeError,
UnexpectedResultError,
UnexpectedExceptionError,
RunTestError,
IncorrectResultError,
MissingExceptionError,
MissingWarningError,
assert_can_handle_nparray,
)
from . import roman
| <commit_before>from .checks import (check_quantity,
check_relativistic,
_check_quantity,
_check_relativistic)
from .exceptions import (PlasmaPyError,
PhysicsError,
RelativityError,
AtomicError,
MissingAtomicDataError,
ChargeError,
InvalidIonError,
InvalidIsotopeError,
InvalidElementError,
InvalidParticleError,
DataStandardError,
PlasmaPyWarning,
PhysicsWarning,
CouplingWarning,
RelativityWarning,
AtomicWarning,
MissingAtomicDataWarning)
from .pytest_helpers import (
run_test,
run_test_equivalent_calls,
call_string,
InconsistentTypeError,
UnexpectedResultError,
UnexpectedExceptionError,
RunTestError,
IncorrectResultError,
MissingExceptionError,
MissingWarningError,
assert_can_handle_nparray,
)
import roman
<commit_msg>Fix AppVeyor build or break it in a different way<commit_after>from .checks import (check_quantity,
check_relativistic,
_check_quantity,
_check_relativistic)
from .exceptions import (PlasmaPyError,
PhysicsError,
RelativityError,
AtomicError,
MissingAtomicDataError,
ChargeError,
InvalidIonError,
InvalidIsotopeError,
InvalidElementError,
InvalidParticleError,
DataStandardError,
PlasmaPyWarning,
PhysicsWarning,
CouplingWarning,
RelativityWarning,
AtomicWarning,
MissingAtomicDataWarning)
from .pytest_helpers import (
run_test,
run_test_equivalent_calls,
call_string,
InconsistentTypeError,
UnexpectedResultError,
UnexpectedExceptionError,
RunTestError,
IncorrectResultError,
MissingExceptionError,
MissingWarningError,
assert_can_handle_nparray,
)
from . import roman
|
8da480a92f3e27807275868c27cb41cbde8504d8 | neo/test/rawiotest/test_alphaomegarawio.py | neo/test/rawiotest/test_alphaomegarawio.py | """
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
| """
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import logging
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
logging.getLogger().setLevel(logging.INFO)
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
| Set logging level higher so we don't spam tests with debug messages | Set logging level higher so we don't spam tests with debug messages
| Python | bsd-3-clause | INM-6/python-neo,apdavison/python-neo,JuliaSprenger/python-neo,NeuralEnsemble/python-neo,samuelgarcia/python-neo | """
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
Set logging level higher so we don't spam tests with debug messages | """
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import logging
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
logging.getLogger().setLevel(logging.INFO)
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
| <commit_before>"""
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
<commit_msg>Set logging level higher so we don't spam tests with debug messages<commit_after> | """
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import logging
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
logging.getLogger().setLevel(logging.INFO)
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
| """
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
Set logging level higher so we don't spam tests with debug messages"""
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import logging
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
logging.getLogger().setLevel(logging.INFO)
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
| <commit_before>"""
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
<commit_msg>Set logging level higher so we don't spam tests with debug messages<commit_after>"""
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import logging
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
logging.getLogger().setLevel(logging.INFO)
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
|
369adf5a3a303612edf9f0169c7b37b7c711a852 | frappe/website/page_renderers/web_page.py | frappe/website/page_renderers/web_page.py | import frappe
class WebPage(object):
def __init__(self, path=None, http_status_code=None):
self.headers = None
self.http_status_code = http_status_code or 200
if not path:
path = frappe.local.request.path
self.path = path.strip('/ ')
self.basepath = None
self.basename = None
self.name = None
self.route = None
self.file_dir = None
def can_render(self):
pass
def render(self):
pass
| import frappe
class WebPage(object):
def __init__(self, path=None, http_status_code=None):
self.headers = None
self.http_status_code = http_status_code or 200
if not path:
path = frappe.local.request.path
self.path = path.strip('/ ')
self.basepath = ''
self.basename = ''
self.name = ''
self.route = ''
self.file_dir = None
def can_render(self):
pass
def render(self):
pass
| Set default value as empty string | fix: Set default value as empty string
| Python | mit | frappe/frappe,mhbu50/frappe,frappe/frappe,StrellaGroup/frappe,frappe/frappe,almeidapaulopt/frappe,yashodhank/frappe,almeidapaulopt/frappe,mhbu50/frappe,yashodhank/frappe,StrellaGroup/frappe,mhbu50/frappe,almeidapaulopt/frappe,yashodhank/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,mhbu50/frappe,yashodhank/frappe | import frappe
class WebPage(object):
def __init__(self, path=None, http_status_code=None):
self.headers = None
self.http_status_code = http_status_code or 200
if not path:
path = frappe.local.request.path
self.path = path.strip('/ ')
self.basepath = None
self.basename = None
self.name = None
self.route = None
self.file_dir = None
def can_render(self):
pass
def render(self):
pass
fix: Set default value as empty string | import frappe
class WebPage(object):
def __init__(self, path=None, http_status_code=None):
self.headers = None
self.http_status_code = http_status_code or 200
if not path:
path = frappe.local.request.path
self.path = path.strip('/ ')
self.basepath = ''
self.basename = ''
self.name = ''
self.route = ''
self.file_dir = None
def can_render(self):
pass
def render(self):
pass
| <commit_before>import frappe
class WebPage(object):
def __init__(self, path=None, http_status_code=None):
self.headers = None
self.http_status_code = http_status_code or 200
if not path:
path = frappe.local.request.path
self.path = path.strip('/ ')
self.basepath = None
self.basename = None
self.name = None
self.route = None
self.file_dir = None
def can_render(self):
pass
def render(self):
pass
<commit_msg>fix: Set default value as empty string<commit_after> | import frappe
class WebPage(object):
def __init__(self, path=None, http_status_code=None):
self.headers = None
self.http_status_code = http_status_code or 200
if not path:
path = frappe.local.request.path
self.path = path.strip('/ ')
self.basepath = ''
self.basename = ''
self.name = ''
self.route = ''
self.file_dir = None
def can_render(self):
pass
def render(self):
pass
| import frappe
class WebPage(object):
def __init__(self, path=None, http_status_code=None):
self.headers = None
self.http_status_code = http_status_code or 200
if not path:
path = frappe.local.request.path
self.path = path.strip('/ ')
self.basepath = None
self.basename = None
self.name = None
self.route = None
self.file_dir = None
def can_render(self):
pass
def render(self):
pass
fix: Set default value as empty stringimport frappe
class WebPage(object):
def __init__(self, path=None, http_status_code=None):
self.headers = None
self.http_status_code = http_status_code or 200
if not path:
path = frappe.local.request.path
self.path = path.strip('/ ')
self.basepath = ''
self.basename = ''
self.name = ''
self.route = ''
self.file_dir = None
def can_render(self):
pass
def render(self):
pass
| <commit_before>import frappe
class WebPage(object):
def __init__(self, path=None, http_status_code=None):
self.headers = None
self.http_status_code = http_status_code or 200
if not path:
path = frappe.local.request.path
self.path = path.strip('/ ')
self.basepath = None
self.basename = None
self.name = None
self.route = None
self.file_dir = None
def can_render(self):
pass
def render(self):
pass
<commit_msg>fix: Set default value as empty string<commit_after>import frappe
class WebPage(object):
def __init__(self, path=None, http_status_code=None):
self.headers = None
self.http_status_code = http_status_code or 200
if not path:
path = frappe.local.request.path
self.path = path.strip('/ ')
self.basepath = ''
self.basename = ''
self.name = ''
self.route = ''
self.file_dir = None
def can_render(self):
pass
def render(self):
pass
|
5c6dd036e9fc14d04805a0f31af5a9c28fe51cf5 | tx_salaries/management/commands/generate_transformer_hash.py | tx_salaries/management/commands/generate_transformer_hash.py | from django.core.management.base import BaseCommand
from optparse import make_option
from ...utils import transformer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--sheet', action='store', dest='sheet', default=None,
help='Sheet name'),
make_option('--row', action='store', dest='label_row', default=1,
help='Location of the row of labels, defaults to 1'),
)
def handle(self, filename, label_row=1, sheet=None, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename, sheet=sheet)
for i in range(1, int(label_row)):
reader.next()
labels = reader.next()
print transformer.generate_key(labels)
| from django.core.management.base import BaseCommand
from optparse import make_option
from ...utils import transformer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--sheet', action='store', dest='sheet', default=None,
help='Sheet name'),
make_option('--row', action='store', dest='label_row', default=1,
help='Location of the row of labels, defaults to 1'),
)
def handle(self, filename, label_row=1, sheet=None, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename, sheet=sheet)
for i in range(1, int(label_row)):
reader.next()
labels = reader.next()
transformer_key = transformer.generate_key(labels)
if transformer_key in transformer.TRANSFORMERS.keys():
print transformer_key + ' (exists)'
else:
print transformer_key
| Add message if transformer_hash already exists | Add message if transformer_hash already exists
| Python | apache-2.0 | texastribune/tx_salaries,texastribune/tx_salaries | from django.core.management.base import BaseCommand
from optparse import make_option
from ...utils import transformer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--sheet', action='store', dest='sheet', default=None,
help='Sheet name'),
make_option('--row', action='store', dest='label_row', default=1,
help='Location of the row of labels, defaults to 1'),
)
def handle(self, filename, label_row=1, sheet=None, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename, sheet=sheet)
for i in range(1, int(label_row)):
reader.next()
labels = reader.next()
print transformer.generate_key(labels)
Add message if transformer_hash already exists | from django.core.management.base import BaseCommand
from optparse import make_option
from ...utils import transformer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--sheet', action='store', dest='sheet', default=None,
help='Sheet name'),
make_option('--row', action='store', dest='label_row', default=1,
help='Location of the row of labels, defaults to 1'),
)
def handle(self, filename, label_row=1, sheet=None, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename, sheet=sheet)
for i in range(1, int(label_row)):
reader.next()
labels = reader.next()
transformer_key = transformer.generate_key(labels)
if transformer_key in transformer.TRANSFORMERS.keys():
print transformer_key + ' (exists)'
else:
print transformer_key
| <commit_before>from django.core.management.base import BaseCommand
from optparse import make_option
from ...utils import transformer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--sheet', action='store', dest='sheet', default=None,
help='Sheet name'),
make_option('--row', action='store', dest='label_row', default=1,
help='Location of the row of labels, defaults to 1'),
)
def handle(self, filename, label_row=1, sheet=None, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename, sheet=sheet)
for i in range(1, int(label_row)):
reader.next()
labels = reader.next()
print transformer.generate_key(labels)
<commit_msg>Add message if transformer_hash already exists<commit_after> | from django.core.management.base import BaseCommand
from optparse import make_option
from ...utils import transformer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--sheet', action='store', dest='sheet', default=None,
help='Sheet name'),
make_option('--row', action='store', dest='label_row', default=1,
help='Location of the row of labels, defaults to 1'),
)
def handle(self, filename, label_row=1, sheet=None, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename, sheet=sheet)
for i in range(1, int(label_row)):
reader.next()
labels = reader.next()
transformer_key = transformer.generate_key(labels)
if transformer_key in transformer.TRANSFORMERS.keys():
print transformer_key + ' (exists)'
else:
print transformer_key
| from django.core.management.base import BaseCommand
from optparse import make_option
from ...utils import transformer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--sheet', action='store', dest='sheet', default=None,
help='Sheet name'),
make_option('--row', action='store', dest='label_row', default=1,
help='Location of the row of labels, defaults to 1'),
)
def handle(self, filename, label_row=1, sheet=None, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename, sheet=sheet)
for i in range(1, int(label_row)):
reader.next()
labels = reader.next()
print transformer.generate_key(labels)
Add message if transformer_hash already existsfrom django.core.management.base import BaseCommand
from optparse import make_option
from ...utils import transformer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--sheet', action='store', dest='sheet', default=None,
help='Sheet name'),
make_option('--row', action='store', dest='label_row', default=1,
help='Location of the row of labels, defaults to 1'),
)
def handle(self, filename, label_row=1, sheet=None, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename, sheet=sheet)
for i in range(1, int(label_row)):
reader.next()
labels = reader.next()
transformer_key = transformer.generate_key(labels)
if transformer_key in transformer.TRANSFORMERS.keys():
print transformer_key + ' (exists)'
else:
print transformer_key
| <commit_before>from django.core.management.base import BaseCommand
from optparse import make_option
from ...utils import transformer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--sheet', action='store', dest='sheet', default=None,
help='Sheet name'),
make_option('--row', action='store', dest='label_row', default=1,
help='Location of the row of labels, defaults to 1'),
)
def handle(self, filename, label_row=1, sheet=None, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename, sheet=sheet)
for i in range(1, int(label_row)):
reader.next()
labels = reader.next()
print transformer.generate_key(labels)
<commit_msg>Add message if transformer_hash already exists<commit_after>from django.core.management.base import BaseCommand
from optparse import make_option
from ...utils import transformer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--sheet', action='store', dest='sheet', default=None,
help='Sheet name'),
make_option('--row', action='store', dest='label_row', default=1,
help='Location of the row of labels, defaults to 1'),
)
def handle(self, filename, label_row=1, sheet=None, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename, sheet=sheet)
for i in range(1, int(label_row)):
reader.next()
labels = reader.next()
transformer_key = transformer.generate_key(labels)
if transformer_key in transformer.TRANSFORMERS.keys():
print transformer_key + ' (exists)'
else:
print transformer_key
|
3c742914bd032648665f9069456d78c0a03e5568 | bluebottle/projects/documents.py | bluebottle/projects/documents.py | from django_elasticsearch_dsl import DocType, Index
from bluebottle.projects.models import Project
# The name of your index
project = Index('projects')
# See Elasticsearch Indices API reference for available settings
project.settings(
number_of_shards=1,
number_of_replicas=0
)
@project.doc_type
class ProjectDocument(DocType):
class Meta:
model = Project
fields = [
'title',
'story',
'pitch',
]
| from django_elasticsearch_dsl import DocType
from bluebottle.projects.models import Project
from bluebottle.utils.documents import MultiTenantIndex
# The name of your index
project = MultiTenantIndex('projects')
# See Elasticsearch Indices API reference for available settings
project.settings(
number_of_shards=1,
number_of_replicas=0
)
@project.doc_type
class ProjectDocument(DocType):
class Meta:
model = Project
fields = [
'title',
'story',
'pitch',
]
| Use a different index for different tenants | Use a different index for different tenants
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | from django_elasticsearch_dsl import DocType, Index
from bluebottle.projects.models import Project
# The name of your index
project = Index('projects')
# See Elasticsearch Indices API reference for available settings
project.settings(
number_of_shards=1,
number_of_replicas=0
)
@project.doc_type
class ProjectDocument(DocType):
class Meta:
model = Project
fields = [
'title',
'story',
'pitch',
]
Use a different index for different tenants | from django_elasticsearch_dsl import DocType
from bluebottle.projects.models import Project
from bluebottle.utils.documents import MultiTenantIndex
# The name of your index
project = MultiTenantIndex('projects')
# See Elasticsearch Indices API reference for available settings
project.settings(
number_of_shards=1,
number_of_replicas=0
)
@project.doc_type
class ProjectDocument(DocType):
class Meta:
model = Project
fields = [
'title',
'story',
'pitch',
]
| <commit_before>from django_elasticsearch_dsl import DocType, Index
from bluebottle.projects.models import Project
# The name of your index
project = Index('projects')
# See Elasticsearch Indices API reference for available settings
project.settings(
number_of_shards=1,
number_of_replicas=0
)
@project.doc_type
class ProjectDocument(DocType):
class Meta:
model = Project
fields = [
'title',
'story',
'pitch',
]
<commit_msg>Use a different index for different tenants<commit_after> | from django_elasticsearch_dsl import DocType
from bluebottle.projects.models import Project
from bluebottle.utils.documents import MultiTenantIndex
# The name of your index
project = MultiTenantIndex('projects')
# See Elasticsearch Indices API reference for available settings
project.settings(
number_of_shards=1,
number_of_replicas=0
)
@project.doc_type
class ProjectDocument(DocType):
class Meta:
model = Project
fields = [
'title',
'story',
'pitch',
]
| from django_elasticsearch_dsl import DocType, Index
from bluebottle.projects.models import Project
# The name of your index
project = Index('projects')
# See Elasticsearch Indices API reference for available settings
project.settings(
number_of_shards=1,
number_of_replicas=0
)
@project.doc_type
class ProjectDocument(DocType):
class Meta:
model = Project
fields = [
'title',
'story',
'pitch',
]
Use a different index for different tenantsfrom django_elasticsearch_dsl import DocType
from bluebottle.projects.models import Project
from bluebottle.utils.documents import MultiTenantIndex
# The name of your index
project = MultiTenantIndex('projects')
# See Elasticsearch Indices API reference for available settings
project.settings(
number_of_shards=1,
number_of_replicas=0
)
@project.doc_type
class ProjectDocument(DocType):
class Meta:
model = Project
fields = [
'title',
'story',
'pitch',
]
| <commit_before>from django_elasticsearch_dsl import DocType, Index
from bluebottle.projects.models import Project
# The name of your index
project = Index('projects')
# See Elasticsearch Indices API reference for available settings
project.settings(
number_of_shards=1,
number_of_replicas=0
)
@project.doc_type
class ProjectDocument(DocType):
class Meta:
model = Project
fields = [
'title',
'story',
'pitch',
]
<commit_msg>Use a different index for different tenants<commit_after>from django_elasticsearch_dsl import DocType
from bluebottle.projects.models import Project
from bluebottle.utils.documents import MultiTenantIndex
# The name of your index
project = MultiTenantIndex('projects')
# See Elasticsearch Indices API reference for available settings
project.settings(
number_of_shards=1,
number_of_replicas=0
)
@project.doc_type
class ProjectDocument(DocType):
class Meta:
model = Project
fields = [
'title',
'story',
'pitch',
]
|
3f81676d8bc39b459d98a1a91b9ced97be58451d | celestial/exoplanets_importer.py | celestial/exoplanets_importer.py | import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError
class ExoplanetsImporter:
@staticmethod
def run(filename = None):
if filename!=None:
csv_data = open(filename)
else:
csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
rows = csv.reader(csv_data)
headers = {}
got_headers = False
for row in rows:
if got_headers == 0:
# Store headers
colnum = 0
for col in row:
headers[col] = colnum
colnum += 1
got_headers = True
else:
# Find and store system data
stardata = {
'name': row[headers['STAR']],
'temperature': row[headers['TEFF']] or None
}
try:
system, created = SolarSystem.objects.get_or_create(**stardata)
except ValidationError:
print stardata
raise
# Find and store planet data
planetdata = {
'name': row[headers['NAME']],
'radius': row[headers['R']] or None,
#'temperature': row[headers['NAME']],
'semi_major_axis': row[headers['A']],
'solar_system': system
}
try:
planet, created = Planet.objects.get_or_create(**planetdata)
except ValidationError:
print planetdata
raise
| import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError
class ExoplanetsImporter:
@staticmethod
def run(filename = None):
if filename!=None:
csv_data = open(filename)
else:
csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
rows = csv.reader(csv_data)
headers = {}
got_headers = False
for row in rows:
if got_headers == 0:
# Store headers
colnum = 0
for col in row:
headers[col] = colnum
colnum += 1
got_headers = True
else:
# Find and store system data
try:
system, created = SolarSystem.objects.get_or_create(name = row[headers['STAR']])
system.temperature = row[headers['TEFF']] or None
system.save()
except ValidationError:
print stardata
raise
# Find and store planet data
try:
planet, created = Planet.objects.get_or_create(name = row[headers['NAME']], solar_system = system)
planet.radius = row[headers['R']] or None
planet.semi_major_axis = row[headers['A']]
planet.save()
except ValidationError:
print planetdata
raise
| Refactor importer slightly to avoid creation problems | Refactor importer slightly to avoid creation problems
| Python | mit | Floppy/kepler-explorer,Floppy/kepler-explorer,Floppy/kepler-explorer | import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError
class ExoplanetsImporter:
@staticmethod
def run(filename = None):
if filename!=None:
csv_data = open(filename)
else:
csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
rows = csv.reader(csv_data)
headers = {}
got_headers = False
for row in rows:
if got_headers == 0:
# Store headers
colnum = 0
for col in row:
headers[col] = colnum
colnum += 1
got_headers = True
else:
# Find and store system data
stardata = {
'name': row[headers['STAR']],
'temperature': row[headers['TEFF']] or None
}
try:
system, created = SolarSystem.objects.get_or_create(**stardata)
except ValidationError:
print stardata
raise
# Find and store planet data
planetdata = {
'name': row[headers['NAME']],
'radius': row[headers['R']] or None,
#'temperature': row[headers['NAME']],
'semi_major_axis': row[headers['A']],
'solar_system': system
}
try:
planet, created = Planet.objects.get_or_create(**planetdata)
except ValidationError:
print planetdata
raise
Refactor importer slightly to avoid creation problems | import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError
class ExoplanetsImporter:
@staticmethod
def run(filename = None):
if filename!=None:
csv_data = open(filename)
else:
csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
rows = csv.reader(csv_data)
headers = {}
got_headers = False
for row in rows:
if got_headers == 0:
# Store headers
colnum = 0
for col in row:
headers[col] = colnum
colnum += 1
got_headers = True
else:
# Find and store system data
try:
system, created = SolarSystem.objects.get_or_create(name = row[headers['STAR']])
system.temperature = row[headers['TEFF']] or None
system.save()
except ValidationError:
print stardata
raise
# Find and store planet data
try:
planet, created = Planet.objects.get_or_create(name = row[headers['NAME']], solar_system = system)
planet.radius = row[headers['R']] or None
planet.semi_major_axis = row[headers['A']]
planet.save()
except ValidationError:
print planetdata
raise
| <commit_before>import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError
class ExoplanetsImporter:
@staticmethod
def run(filename = None):
if filename!=None:
csv_data = open(filename)
else:
csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
rows = csv.reader(csv_data)
headers = {}
got_headers = False
for row in rows:
if got_headers == 0:
# Store headers
colnum = 0
for col in row:
headers[col] = colnum
colnum += 1
got_headers = True
else:
# Find and store system data
stardata = {
'name': row[headers['STAR']],
'temperature': row[headers['TEFF']] or None
}
try:
system, created = SolarSystem.objects.get_or_create(**stardata)
except ValidationError:
print stardata
raise
# Find and store planet data
planetdata = {
'name': row[headers['NAME']],
'radius': row[headers['R']] or None,
#'temperature': row[headers['NAME']],
'semi_major_axis': row[headers['A']],
'solar_system': system
}
try:
planet, created = Planet.objects.get_or_create(**planetdata)
except ValidationError:
print planetdata
raise
<commit_msg>Refactor importer slightly to avoid creation problems<commit_after> | import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError
class ExoplanetsImporter:
@staticmethod
def run(filename = None):
if filename!=None:
csv_data = open(filename)
else:
csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
rows = csv.reader(csv_data)
headers = {}
got_headers = False
for row in rows:
if got_headers == 0:
# Store headers
colnum = 0
for col in row:
headers[col] = colnum
colnum += 1
got_headers = True
else:
# Find and store system data
try:
system, created = SolarSystem.objects.get_or_create(name = row[headers['STAR']])
system.temperature = row[headers['TEFF']] or None
system.save()
except ValidationError:
print stardata
raise
# Find and store planet data
try:
planet, created = Planet.objects.get_or_create(name = row[headers['NAME']], solar_system = system)
planet.radius = row[headers['R']] or None
planet.semi_major_axis = row[headers['A']]
planet.save()
except ValidationError:
print planetdata
raise
| import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError
class ExoplanetsImporter:
@staticmethod
def run(filename = None):
if filename!=None:
csv_data = open(filename)
else:
csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
rows = csv.reader(csv_data)
headers = {}
got_headers = False
for row in rows:
if got_headers == 0:
# Store headers
colnum = 0
for col in row:
headers[col] = colnum
colnum += 1
got_headers = True
else:
# Find and store system data
stardata = {
'name': row[headers['STAR']],
'temperature': row[headers['TEFF']] or None
}
try:
system, created = SolarSystem.objects.get_or_create(**stardata)
except ValidationError:
print stardata
raise
# Find and store planet data
planetdata = {
'name': row[headers['NAME']],
'radius': row[headers['R']] or None,
#'temperature': row[headers['NAME']],
'semi_major_axis': row[headers['A']],
'solar_system': system
}
try:
planet, created = Planet.objects.get_or_create(**planetdata)
except ValidationError:
print planetdata
raise
Refactor importer slightly to avoid creation problemsimport requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError
class ExoplanetsImporter:
@staticmethod
def run(filename = None):
if filename!=None:
csv_data = open(filename)
else:
csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
rows = csv.reader(csv_data)
headers = {}
got_headers = False
for row in rows:
if got_headers == 0:
# Store headers
colnum = 0
for col in row:
headers[col] = colnum
colnum += 1
got_headers = True
else:
# Find and store system data
try:
system, created = SolarSystem.objects.get_or_create(name = row[headers['STAR']])
system.temperature = row[headers['TEFF']] or None
system.save()
except ValidationError:
print stardata
raise
# Find and store planet data
try:
planet, created = Planet.objects.get_or_create(name = row[headers['NAME']], solar_system = system)
planet.radius = row[headers['R']] or None
planet.semi_major_axis = row[headers['A']]
planet.save()
except ValidationError:
print planetdata
raise
| <commit_before>import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError
class ExoplanetsImporter:
@staticmethod
def run(filename = None):
if filename!=None:
csv_data = open(filename)
else:
csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
rows = csv.reader(csv_data)
headers = {}
got_headers = False
for row in rows:
if got_headers == 0:
# Store headers
colnum = 0
for col in row:
headers[col] = colnum
colnum += 1
got_headers = True
else:
# Find and store system data
stardata = {
'name': row[headers['STAR']],
'temperature': row[headers['TEFF']] or None
}
try:
system, created = SolarSystem.objects.get_or_create(**stardata)
except ValidationError:
print stardata
raise
# Find and store planet data
planetdata = {
'name': row[headers['NAME']],
'radius': row[headers['R']] or None,
#'temperature': row[headers['NAME']],
'semi_major_axis': row[headers['A']],
'solar_system': system
}
try:
planet, created = Planet.objects.get_or_create(**planetdata)
except ValidationError:
print planetdata
raise
<commit_msg>Refactor importer slightly to avoid creation problems<commit_after>import requests
import csv
from models import Planet, SolarSystem
from django.core.exceptions import ValidationError
class ExoplanetsImporter:
@staticmethod
def run(filename = None):
if filename!=None:
csv_data = open(filename)
else:
csv_data = requests.get('http://exoplanets.org/exoplanets.csv')
rows = csv.reader(csv_data)
headers = {}
got_headers = False
for row in rows:
if got_headers == 0:
# Store headers
colnum = 0
for col in row:
headers[col] = colnum
colnum += 1
got_headers = True
else:
# Find and store system data
try:
system, created = SolarSystem.objects.get_or_create(name = row[headers['STAR']])
system.temperature = row[headers['TEFF']] or None
system.save()
except ValidationError:
print stardata
raise
# Find and store planet data
try:
planet, created = Planet.objects.get_or_create(name = row[headers['NAME']], solar_system = system)
planet.radius = row[headers['R']] or None
planet.semi_major_axis = row[headers['A']]
planet.save()
except ValidationError:
print planetdata
raise
|
5387fa4c96bb0cdc62e83203065dda84d91c8a57 | project_recalculate/models/resource_calendar.py | project_recalculate/models/resource_calendar.py | # -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
| # -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
context = context or {}
context['tz'] = 'UTC'
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
| Define UTC as tz in get_working_days_of_date method | [FIX] Define UTC as tz in get_working_days_of_date method
| Python | agpl-3.0 | OCA/project,OCA/project | # -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
[FIX] Define UTC as tz in get_working_days_of_date method | # -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
context = context or {}
context['tz'] = 'UTC'
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
| <commit_before># -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
<commit_msg>[FIX] Define UTC as tz in get_working_days_of_date method<commit_after> | # -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
context = context or {}
context['tz'] = 'UTC'
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
| # -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
[FIX] Define UTC as tz in get_working_days_of_date method# -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
context = context or {}
context['tz'] = 'UTC'
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
| <commit_before># -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
<commit_msg>[FIX] Define UTC as tz in get_working_days_of_date method<commit_after># -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
context = context or {}
context['tz'] = 'UTC'
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
|
354af0bd82da57e718e9612ffb11e3b56d335fbf | projects/search_indexes.py | projects/search_indexes.py | import datetime
import os
from haystack.indexes import *
from haystack import site
from projects.models import File, ImportedFile
from projects import constants
class FileIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='heading')
def get_queryset(self):
return File.objects.filter(project__status=constants.LIVE_STATUS)
class ImportedFileIndex(SearchIndex):
text = CharField(document=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='name')
def prepare_text(self, obj):
full_path = obj.project.full_html_path
to_read = os.path.join(full_path, obj.path.lstrip('/'))
content = open(to_read, 'r').read()
return content
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
| import datetime
import os
import codecs
from haystack.indexes import *
from haystack import site
from projects.models import File, ImportedFile
from projects import constants
class FileIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='heading')
def get_queryset(self):
return File.objects.filter(project__status=constants.LIVE_STATUS)
class ImportedFileIndex(SearchIndex):
text = CharField(document=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='name')
def prepare_text(self, obj):
full_path = obj.project.full_html_path
to_read = os.path.join(full_path, obj.path.lstrip('/'))
content = codecs.open(to_read, encoding="utf-8", mode='r').read()
return content
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
| Fix unicode fail in search indexing. | Fix unicode fail in search indexing.
| Python | mit | atsuyim/readthedocs.org,tddv/readthedocs.org,singingwolfboy/readthedocs.org,raven47git/readthedocs.org,LukasBoersma/readthedocs.org,stevepiercy/readthedocs.org,johncosta/private-readthedocs.org,michaelmcandrew/readthedocs.org,espdev/readthedocs.org,wijerasa/readthedocs.org,Tazer/readthedocs.org,sid-kap/readthedocs.org,takluyver/readthedocs.org,stevepiercy/readthedocs.org,mhils/readthedocs.org,VishvajitP/readthedocs.org,cgourlay/readthedocs.org,agjohnson/readthedocs.org,sils1297/readthedocs.org,rtfd/readthedocs.org,dirn/readthedocs.org,mrshoki/readthedocs.org,soulshake/readthedocs.org,emawind84/readthedocs.org,emawind84/readthedocs.org,espdev/readthedocs.org,alex/readthedocs.org,istresearch/readthedocs.org,GovReady/readthedocs.org,soulshake/readthedocs.org,attakei/readthedocs-oauth,rtfd/readthedocs.org,rtfd/readthedocs.org,fujita-shintaro/readthedocs.org,gjtorikian/readthedocs.org,kenshinthebattosai/readthedocs.org,davidfischer/readthedocs.org,asampat3090/readthedocs.org,GovReady/readthedocs.org,pombredanne/readthedocs.org,SteveViss/readthedocs.org,attakei/readthedocs-oauth,agjohnson/readthedocs.org,sils1297/readthedocs.org,CedarLogic/readthedocs.org,laplaceliu/readthedocs.org,ojii/readthedocs.org,sid-kap/readthedocs.org,wanghaven/readthedocs.org,atsuyim/readthedocs.org,sunnyzwh/readthedocs.org,soulshake/readthedocs.org,kenwang76/readthedocs.org,Tazer/readthedocs.org,michaelmcandrew/readthedocs.org,asampat3090/readthedocs.org,agjohnson/readthedocs.org,Tazer/readthedocs.org,atsuyim/readthedocs.org,nyergler/pythonslides,jerel/readthedocs.org,atsuyim/readthedocs.org,nikolas/readthedocs.org,wanghaven/readthedocs.org,emawind84/readthedocs.org,wijerasa/readthedocs.org,safwanrahman/readthedocs.org,fujita-shintaro/readthedocs.org,mhils/readthedocs.org,mrshoki/readthedocs.org,mrshoki/readthedocs.org,istresearch/readthedocs.org,davidfischer/readthedocs.org,jerel/readthedocs.org,singingwolfboy/readthedocs.org,VishvajitP/readthedocs.org,davidfischer/readthedocs.
org,istresearch/readthedocs.org,attakei/readthedocs-oauth,laplaceliu/readthedocs.org,tddv/readthedocs.org,nikolas/readthedocs.org,Tazer/readthedocs.org,sunnyzwh/readthedocs.org,techtonik/readthedocs.org,takluyver/readthedocs.org,attakei/readthedocs-oauth,titiushko/readthedocs.org,jerel/readthedocs.org,hach-que/readthedocs.org,kdkeyser/readthedocs.org,pombredanne/readthedocs.org,cgourlay/readthedocs.org,kenshinthebattosai/readthedocs.org,kdkeyser/readthedocs.org,royalwang/readthedocs.org,johncosta/private-readthedocs.org,safwanrahman/readthedocs.org,alex/readthedocs.org,dirn/readthedocs.org,michaelmcandrew/readthedocs.org,alex/readthedocs.org,techtonik/readthedocs.org,clarkperkins/readthedocs.org,kenwang76/readthedocs.org,sunnyzwh/readthedocs.org,asampat3090/readthedocs.org,titiushko/readthedocs.org,clarkperkins/readthedocs.org,sils1297/readthedocs.org,kenshinthebattosai/readthedocs.org,davidfischer/readthedocs.org,espdev/readthedocs.org,VishvajitP/readthedocs.org,GovReady/readthedocs.org,clarkperkins/readthedocs.org,d0ugal/readthedocs.org,nikolas/readthedocs.org,raven47git/readthedocs.org,hach-que/readthedocs.org,wanghaven/readthedocs.org,espdev/readthedocs.org,michaelmcandrew/readthedocs.org,nyergler/pythonslides,kdkeyser/readthedocs.org,sid-kap/readthedocs.org,titiushko/readthedocs.org,ojii/readthedocs.org,raven47git/readthedocs.org,KamranMackey/readthedocs.org,laplaceliu/readthedocs.org,takluyver/readthedocs.org,espdev/readthedocs.org,SteveViss/readthedocs.org,laplaceliu/readthedocs.org,alex/readthedocs.org,safwanrahman/readthedocs.org,royalwang/readthedocs.org,takluyver/readthedocs.org,ojii/readthedocs.org,tddv/readthedocs.org,jerel/readthedocs.org,gjtorikian/readthedocs.org,johncosta/private-readthedocs.org,Carreau/readthedocs.org,LukasBoersma/readthedocs.org,Carreau/readthedocs.org,raven47git/readthedocs.org,d0ugal/readthedocs.org,mhils/readthedocs.org,nyergler/pythonslides,techtonik/readthedocs.org,nikolas/readthedocs.org,royalwang/readthedocs.org,dirn/readth
edocs.org,gjtorikian/readthedocs.org,kenwang76/readthedocs.org,CedarLogic/readthedocs.org,singingwolfboy/readthedocs.org,GovReady/readthedocs.org,Carreau/readthedocs.org,sid-kap/readthedocs.org,mrshoki/readthedocs.org,hach-que/readthedocs.org,ojii/readthedocs.org,LukasBoersma/readthedocs.org,titiushko/readthedocs.org,d0ugal/readthedocs.org,Carreau/readthedocs.org,soulshake/readthedocs.org,sunnyzwh/readthedocs.org,pombredanne/readthedocs.org,VishvajitP/readthedocs.org,cgourlay/readthedocs.org,sils1297/readthedocs.org,cgourlay/readthedocs.org,wijerasa/readthedocs.org,kenshinthebattosai/readthedocs.org,safwanrahman/readthedocs.org,asampat3090/readthedocs.org,CedarLogic/readthedocs.org,nyergler/pythonslides,agjohnson/readthedocs.org,kdkeyser/readthedocs.org,istresearch/readthedocs.org,d0ugal/readthedocs.org,KamranMackey/readthedocs.org,kenwang76/readthedocs.org,fujita-shintaro/readthedocs.org,SteveViss/readthedocs.org,SteveViss/readthedocs.org,CedarLogic/readthedocs.org,emawind84/readthedocs.org,dirn/readthedocs.org,rtfd/readthedocs.org,singingwolfboy/readthedocs.org,LukasBoersma/readthedocs.org,wijerasa/readthedocs.org,royalwang/readthedocs.org,stevepiercy/readthedocs.org,KamranMackey/readthedocs.org,wanghaven/readthedocs.org,stevepiercy/readthedocs.org,gjtorikian/readthedocs.org,mhils/readthedocs.org,clarkperkins/readthedocs.org,KamranMackey/readthedocs.org,fujita-shintaro/readthedocs.org,hach-que/readthedocs.org,techtonik/readthedocs.org | import datetime
import os
from haystack.indexes import *
from haystack import site
from projects.models import File, ImportedFile
from projects import constants
class FileIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='heading')
def get_queryset(self):
return File.objects.filter(project__status=constants.LIVE_STATUS)
class ImportedFileIndex(SearchIndex):
text = CharField(document=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='name')
def prepare_text(self, obj):
full_path = obj.project.full_html_path
to_read = os.path.join(full_path, obj.path.lstrip('/'))
content = open(to_read, 'r').read()
return content
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
Fix unicode fail in search indexing. | import datetime
import os
import codecs
from haystack.indexes import *
from haystack import site
from projects.models import File, ImportedFile
from projects import constants
class FileIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='heading')
def get_queryset(self):
return File.objects.filter(project__status=constants.LIVE_STATUS)
class ImportedFileIndex(SearchIndex):
text = CharField(document=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='name')
def prepare_text(self, obj):
full_path = obj.project.full_html_path
to_read = os.path.join(full_path, obj.path.lstrip('/'))
content = codecs.open(to_read, encoding="utf-8", mode='r').read()
return content
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
| <commit_before>import datetime
import os
from haystack.indexes import *
from haystack import site
from projects.models import File, ImportedFile
from projects import constants
class FileIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='heading')
def get_queryset(self):
return File.objects.filter(project__status=constants.LIVE_STATUS)
class ImportedFileIndex(SearchIndex):
text = CharField(document=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='name')
def prepare_text(self, obj):
full_path = obj.project.full_html_path
to_read = os.path.join(full_path, obj.path.lstrip('/'))
content = open(to_read, 'r').read()
return content
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
<commit_msg>Fix unicode fail in search indexing.<commit_after> | import datetime
import os
import codecs
from haystack.indexes import *
from haystack import site
from projects.models import File, ImportedFile
from projects import constants
class FileIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='heading')
def get_queryset(self):
return File.objects.filter(project__status=constants.LIVE_STATUS)
class ImportedFileIndex(SearchIndex):
text = CharField(document=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='name')
def prepare_text(self, obj):
full_path = obj.project.full_html_path
to_read = os.path.join(full_path, obj.path.lstrip('/'))
content = codecs.open(to_read, encoding="utf-8", mode='r').read()
return content
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
| import datetime
import os
from haystack.indexes import *
from haystack import site
from projects.models import File, ImportedFile
from projects import constants
class FileIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='heading')
def get_queryset(self):
return File.objects.filter(project__status=constants.LIVE_STATUS)
class ImportedFileIndex(SearchIndex):
text = CharField(document=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='name')
def prepare_text(self, obj):
full_path = obj.project.full_html_path
to_read = os.path.join(full_path, obj.path.lstrip('/'))
content = open(to_read, 'r').read()
return content
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
Fix unicode fail in search indexing.import datetime
import os
import codecs
from haystack.indexes import *
from haystack import site
from projects.models import File, ImportedFile
from projects import constants
class FileIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='heading')
def get_queryset(self):
return File.objects.filter(project__status=constants.LIVE_STATUS)
class ImportedFileIndex(SearchIndex):
text = CharField(document=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='name')
def prepare_text(self, obj):
full_path = obj.project.full_html_path
to_read = os.path.join(full_path, obj.path.lstrip('/'))
content = codecs.open(to_read, encoding="utf-8", mode='r').read()
return content
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
| <commit_before>import datetime
import os
from haystack.indexes import *
from haystack import site
from projects.models import File, ImportedFile
from projects import constants
class FileIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='heading')
def get_queryset(self):
return File.objects.filter(project__status=constants.LIVE_STATUS)
class ImportedFileIndex(SearchIndex):
text = CharField(document=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='name')
def prepare_text(self, obj):
full_path = obj.project.full_html_path
to_read = os.path.join(full_path, obj.path.lstrip('/'))
content = open(to_read, 'r').read()
return content
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
<commit_msg>Fix unicode fail in search indexing.<commit_after>import datetime
import os
import codecs
from haystack.indexes import *
from haystack import site
from projects.models import File, ImportedFile
from projects import constants
class FileIndex(SearchIndex):
text = CharField(document=True, use_template=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='heading')
def get_queryset(self):
return File.objects.filter(project__status=constants.LIVE_STATUS)
class ImportedFileIndex(SearchIndex):
text = CharField(document=True)
author = CharField(model_attr='project__user')
project = CharField(model_attr='project__name')
title = CharField(model_attr='name')
def prepare_text(self, obj):
full_path = obj.project.full_html_path
to_read = os.path.join(full_path, obj.path.lstrip('/'))
content = codecs.open(to_read, encoding="utf-8", mode='r').read()
return content
site.register(File, FileIndex)
site.register(ImportedFile, ImportedFileIndex)
|
a4e402caf7b5a90607b6a206046c96c53a37e860 | slack_client/exceptions.py | slack_client/exceptions.py | class SlackError(Exception):
pass
class SlackNo(SlackError):
def __init__(self, msg_error):
self.msg = msg_error
def __str__(self):
return repr(self.msg)
class SlackTooManyRequests(SlackError):
def __init__(self, time_to_wait):
self.time_to_wait = time_to_wait
def __str__(self):
return ("Too many requests. Wait %d seconds before trying again" \
% (self.time_to_wait))
| class SlackError(Exception):
pass
class SlackNo(SlackError):
pass
class SlackMissingAPI(SlackError):
pass
| Add a custom exception (SlackMissingAPI) | Add a custom exception (SlackMissingAPI)
This exception is raised when a SlackObject is created without
any way to get an API.
Remove custom implementation for exceptions. There is no reason strong
enough for this.
| Python | mit | Shir0kamii/slack-client | class SlackError(Exception):
pass
class SlackNo(SlackError):
def __init__(self, msg_error):
self.msg = msg_error
def __str__(self):
return repr(self.msg)
class SlackTooManyRequests(SlackError):
def __init__(self, time_to_wait):
self.time_to_wait = time_to_wait
def __str__(self):
return ("Too many requests. Wait %d seconds before trying again" \
% (self.time_to_wait))
Add a custom exception (SlackMissingAPI)
This exception is raised when a SlackObject is created without
any way to get an API.
Remove custom implementation for exceptions. There is no reason strong
enough for this. | class SlackError(Exception):
pass
class SlackNo(SlackError):
pass
class SlackMissingAPI(SlackError):
pass
| <commit_before>class SlackError(Exception):
pass
class SlackNo(SlackError):
def __init__(self, msg_error):
self.msg = msg_error
def __str__(self):
return repr(self.msg)
class SlackTooManyRequests(SlackError):
def __init__(self, time_to_wait):
self.time_to_wait = time_to_wait
def __str__(self):
return ("Too many requests. Wait %d seconds before trying again" \
% (self.time_to_wait))
<commit_msg>Add a custom exception (SlackMissingAPI)
This exception is raised when a SlackObject is created without
any way to get an API.
Remove custom implementation for exceptions. There is no reason strong
enough for this.<commit_after> | class SlackError(Exception):
pass
class SlackNo(SlackError):
pass
class SlackMissingAPI(SlackError):
pass
| class SlackError(Exception):
pass
class SlackNo(SlackError):
def __init__(self, msg_error):
self.msg = msg_error
def __str__(self):
return repr(self.msg)
class SlackTooManyRequests(SlackError):
def __init__(self, time_to_wait):
self.time_to_wait = time_to_wait
def __str__(self):
return ("Too many requests. Wait %d seconds before trying again" \
% (self.time_to_wait))
Add a custom exception (SlackMissingAPI)
This exception is raised when a SlackObject is created without
any way to get an API.
Remove custom implementation for exceptions. There is no reason strong
enough for this.class SlackError(Exception):
pass
class SlackNo(SlackError):
pass
class SlackMissingAPI(SlackError):
pass
| <commit_before>class SlackError(Exception):
pass
class SlackNo(SlackError):
def __init__(self, msg_error):
self.msg = msg_error
def __str__(self):
return repr(self.msg)
class SlackTooManyRequests(SlackError):
def __init__(self, time_to_wait):
self.time_to_wait = time_to_wait
def __str__(self):
return ("Too many requests. Wait %d seconds before trying again" \
% (self.time_to_wait))
<commit_msg>Add a custom exception (SlackMissingAPI)
This exception is raised when a SlackObject is created without
any way to get an API.
Remove custom implementation for exceptions. There is no reason strong
enough for this.<commit_after>class SlackError(Exception):
pass
class SlackNo(SlackError):
pass
class SlackMissingAPI(SlackError):
pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.